code (string, len 5..1.03M) | repo_name (string, len 5..90) | path (string, len 4..158) | license (15 classes) | size (int64, 5..1.03M) | n_ast_errors (int64, 0..53.9k) | ast_max_depth (int64, 2..4.17k) | n_whitespaces (int64, 0..365k) | n_ast_nodes (int64, 3..317k) | n_ast_terminals (int64, 1..171k) | n_ast_nonterminals (int64, 1..146k) | loc (int64, -1..37.3k) | cycloplexity (int64, -1..1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---
------------------------------------------------------------------------------
-- | This module contains the ServerInfo data type.
--
module Autotool.XmlRpc.Types.ServerInfo (
ServerInfo (..)
) where
------------------------------------------------------------------------------
import Network.XmlRpc.Internals
import Autotool.XmlRpc.Types.Basic (Name)
import Autotool.XmlRpc.Types.Version (Version)
------------------------------------------------------------------------------
data ServerInfo = ServerInfo {
protocolVersion :: Version
, serverName :: Name
, serverVersion :: Version
} deriving (Show)
instance XmlRpcType ServerInfo where
toValue s = toValue [("protocol_version", toValue (protocolVersion s)),
("server_name", toValue (serverName s)),
("server_version", toValue (serverVersion s))]
fromValue v = do t <- fromValue v
s <- getField "ServerInfo" t
pv <- getField "protocol_version" s
sn <- getField "server_name" s
sv <- getField "server_version" s
return $ ServerInfo pv sn sv
getType _ = TStruct
| j-hannes/autotool-xmlrpc-client | src/Autotool/XmlRpc/Types/ServerInfo.hs | bsd-3-clause | 1,194 | 0 | 11 | 286 | 247 | 135 | 112 | 21 | 0 |
module Main where
import GenericGame
import AgentGeneric
import BreakthroughGame
import Board
import ThreadLocal
import Data.Default
import Data.IORef
import Data.List (sort, group, groupBy)
import Control.Monad
import Control.Arrow
import Control.Concurrent
import Control.Concurrent.Async
main = runThrLocMainIO $ do
tCount <- getNumCapabilities
let gCount = 1000
gtCount = gCount `div` tCount
gtCountExtra = gCount - (gtCount * tCount)
      -- the first thread does the extra jobs
ttGCount 1 = gtCountExtra + gtCount
ttGCount _ = gtCount
threadJob tNum = do
a1 <- mkAgent 15 :: IO AgentMCTS
-- a1 <- mkAgent () :: IO AgentRandom
a2 <- mkAgent () :: IO AgentRandom
winners <- sequence $ replicate (ttGCount tNum) (oneGame a1 a2)
return winners
asyncThreads <- mapM (async . threadJob) [1..tCount]
all'winners <- mapM wait asyncThreads
let counts = map (head &&& length) $ group $ sort $ concat all'winners
print counts
return ()
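-- Illustrative sketch (added here, not part of the original driver): the
-- per-thread split computed in 'main' above, as a pure function.  Thread 1
-- absorbs the remainder, so the counts always sum to the requested total,
-- e.g. perThreadGameCounts 1000 3 == [334,333,333].
perThreadGameCounts :: Int -> Int -> [Int]
perThreadGameCounts total threads = (base + extra) : replicate (threads - 1) base
  where
    base  = total `div` threads
    extra = total - base * threads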
oneGame :: (Agent2 a1, Agent2 a2) => a1 -> a2 -> ThrLocIO (Maybe Player2)
oneGame a1 a2 = do
ref <- newIORef Nothing
let g0 = freshGameDefaultParams :: Breakthrough
cb = GameDriverCallback { gameTurn = (\ g p -> do
-- print p
-- putStrLn (prettyPrintGame (g :: Breakthrough))
return True)
, gameFinished = (\ g -> do
let w = winner g
putStrLn $ "Game finished! Winner: " ++ show w
-- putStrLn (prettyPrintGame (g :: Breakthrough))
writeIORef ref w
)
}
driverG2 g0 a1 a2 cb
readIORef ref
| Tener/deeplearning-thesis | src/breakthrough-driver.hs | bsd-3-clause | 1,813 | 0 | 19 | 638 | 469 | 238 | 231 | 42 | 2 |
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
module CLaSH.Util.Core.Types
( CoreBinding
, TypedThing(..)
, CoreContext(..)
, TransformState(..)
, TransformSession
, TransformStep
, tsTransformCounter
, tsBndrSubst
, tsUniqSupply
, emptyTransformState
)
where
-- External Modules
import Control.Monad.State.Strict (StateT)
import Control.Monad.Error (ErrorT)
import qualified Data.Label
import Language.KURE (RewriteM)
-- GHC API
import qualified CoreSyn
import qualified CoreUtils
import qualified Id
import qualified FastString
import qualified Outputable
import qualified Type
import qualified UniqSupply
import qualified VarEnv
import CLaSH.Util.Pretty (pprString)
type CoreBinding = (CoreSyn.CoreBndr, CoreSyn.CoreExpr)
class Outputable.Outputable t => TypedThing t where
getType :: t -> Maybe Type.Type
getTypeFail :: t -> Type.Type
  getTypeFail t = case getType t of Just ty -> ty ; Nothing -> error "getTypeFail: no type available"
instance TypedThing CoreSyn.CoreExpr where
getType (CoreSyn.Type _) = Nothing
getType expr = Just $ CoreUtils.exprType expr
instance TypedThing CoreSyn.CoreBndr where
getType = return . Id.idType
instance TypedThing Type.Type where
getType = return
data CoreContext = AppFirst
| AppSecond
| LetBinding [CoreSyn.CoreBndr]
| LetBody [CoreSyn.CoreBndr]
| LambdaBody CoreSyn.CoreBndr
| CaseAlt CoreSyn.CoreBndr
| Other
deriving (Eq, Show)
data TransformState = TransformState
{ _tsTransformCounter :: Int
, _tsBndrSubst :: VarEnv.VarEnv CoreSyn.CoreBndr
, _tsUniqSupply :: UniqSupply.UniqSupply
}
Data.Label.mkLabels [''TransformState]
type TransformSession m = ErrorT String (StateT TransformState m)
type TransformStep m = [CoreContext] -> CoreSyn.CoreExpr -> RewriteM (TransformSession m) [CoreContext] CoreSyn.CoreExpr
emptyTransformState :: UniqSupply.UniqSupply -> TransformState
emptyTransformState uniqSupply = TransformState 0 VarEnv.emptyVarEnv uniqSupply
| christiaanb/clash-tryout | src/CLaSH/Util/Core/Types.hs | bsd-3-clause | 2,032 | 0 | 11 | 410 | 496 | 287 | 209 | -1 | -1 |
module Data.Cauterize.Parser.Utils ( parens , quoted , spaces' ) where
import Text.Parsec
import Text.Parsec.Text
import Data.Text
import Control.Monad (liftM)
parens :: Parser a -> Parser a
parens a = do
_ <- char '('
a' <- a
_ <- char ')'
return a'
quoted :: Parser Text
quoted = do
_ <- char '"'
liftM pack $ manyTill anyToken (char '"')
spaces' :: Parser ()
spaces' = space >> spaces
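-- Hypothetical usage sketch (added here, not part of the original module):
-- the combinators above compose in the usual parsec style, e.g. a quoted
-- string wrapped in parentheses.  'parenQuoted' is an illustrative name.
parenQuoted :: Parser Text
parenQuoted = parens quoted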
| sw17ch/Z-ARCHIVED-hscauterize | Data/Cauterize/Parser/Utils.hs | bsd-3-clause | 407 | 0 | 10 | 89 | 164 | 83 | 81 | 17 | 1 |
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
-- |
module Ops.Cabal.Sandbox where
import Ops.Cabal.Common
import Data.List.Split (splitWhen)
import Data.Text (Text)
import qualified Data.Text as T
import Shelly
createSandbox :: Sh ()
createSandbox = cabal_ "sandbox" ["init"]
addSource :: [Text] -> Sh ()
addSource [] = return ()
addSource dirs = command_ "cabal" ["sandbox", "add-source"] dirs
getAddSource :: Sh [Text]
getAddSource = do
inSandbox <- test_f "cabal.sandbox.config"
if inSandbox then do
cout <- cabal "sandbox" ["list-sources"]
return $ case splitWhen T.null (T.lines cout) of
(_:dirs: _) -> dirs -- middle of 3
_ -> []
else return []
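-- Illustrative helper (added here, not part of the original module):
-- 'getAddSource' above assumes "cabal sandbox list-sources" prints a header
-- block, the add-source directories, and a footer block separated by blank
-- lines; the middle chunk is the one kept.  A pure version of that selection:
middleBlock :: [[Text]] -> [Text]
middleBlock (_ : dirs : _) = dirs
middleBlock _ = []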
cleanSandbox :: Sh ()
cleanSandbox = do
dirs <- getAddSource
cabal_ "sandbox" ["delete"]
createSandbox
addSource dirs
| diagrams/package-ops | src/Ops/Cabal/Sandbox.hs | bsd-3-clause | 973 | 0 | 16 | 261 | 276 | 146 | 130 | 29 | 3 |
{-# LANGUAGE NoImplicitPrelude #-}
module Bamboo.Theme.Blueprint.Control.Tag where
import Bamboo.Type (summary_for_tag)
import Bamboo.Type.State
import Bamboo.Theme.Blueprint.Atom.Post
import Bamboo.Theme.Blueprint.Control.Helper
import Bamboo.Theme.Blueprint.Env hiding (p, tag_id)
import Bamboo.Theme.Blueprint.Widget.Template
import qualified Bamboo.Type as C
import qualified Bamboo.Type.State as State
view :: Widget
view s = s.posts.(map render > (+++ nav p ( s.env.slashed_script_name / tag_id)) > page s)
where
p = s.pager
tag_id = s.uid
render = render_summary s (s.config.summary_for_tag)
| nfjinjing/bamboo-theme-blueprint | src/Bamboo/Theme/Blueprint/Control/Tag.hs | bsd-3-clause | 621 | 1 | 14 | 86 | 172 | 107 | 65 | -1 | -1 |
--
--
--
-----------------
-- Exercise 7.12.
-----------------
--
--
--
module E'7'12 where
import Prelude hiding ( minimum , maximum )
import qualified Data.List ( minimum )
import qualified Data.List ( maximum )
import GHC.Classes ( min )
import GHC.Classes ( max )
import Test.QuickCheck
-- Subchapter 7.4, example 6 (exercise-relevant definitions):
iSort :: [Integer] -> [Integer]
iSort [] = []
iSort ( integer : remainingIntegers )
= ins integer (iSort remainingIntegers)
ins :: Integer -> [Integer] -> [Integer]
ins integer [] = [integer]
ins integer ( listInteger : remainingListIntegers )
| integer < listInteger = integer : (listInteger : remainingListIntegers)
| otherwise = listInteger : (ins integer remainingListIntegers)
-- ...
minimum :: [Integer] -> Integer
minimum integerList = head (iSort integerList)
prop_minimum :: [Integer] -> Property
prop_minimum integerList
= integerList /= []
==> minimum integerList == Data.List.minimum integerList
-- GHCi> quickCheck prop_minimum
-- Other solution for "minimum":
minimum' :: [Integer] -> Integer -- Primitive recursion:
minimum' [] = error "Empty list."
minimum' [ integer ] = integer
minimum' ( integer : remainingIntegers )
= integer `min` (minimum' remainingIntegers)
prop_minimum' :: [Integer] -> Property
prop_minimum' integerList
= integerList /= []
==> minimum' integerList == Data.List.minimum integerList
-- GHCi> quickCheck prop_minimum'
maximum :: [Integer] -> Integer
maximum integerList = head (reverse (iSort integerList))
prop_maximum :: [Integer] -> Property
prop_maximum integerList
= integerList /= []
==> maximum integerList == Data.List.maximum integerList
-- GHCi> quickCheck prop_maximum
-- Other solution for "maximum":
maximum' :: [Integer] -> Integer -- Primitive recursion:
maximum' [] = error "Empty list."
maximum' [ integer ] = integer
maximum' ( integer : remainingIntegers )
= integer `max` (maximum' remainingIntegers)
prop_maximum' :: [Integer] -> Property
prop_maximum' integerList
= integerList /= []
==> maximum' integerList == Data.List.maximum integerList
-- GHCi> quickCheck prop_maximum'
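-- Additional illustrative property (not part of the original exercise
-- solution): for any non-empty list, the minimum is at most the maximum.
prop_minimumLeqMaximum :: [Integer] -> Property
prop_minimumLeqMaximum integerList
  = integerList /= []
    ==> minimum integerList <= maximum integerList
-- GHCi> quickCheck prop_minimumLeqMaximum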
{- GHCi>
quickCheck prop_minimum
quickCheck prop_minimum'
quickCheck prop_maximum
quickCheck prop_maximum'
-}
| pascal-knodel/haskell-craft | _/links/E'7'12.hs | mit | 2,345 | 0 | 9 | 446 | 591 | 325 | 266 | -1 | -1 |
module UndefinedConstructor where
data Tree a = Bin (Tree a) (Tree a)
| Leaf a
main :: Tree Int
main = Binn (Leaf 3) (Leaf 5)
| roberth/uu-helium | test/staticerrors/UndefinedConstructor.hs | gpl-3.0 | 165 | 0 | 8 | 65 | 63 | 34 | 29 | 5 | 1 |
module ListWatched where
import qualified Github.Repos.Watching as Github
import Data.List (intercalate)
import Data.Maybe (fromMaybe)
main = do
possibleRepos <- Github.reposWatchedBy "mike-burns"
putStrLn $ either (("Error: "++) . show)
(intercalate "\n\n" . map formatRepo)
possibleRepos
formatRepo repo =
(Github.repoName repo) ++ "\t" ++
(fromMaybe "" $ Github.repoDescription repo) ++ "\n" ++
(Github.repoHtmlUrl repo) ++ "\n" ++
(fromMaybe "" $ Github.repoCloneUrl repo) ++ "\t" ++
(formatDate $ Github.repoUpdatedAt repo) ++ "\n" ++
formatLanguage (Github.repoLanguage repo) ++
"watchers: " ++ (show $ Github.repoWatchers repo) ++ "\t" ++
"forks: " ++ (show $ Github.repoForks repo)
formatDate (Just date) = show . Github.fromDate $ date
formatDate Nothing = ""
formatLanguage (Just language) = "language: " ++ language ++ "\t"
formatLanguage Nothing = ""
| jwiegley/github | samples/Repos/Watching/ListWatched.hs | bsd-3-clause | 942 | 0 | 22 | 197 | 311 | 159 | 152 | 22 | 1 |
{-|
Module : Idris.Primitives
Description : Provision of primitive data types.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE PatternGuards, RankNTypes, ScopedTypeVariables #-}
module Idris.Primitives(primitives, Prim(..)) where
import Idris.AbsSyntax
import Idris.Core.Evaluate
import Idris.Core.TT
import IRTS.Lang
import Data.Bits
import Data.Char
import Data.Function (on)
import Data.Int
import qualified Data.Vector.Unboxed as V
import Data.Word
import Debug.Trace
data Prim = Prim { p_name :: Name,
p_type :: Type,
p_arity :: Int,
p_def :: [Const] -> Maybe Const,
p_lexp :: (Int, PrimFn),
p_total :: Totality
}
ty :: [Const] -> Const -> Type
ty [] x = Constant x
ty (t:ts) x = Bind (sMN 0 "T") (Pi Nothing (Constant t) (TType (UVar [] (-3)))) (ty ts x)
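-- For example (illustrative note, not in the original source):
-- ty [a, b] r builds the non-dependent function type  a -> b -> r
-- as nested Pi binders over the given constant types.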
total, partial, iopartial :: Totality
total = Total []
partial = Partial NotCovering
iopartial = Partial ExternalIO
primitives :: [Prim]
primitives =
-- operators
[iCoerce (ITFixed IT8) (ITFixed IT16) "zext" zext LZExt,
iCoerce (ITFixed IT8) (ITFixed IT32) "zext" zext LZExt,
iCoerce (ITFixed IT8) (ITFixed IT64) "zext" zext LZExt,
iCoerce (ITFixed IT8) ITBig "zext" zext LZExt,
iCoerce (ITFixed IT8) ITNative "zext" zext LZExt,
iCoerce (ITFixed IT16) (ITFixed IT32) "zext" zext LZExt,
iCoerce (ITFixed IT16) (ITFixed IT64) "zext" zext LZExt,
iCoerce (ITFixed IT16) ITBig "zext" zext LZExt,
iCoerce (ITFixed IT16) ITNative "zext" zext LZExt,
iCoerce (ITFixed IT32) (ITFixed IT64) "zext" zext LZExt,
iCoerce (ITFixed IT32) ITBig "zext" zext LZExt,
iCoerce (ITFixed IT32) ITNative "zext" zext LZExt,
iCoerce (ITFixed IT64) ITBig "zext" zext LZExt,
iCoerce ITNative ITBig "zext" zext LZExt,
iCoerce ITNative (ITFixed IT64) "zext" zext LZExt,
iCoerce ITNative (ITFixed IT32) "zext" zext LZExt,
iCoerce ITNative (ITFixed IT16) "zext" zext LZExt,
iCoerce ITChar ITBig "zext" zext LZExt,
iCoerce (ITFixed IT8) (ITFixed IT16) "sext" sext LSExt,
iCoerce (ITFixed IT8) (ITFixed IT32) "sext" sext LSExt,
iCoerce (ITFixed IT8) (ITFixed IT64) "sext" sext LSExt,
iCoerce (ITFixed IT8) ITBig "sext" sext LSExt,
iCoerce (ITFixed IT8) ITNative "sext" sext LSExt,
iCoerce (ITFixed IT16) (ITFixed IT32) "sext" sext LSExt,
iCoerce (ITFixed IT16) (ITFixed IT64) "sext" sext LSExt,
iCoerce (ITFixed IT16) ITBig "sext" sext LSExt,
iCoerce (ITFixed IT16) ITNative "sext" sext LSExt,
iCoerce (ITFixed IT32) (ITFixed IT64) "sext" sext LSExt,
iCoerce (ITFixed IT32) ITBig "sext" sext LSExt,
iCoerce (ITFixed IT32) ITNative "sext" sext LSExt,
iCoerce (ITFixed IT64) ITBig "sext" sext LSExt,
iCoerce ITNative ITBig "sext" sext LSExt,
iCoerce ITNative (ITFixed IT64) "sext" sext LSExt,
iCoerce ITNative (ITFixed IT32) "sext" sext LSExt,
iCoerce ITNative (ITFixed IT16) "sext" sext LSExt,
iCoerce ITChar ITBig "sext" sext LSExt,
iCoerce (ITFixed IT16) (ITFixed IT8) "trunc" trunc LTrunc,
iCoerce (ITFixed IT32) (ITFixed IT8) "trunc" trunc LTrunc,
iCoerce (ITFixed IT64) (ITFixed IT8) "trunc" trunc LTrunc,
iCoerce ITBig (ITFixed IT8) "trunc" trunc LTrunc,
iCoerce ITNative (ITFixed IT8) "trunc" trunc LTrunc,
iCoerce (ITFixed IT32) (ITFixed IT16) "trunc" trunc LTrunc,
iCoerce (ITFixed IT64) (ITFixed IT16) "trunc" trunc LTrunc,
iCoerce ITBig (ITFixed IT16) "trunc" trunc LTrunc,
iCoerce ITNative (ITFixed IT16) "trunc" trunc LTrunc,
iCoerce (ITFixed IT64) (ITFixed IT32) "trunc" trunc LTrunc,
iCoerce ITBig (ITFixed IT32) "trunc" trunc LTrunc,
iCoerce ITNative (ITFixed IT32) "trunc" trunc LTrunc,
iCoerce ITBig (ITFixed IT64) "trunc" trunc LTrunc,
iCoerce (ITFixed IT16) ITNative "trunc" trunc LTrunc,
iCoerce (ITFixed IT32) ITNative "trunc" trunc LTrunc,
iCoerce (ITFixed IT64) ITNative "trunc" trunc LTrunc,
iCoerce ITBig ITNative "trunc" trunc LTrunc,
iCoerce ITNative (ITFixed IT64) "trunc" trunc LTrunc,
iCoerce ITBig ITChar "trunc" trunc LTrunc,
Prim (sUN "prim__addFloat") (ty [(AType ATFloat), (AType ATFloat)] (AType ATFloat)) 2 (fBin (+))
(2, LPlus ATFloat) total,
Prim (sUN "prim__subFloat") (ty [(AType ATFloat), (AType ATFloat)] (AType ATFloat)) 2 (fBin (-))
(2, LMinus ATFloat) total,
Prim (sUN "prim__mulFloat") (ty [(AType ATFloat), (AType ATFloat)] (AType ATFloat)) 2 (fBin (*))
(2, LTimes ATFloat) total,
Prim (sUN "prim__divFloat") (ty [(AType ATFloat), (AType ATFloat)] (AType ATFloat)) 2 (fBin (/))
(2, LSDiv ATFloat) total,
Prim (sUN "prim__eqFloat") (ty [(AType ATFloat), (AType ATFloat)] (AType (ATInt ITNative))) 2 (bfBin (==))
(2, LEq ATFloat) total,
Prim (sUN "prim__sltFloat") (ty [(AType ATFloat), (AType ATFloat)] (AType (ATInt ITNative))) 2 (bfBin (<))
(2, LSLt ATFloat) total,
Prim (sUN "prim__slteFloat") (ty [(AType ATFloat), (AType ATFloat)] (AType (ATInt ITNative))) 2 (bfBin (<=))
(2, LSLe ATFloat) total,
Prim (sUN "prim__sgtFloat") (ty [(AType ATFloat), (AType ATFloat)] (AType (ATInt ITNative))) 2 (bfBin (>))
(2, LSGt ATFloat) total,
Prim (sUN "prim__sgteFloat") (ty [(AType ATFloat), (AType ATFloat)] (AType (ATInt ITNative))) 2 (bfBin (>=))
(2, LSGe ATFloat) total,
Prim (sUN "prim__concat") (ty [StrType, StrType] StrType) 2 (sBin (++))
(2, LStrConcat) total,
Prim (sUN "prim__eqString") (ty [StrType, StrType] (AType (ATInt ITNative))) 2 (bsBin (==))
(2, LStrEq) total,
Prim (sUN "prim__ltString") (ty [StrType, StrType] (AType (ATInt ITNative))) 2 (bsBin (<))
(2, LStrLt) total,
Prim (sUN "prim_lenString") (ty [StrType] (AType (ATInt ITNative))) 1 (p_strLen)
(1, LStrLen) total,
-- Conversions
Prim (sUN "prim__charToInt") (ty [(AType (ATInt ITChar))] (AType (ATInt ITNative))) 1 (c_charToInt)
(1, LChInt ITNative) total,
Prim (sUN "prim__intToChar") (ty [(AType (ATInt ITNative))] (AType (ATInt ITChar))) 1 (c_intToChar)
(1, LIntCh ITNative) partial,
Prim (sUN "prim__strToFloat") (ty [StrType] (AType ATFloat)) 1 (c_strToFloat)
(1, LStrFloat) total,
Prim (sUN "prim__floatToStr") (ty [(AType ATFloat)] StrType) 1 (c_floatToStr)
(1, LFloatStr) total,
Prim (sUN "prim__floatExp") (ty [(AType ATFloat)] (AType ATFloat)) 1 (p_floatExp)
(1, LFExp) total,
Prim (sUN "prim__floatLog") (ty [(AType ATFloat)] (AType ATFloat)) 1 (p_floatLog)
(1, LFLog) total,
Prim (sUN "prim__floatSin") (ty [(AType ATFloat)] (AType ATFloat)) 1 (p_floatSin)
(1, LFSin) total,
Prim (sUN "prim__floatCos") (ty [(AType ATFloat)] (AType ATFloat)) 1 (p_floatCos)
(1, LFCos) total,
Prim (sUN "prim__floatTan") (ty [(AType ATFloat)] (AType ATFloat)) 1 (p_floatTan)
(1, LFTan) total,
Prim (sUN "prim__floatASin") (ty [(AType ATFloat)] (AType ATFloat)) 1 (p_floatASin)
(1, LFASin) total,
Prim (sUN "prim__floatACos") (ty [(AType ATFloat)] (AType ATFloat)) 1 (p_floatACos)
(1, LFACos) total,
Prim (sUN "prim__floatATan") (ty [(AType ATFloat)] (AType ATFloat)) 1 (p_floatATan)
(1, LFATan) total,
Prim (sUN "prim__floatSqrt") (ty [(AType ATFloat)] (AType ATFloat)) 1 (p_floatSqrt)
(1, LFSqrt) total,
Prim (sUN "prim__floatFloor") (ty [(AType ATFloat)] (AType ATFloat)) 1 (p_floatFloor)
(1, LFFloor) total,
Prim (sUN "prim__floatCeil") (ty [(AType ATFloat)] (AType ATFloat)) 1 (p_floatCeil)
(1, LFCeil) total,
Prim (sUN "prim__negFloat") (ty [(AType ATFloat)] (AType ATFloat)) 1 (c_negFloat)
(1, LFNegate) total,
Prim (sUN "prim__strHead") (ty [StrType] (AType (ATInt ITChar))) 1 (p_strHead)
(1, LStrHead) partial,
Prim (sUN "prim__strTail") (ty [StrType] StrType) 1 (p_strTail)
(1, LStrTail) partial,
Prim (sUN "prim__strCons") (ty [(AType (ATInt ITChar)), StrType] StrType) 2 (p_strCons)
(2, LStrCons) total,
Prim (sUN "prim__strIndex") (ty [StrType, (AType (ATInt ITNative))] (AType (ATInt ITChar))) 2 (p_strIndex)
(2, LStrIndex) partial,
Prim (sUN "prim__strRev") (ty [StrType] StrType) 1 (p_strRev)
(1, LStrRev) total,
Prim (sUN "prim__strSubstr") (ty [AType (ATInt ITNative), AType (ATInt ITNative), StrType] StrType) 3 (p_strSubstr)
(3, LStrSubstr) total,
Prim (sUN "prim__readString") (ty [WorldType] StrType) 1 (p_cantreduce)
(1, LReadStr) total, -- total is okay, because we have 'WorldType'
Prim (sUN "prim__writeString") (ty [WorldType,StrType] (AType (ATInt ITNative))) 2 (p_cantreduce)
(2, LWriteStr) total,
Prim (sUN "prim__systemInfo") (ty [AType (ATInt ITNative)] StrType) 1 (p_cantreduce)
(1, LSystemInfo) total
] ++ concatMap intOps [ITFixed IT8, ITFixed IT16, ITFixed IT32, ITFixed IT64, ITBig, ITNative, ITChar]
intOps :: IntTy -> [Prim]
intOps ity = intCmps ity ++ intArith ity ++ intConv ity
intSCmps :: IntTy -> [Prim]
intSCmps ity =
[ iCmp ity "slt" False (bCmp ity (sCmpOp ity (<))) (LSLt . ATInt) total
, iCmp ity "slte" False (bCmp ity (sCmpOp ity (<=))) (LSLe . ATInt) total
, iCmp ity "eq" False (bCmp ity (==)) (LEq . ATInt) total
, iCmp ity "sgte" False (bCmp ity (sCmpOp ity (>=))) (LSGe . ATInt) total
, iCmp ity "sgt" False (bCmp ity (sCmpOp ity (>))) (LSGt . ATInt) total
]
intCmps :: IntTy -> [Prim]
intCmps ITNative = intSCmps ITNative
intCmps ity =
intSCmps ity ++
[ iCmp ity "lt" False (bCmp ity (cmpOp ity (<))) LLt total
, iCmp ity "lte" False (bCmp ity (cmpOp ity (<=))) LLe total
, iCmp ity "gte" False (bCmp ity (cmpOp ity (>=))) LGe total
, iCmp ity "gt" False (bCmp ity (cmpOp ity (>))) LGt total
]
intArith :: IntTy -> [Prim]
intArith ity =
[ iBinOp ity "add" (bitBin ity (+)) (LPlus . ATInt) total
, iBinOp ity "sub" (bitBin ity (-)) (LMinus . ATInt) total
, iBinOp ity "mul" (bitBin ity (*)) (LTimes . ATInt) total
, iBinOp ity "udiv" (bitBin ity div) LUDiv partial
, iBinOp ity "sdiv" (bsdiv ity) (LSDiv . ATInt) partial
, iBinOp ity "urem" (bitBin ity rem) LURem partial
, iBinOp ity "srem" (bsrem ity) (LSRem . ATInt) partial
, iBinOp ity "shl" (bitBin ity (\x y -> shiftL x (fromIntegral y))) LSHL total
, iBinOp ity "lshr" (bitBin ity (\x y -> shiftR x (fromIntegral y))) LLSHR total
, iBinOp ity "ashr" (bashr ity) LASHR total
, iBinOp ity "and" (bitBin ity (.&.)) LAnd total
, iBinOp ity "or" (bitBin ity (.|.)) LOr total
, iBinOp ity "xor" (bitBin ity (xor)) LXOr total
, iUnOp ity "compl" (bUn ity complement) LCompl total
]
intConv :: IntTy -> [Prim]
intConv ity =
[ Prim (sUN $ "prim__toStr" ++ intTyName ity) (ty [AType . ATInt $ ity] StrType) 1 intToStr
(1, LIntStr ity) total
, Prim (sUN $ "prim__fromStr" ++ intTyName ity) (ty [StrType] (AType . ATInt $ ity)) 1 (strToInt ity)
(1, LStrInt ity) total
, Prim (sUN $ "prim__toFloat" ++ intTyName ity) (ty [AType . ATInt $ ity] (AType ATFloat)) 1 intToFloat
(1, LIntFloat ity) total
, Prim (sUN $ "prim__fromFloat" ++ intTyName ity) (ty [AType ATFloat] (AType . ATInt $ ity)) 1 (floatToInt ity)
(1, LFloatInt ity) total
]
bitcastPrim :: ArithTy -> ArithTy -> (ArithTy -> [Const] -> Maybe Const) -> PrimFn -> Prim
bitcastPrim from to impl prim =
Prim (sUN $ "prim__bitcast" ++ aTyName from ++ "_" ++ aTyName to) (ty [AType from] (AType to)) 1 (impl to)
(1, prim) total
concatWord8 :: (Word8, Word8) -> Word16
concatWord8 (high, low) = fromIntegral high .|. (fromIntegral low `shiftL` 8)
concatWord16 :: (Word16, Word16) -> Word32
concatWord16 (high, low) = fromIntegral high .|. (fromIntegral low `shiftL` 16)
concatWord32 :: (Word32, Word32) -> Word64
concatWord32 (high, low) = fromIntegral high .|. (fromIntegral low `shiftL` 32)
truncWord16 :: Bool -> Word16 -> Word8
truncWord16 True x = fromIntegral (x `shiftR` 8)
truncWord16 False x = fromIntegral x
truncWord32 :: Bool -> Word32 -> Word16
truncWord32 True x = fromIntegral (x `shiftR` 16)
truncWord32 False x = fromIntegral x
truncWord64 :: Bool -> Word64 -> Word32
truncWord64 True x = fromIntegral (x `shiftR` 32)
truncWord64 False x = fromIntegral x
aTyName :: ArithTy -> String
aTyName (ATInt t) = intTyName t
aTyName ATFloat = "Float"
iCmp :: IntTy -> String -> Bool -> ([Const] -> Maybe Const) -> (IntTy -> PrimFn) -> Totality -> Prim
iCmp ity op self impl irop totality
= Prim (sUN $ "prim__" ++ op ++ intTyName ity)
(ty (replicate 2 . AType . ATInt $ ity) (AType (ATInt (if self then ity else ITNative))))
2 impl (2, irop ity) totality
iBinOp, iUnOp :: IntTy -> String -> ([Const] -> Maybe Const) -> (IntTy -> PrimFn) -> Totality -> Prim
iBinOp ity op impl irop totality
= Prim (sUN $ "prim__" ++ op ++ intTyName ity)
(ty (replicate 2 . AType . ATInt $ ity) (AType . ATInt $ ity))
2 impl (2, irop ity) totality
iUnOp ity op impl irop totality
= Prim (sUN $ "prim__" ++ op ++ intTyName ity)
(ty [AType . ATInt $ ity] (AType . ATInt $ ity))
1 impl (1, irop ity) totality
iCoerce :: IntTy -> IntTy -> String -> (IntTy -> IntTy -> [Const] -> Maybe Const) -> (IntTy -> IntTy -> PrimFn) -> Prim
iCoerce from to op impl irop =
Prim (sUN $ "prim__" ++ op ++ intTyName from ++ "_" ++ intTyName to)
(ty [AType . ATInt $ from] (AType . ATInt $ to)) 1 (impl from to) (1, irop from to) total
fBin :: (Double -> Double -> Double) -> [Const] -> Maybe Const
fBin op [Fl x, Fl y] = Just $ Fl (op x y)
fBin _ _ = Nothing
bfBin :: (Double -> Double -> Bool) -> [Const] -> Maybe Const
bfBin op [Fl x, Fl y] = let i = (if op x y then 1 else 0) in
Just $ I i
bfBin _ _ = Nothing
bcBin :: (Char -> Char -> Bool) -> [Const] -> Maybe Const
bcBin op [Ch x, Ch y] = let i = (if op x y then 1 else 0) in
Just $ I i
bcBin _ _ = Nothing
bsBin :: (String -> String -> Bool) -> [Const] -> Maybe Const
bsBin op [Str x, Str y]
= let i = (if op x y then 1 else 0) in
Just $ I i
bsBin _ _ = Nothing
sBin :: (String -> String -> String) -> [Const] -> Maybe Const
sBin op [Str x, Str y] = Just $ Str (op x y)
sBin _ _ = Nothing
bsrem :: IntTy -> [Const] -> Maybe Const
bsrem ITBig [BI x, BI y] = Just . BI $ x `rem` y
bsrem (ITFixed IT8) [B8 x, B8 y]
= Just $ B8 (fromIntegral (fromIntegral x `rem` fromIntegral y :: Int8))
bsrem (ITFixed IT16) [B16 x, B16 y]
= Just $ B16 (fromIntegral (fromIntegral x `rem` fromIntegral y :: Int16))
bsrem (ITFixed IT32) [B32 x, B32 y]
= Just $ B32 (fromIntegral (fromIntegral x `rem` fromIntegral y :: Int32))
bsrem (ITFixed IT64) [B64 x, B64 y]
= Just $ B64 (fromIntegral (fromIntegral x `rem` fromIntegral y :: Int64))
bsrem ITNative [I x, I y] = Just $ I (x `rem` y)
bsrem ITChar [Ch x, Ch y] = Just $ Ch (chr $ (ord x) `rem` (ord y))
bsrem _ _ = Nothing
bsdiv :: IntTy -> [Const] -> Maybe Const
bsdiv ITBig [BI x, BI y] = Just . BI $ x `div` y
bsdiv (ITFixed IT8) [B8 x, B8 y]
= Just $ B8 (fromIntegral (fromIntegral x `div` fromIntegral y :: Int8))
bsdiv (ITFixed IT16) [B16 x, B16 y]
= Just $ B16 (fromIntegral (fromIntegral x `div` fromIntegral y :: Int16))
bsdiv (ITFixed IT32) [B32 x, B32 y]
= Just $ B32 (fromIntegral (fromIntegral x `div` fromIntegral y :: Int32))
bsdiv (ITFixed IT64) [B64 x, B64 y]
= Just $ B64 (fromIntegral (fromIntegral x `div` fromIntegral y :: Int64))
bsdiv ITNative [I x, I y] = Just $ I (x `div` y)
bsdiv ITChar [Ch x, Ch y] = Just $ Ch (chr $ (ord x) `div` (ord y))
bsdiv _ _ = Nothing
bashr :: IntTy -> [Const] -> Maybe Const
bashr ITBig [BI x, BI y] = Just $ BI (x `shiftR` fromIntegral y)
bashr (ITFixed IT8) [B8 x, B8 y]
= Just $ B8 (fromIntegral (fromIntegral x `shiftR` fromIntegral y :: Int8))
bashr (ITFixed IT16) [B16 x, B16 y]
= Just $ B16 (fromIntegral (fromIntegral x `shiftR` fromIntegral y :: Int16))
bashr (ITFixed IT32) [B32 x, B32 y]
= Just $ B32 (fromIntegral (fromIntegral x `shiftR` fromIntegral y :: Int32))
bashr (ITFixed IT64) [B64 x, B64 y]
= Just $ B64 (fromIntegral (fromIntegral x `shiftR` fromIntegral y :: Int64))
bashr ITNative [I x, I y] = Just $ I (x `shiftR` y)
bashr ITChar [Ch x, Ch y] = Just $ Ch (chr $ (ord x) `shiftR` (ord y))
bashr _ _ = Nothing
bUn :: IntTy -> (forall a. Bits a => a -> a) -> [Const] -> Maybe Const
bUn (ITFixed IT8) op [B8 x] = Just $ B8 (op x)
bUn (ITFixed IT16) op [B16 x] = Just $ B16 (op x)
bUn (ITFixed IT32) op [B32 x] = Just $ B32 (op x)
bUn (ITFixed IT64) op [B64 x] = Just $ B64 (op x)
bUn ITBig op [BI x] = Just $ BI (op x)
bUn ITNative op [I x] = Just $ I (op x)
bUn ITChar op [Ch x] = Just $ Ch (chr $ op (ord x))
bUn _ _ _ = Nothing
bitBin :: IntTy -> (forall a. (Bits a, Integral a) => a -> a -> a) -> [Const] -> Maybe Const
bitBin (ITFixed IT8) op [B8 x, B8 y] = Just $ B8 (op x y)
bitBin (ITFixed IT16) op [B16 x, B16 y] = Just $ B16 (op x y)
bitBin (ITFixed IT32) op [B32 x, B32 y] = Just $ B32 (op x y)
bitBin (ITFixed IT64) op [B64 x, B64 y] = Just $ B64 (op x y)
bitBin ITBig op [BI x, BI y] = Just $ BI (op x y)
bitBin ITNative op [I x, I y] = Just $ I (op x y)
bitBin ITChar op [Ch x, Ch y] = Just $ Ch (chr $ op (ord x) (ord y))
bitBin _ _ _ = Nothing
bCmp :: IntTy -> (forall a. (Integral a, Ord a) => a -> a -> Bool) -> [Const] -> Maybe Const
bCmp (ITFixed IT8) op [B8 x, B8 y] = Just $ I (if (op x y) then 1 else 0)
bCmp (ITFixed IT16) op [B16 x, B16 y] = Just $ I (if (op x y) then 1 else 0)
bCmp (ITFixed IT32) op [B32 x, B32 y] = Just $ I (if (op x y) then 1 else 0)
bCmp (ITFixed IT64) op [B64 x, B64 y] = Just $ I (if (op x y) then 1 else 0)
bCmp ITBig op [BI x, BI y] = Just $ I (if (op x y) then 1 else 0)
bCmp ITNative op [I x, I y] = Just $ I (if (op x y) then 1 else 0)
bCmp ITChar op [Ch x, Ch y] = Just $ I (if (op (ord x) (ord y)) then 1 else 0)
bCmp _ _ _ = Nothing
cmpOp :: (Ord a, Integral a) => IntTy -> (forall b. Ord b => b -> b -> Bool) -> a -> a -> Bool
cmpOp (ITFixed _) f = f
cmpOp (ITNative) f = f `on` (fromIntegral :: Integral a => a -> Word)
cmpOp (ITChar) f = f `on` ((fromIntegral :: Integral a => a -> Word))
cmpOp _ f = let xor = (/=) in (\ x y -> (f x y) `xor` (x < 0) `xor` (y < 0))
sCmpOp :: (Ord a, Integral a) => IntTy -> (forall b. Ord b => b -> b -> Bool) -> a -> a -> Bool
sCmpOp (ITFixed IT8) f = f `on` (fromIntegral :: Integral a => a -> Int8)
sCmpOp (ITFixed IT16) f = f `on` (fromIntegral :: Integral a => a -> Int16)
sCmpOp (ITFixed IT32) f = f `on` (fromIntegral :: Integral a => a -> Int32)
sCmpOp (ITFixed IT64) f = f `on` (fromIntegral :: Integral a => a -> Int64)
sCmpOp _ f = f
toInt :: Integral a => IntTy -> a -> Const
toInt (ITFixed IT8) x = B8 (fromIntegral x)
toInt (ITFixed IT16) x = B16 (fromIntegral x)
toInt (ITFixed IT32) x = B32 (fromIntegral x)
toInt (ITFixed IT64) x = B64 (fromIntegral x)
toInt ITBig x = BI (fromIntegral x)
toInt ITNative x = I (fromIntegral x)
toInt ITChar x = Ch (chr $ fromIntegral x)
intToInt :: IntTy -> IntTy -> [Const] -> Maybe Const
intToInt (ITFixed IT8) out [B8 x] = Just $ toInt out x
intToInt (ITFixed IT16) out [B16 x] = Just $ toInt out x
intToInt (ITFixed IT32) out [B32 x] = Just $ toInt out x
intToInt (ITFixed IT64) out [B64 x] = Just $ toInt out x
intToInt ITBig out [BI x] = Just $ toInt out x
intToInt ITNative out [I x] = Just $ toInt out x
intToInt ITChar out [Ch x] = Just $ toInt out (ord x)
intToInt _ _ _ = Nothing
zext :: IntTy -> IntTy -> [Const] -> Maybe Const
zext from ITBig val = intToInt from ITBig val
zext ITBig _ _ = Nothing
zext f@(ITFixed from) t@(ITFixed to) val
| nativeTyWidth from < nativeTyWidth to = intToInt f t val
zext ITNative to [I x] = Just $ toInt to (fromIntegral x :: Word)
zext from ITNative val = intToInt from ITNative val
zext _ _ _ = Nothing
sext :: IntTy -> IntTy -> [Const] -> Maybe Const
sext (ITFixed IT8) out [B8 x] = Just $ toInt out (fromIntegral x :: Int8)
sext (ITFixed IT16) out [B16 x] = Just $ toInt out (fromIntegral x :: Int16)
sext (ITFixed IT32) out [B32 x] = Just $ toInt out (fromIntegral x :: Int32)
sext (ITFixed IT64) out [B64 x] = Just $ toInt out (fromIntegral x :: Int64)
sext ITBig _ _ = Nothing
sext from to val = intToInt from to val
trunc :: IntTy -> IntTy -> [Const] -> Maybe Const
trunc ITBig to val = intToInt ITBig to val
trunc _ ITBig _ = Nothing
trunc f@(ITFixed from) t@(ITFixed to) val | nativeTyWidth from > nativeTyWidth to = intToInt f t val
trunc ITNative to [I x] = Just $ toInt to x
trunc from ITNative val = intToInt from ITNative val
trunc _ _ _ = Nothing
intToStr :: [Const] -> Maybe Const
intToStr val | [i] <- getInt val = Just $ Str (show i)
intToStr _ = Nothing
getInt :: [Const] -> [Integer]
getInt (B8 x : xs) = toInteger x : getInt xs
getInt (B16 x : xs) = toInteger x : getInt xs
getInt (B32 x : xs) = toInteger x : getInt xs
getInt (B64 x : xs) = toInteger x : getInt xs
getInt (I x : xs) = toInteger x : getInt xs
getInt (BI x : xs) = x : getInt xs
getInt _ = []
strToInt :: IntTy -> [Const] -> Maybe Const
strToInt ity [Str x] = case reads x of
[(n,"")] -> Just $ toInt ity (n :: Integer)
_ -> Just $ I 0
strToInt _ _ = Nothing
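-- For example (illustrative, not in the original source):
--   strToInt ITNative [Str "42"]  == Just (I 42)
--   strToInt ITNative [Str "abc"] == Just (I 0)    -- unparsable input yields 0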
intToFloat :: [Const] -> Maybe Const
intToFloat val | [i] <- getInt val = Just $ Fl (fromIntegral i)
intToFloat _ = Nothing
floatToInt :: IntTy -> [Const] -> Maybe Const
floatToInt ity [Fl x] = Just $ toInt ity (truncate x :: Integer)
floatToInt _ _ = Nothing
c_intToChar, c_charToInt :: [Const] -> Maybe Const
c_intToChar [(I x)] = Just . Ch . toEnum $ x
c_intToChar _ = Nothing
c_charToInt [(Ch x)] = Just . I . fromEnum $ x
c_charToInt _ = Nothing
c_negFloat :: [Const] -> Maybe Const
c_negFloat [Fl x] = Just $ Fl (negate x)
c_negFloat _ = Nothing
c_floatToStr :: [Const] -> Maybe Const
c_floatToStr [Fl x] = Just $ Str (show x)
c_floatToStr _ = Nothing
c_strToFloat :: [Const] -> Maybe Const
c_strToFloat [Str x] = case reads x of
[(n,"")] -> Just $ Fl n
_ -> Just $ Fl 0
c_strToFloat _ = Nothing
p_fPrim :: (Double -> Double) -> [Const] -> Maybe Const
p_fPrim f [Fl x] = Just $ Fl (f x)
p_fPrim f _ = Nothing
p_floatExp, p_floatLog, p_floatSin, p_floatCos, p_floatTan, p_floatASin, p_floatACos, p_floatATan, p_floatSqrt, p_floatFloor, p_floatCeil :: [Const] -> Maybe Const
p_floatExp = p_fPrim exp
p_floatLog = p_fPrim log
p_floatSin = p_fPrim sin
p_floatCos = p_fPrim cos
p_floatTan = p_fPrim tan
p_floatASin = p_fPrim asin
p_floatACos = p_fPrim acos
p_floatATan = p_fPrim atan
p_floatSqrt = p_fPrim sqrt
p_floatFloor = p_fPrim (fromInteger . floor)
p_floatCeil = p_fPrim (fromInteger . ceiling)
p_strLen, p_strHead, p_strTail, p_strIndex, p_strCons, p_strRev, p_strSubstr :: [Const] -> Maybe Const
p_strLen [Str xs] = Just $ I (length xs)
p_strLen _ = Nothing
p_strHead [Str (x:xs)] = Just $ Ch x
p_strHead _ = Nothing
p_strTail [Str (x:xs)] = Just $ Str xs
p_strTail _ = Nothing
p_strIndex [Str xs, I i]
| i < length xs = Just $ Ch (xs!!i)
p_strIndex _ = Nothing
p_strCons [Ch x, Str xs] = Just $ Str (x:xs)
p_strCons _ = Nothing
p_strRev [Str xs] = Just $ Str (reverse xs)
p_strRev _ = Nothing
p_strSubstr [I offset, I length, Str input] = Just $ Str (take length (drop offset input))
p_strSubstr _ = Nothing
p_cantreduce :: a -> Maybe b
p_cantreduce _ = Nothing
| ben-schulz/Idris-dev | src/Idris/Primitives.hs | bsd-3-clause | 23,585 | 0 | 14 | 5,366 | 11,370 | 5,892 | 5,478 | 449 | 8 |
module Hans.IP4.Dhcp.Options where
import Hans.IP4.Dhcp.Codec
import Hans.IP4.Packet (IP4,IP4Mask)
import qualified Control.Applicative as A
import Control.Monad (unless)
import Data.Maybe (fromMaybe)
import Data.Foldable (traverse_)
import qualified Data.Traversable as T
import Data.Word (Word8, Word16, Word32)
import Data.Serialize.Get
import Data.Serialize.Put
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BS8
import Numeric (showHex)
-----------------------------------------------------------------------
-- Magic constants ----------------------------------------------------
-----------------------------------------------------------------------
data MagicCookie = MagicCookie
dhcp4MagicCookie :: Word32
dhcp4MagicCookie = 0x63825363
instance CodecAtom MagicCookie where
getAtom = do cookie <- getAtom
unless (cookie == dhcp4MagicCookie)
(fail "Incorrect magic cookie.")
return MagicCookie
putAtom MagicCookie = putAtom dhcp4MagicCookie
atomSize MagicCookie = atomSize dhcp4MagicCookie
-----------------------------------------------------------------------
-- DHCP option type and operations ------------------------------------
-----------------------------------------------------------------------
data Dhcp4Option
= OptSubnetMask SubnetMask
| OptTimeOffset Word32
| OptRouters [IP4]
| OptTimeServers [IP4]
| OptIEN116NameServers [IP4]
| OptNameServers [IP4]
| OptLogServers [IP4]
| OptCookieServers [IP4]
| OptLPRServers [IP4]
| OptImpressServers [IP4]
| OptResourceLocationServers [IP4]
| OptHostName NVTAsciiString
| OptBootFileSize Word16
| OptMeritDumpFile NVTAsciiString
| OptDomainName NVTAsciiString
| OptSwapServer IP4
| OptRootPath NVTAsciiString
| OptExtensionsPath NVTAsciiString
| OptEnableIPForwarding Bool
| OptEnableNonLocalSourceRouting Bool
| OptPolicyFilters [IP4Mask]
| OptMaximumDatagramReassemblySize Word16
| OptDefaultTTL Word8
| OptPathMTUAgingTimeout Word32
| OptPathMTUPlateauTable [Word16]
| OptInterfaceMTU Word16
| OptAllSubnetsAreLocal Bool
| OptBroadcastAddress IP4
| OptPerformMaskDiscovery Bool
| OptShouldSupplyMasks Bool
| OptShouldPerformRouterDiscovery Bool
| OptRouterSolicitationAddress IP4
| OptStaticRoutes [(IP4,IP4)]
| OptShouldNegotiateArpTrailers Bool
| OptArpCacheTimeout Word32
| OptUseRFC1042EthernetEncapsulation Bool
| OptTcpDefaultTTL Word8
| OptTcpKeepaliveInterval Word32
| OptTcpKeepaliveUseGarbage Bool
| OptNisDomainName NVTAsciiString
| OptNisServers [IP4]
| OptNtpServers [IP4]
| OptVendorSpecific ByteString
| OptNetBiosNameServers [IP4]
| OptNetBiosDistributionServers [IP4]
| OptNetBiosNodeType NetBiosNodeType
| OptNetBiosScope NVTAsciiString
| OptXWindowsFontServer [IP4]
| OptXWindowsDisplayManagers [IP4]
| OptNisPlusDomain NVTAsciiString
| OptNisPlusServers [IP4]
| OptSmtpServers [IP4]
| OptPopServers [IP4]
| OptNntpServers [IP4]
| OptWwwServers [IP4]
| OptFingerServers [IP4]
| OptIrcServers [IP4]
| OptStreetTalkServers [IP4]
| OptStreetTalkDirectoryAssistanceServers [IP4]
| OptFQDN NVTAsciiString -- RFC 4702
| OptRequestIPAddress IP4
| OptIPAddressLeaseTime Word32
| OptOverload OverloadOption
| OptTftpServer NVTAsciiString
| OptBootfileName NVTAsciiString
| OptMessageType Dhcp4MessageType
| OptServerIdentifier IP4
| OptParameterRequestList [OptionTagOrError]
| OptErrorMessage NVTAsciiString
| OptMaxDHCPMessageSize Word16
| OptRenewalTime Word32
| OptRebindingTime Word32
| OptVendorClass NVTAsciiString
| OptClientIdentifier ByteString
| OptNetWareDomainName NVTAsciiString -- RFC 2242
| OptNetWareInfo ByteString -- RFC 2242
| OptAutoconfiguration Bool -- RFC 2563
deriving (Show,Eq)
getDhcp4Option :: Get (Either ControlTag Dhcp4Option)
getDhcp4Option = do
mb_tag <- getOptionTag
case mb_tag of
UnknownTag t -> do xs <- getBytes =<< remaining
fail ("getDhcp4Option failed tag (" ++ show t ++ ") " ++ show xs)
KnownTag tag -> do
let r con = (Right . con) `fmap` getOption
case tag of
OptTagPad -> A.pure (Left ControlPad)
OptTagEnd -> A.pure (Left ControlEnd)
OptTagSubnetMask -> r OptSubnetMask
OptTagTimeOffset -> r OptTimeOffset
OptTagRouters -> r OptRouters
OptTagTimeServers -> r OptTimeServers
OptTagIEN116NameServers -> r OptIEN116NameServers
OptTagNameServers -> r OptNameServers
OptTagLogServers -> r OptLogServers
OptTagCookieServers -> r OptCookieServers
OptTagLPRServers -> r OptLPRServers
OptTagImpressServers -> r OptImpressServers
OptTagResourceLocationServers -> r OptResourceLocationServers
OptTagHostName -> r OptHostName
OptTagBootFileSize -> r OptBootFileSize
OptTagMeritDumpFile -> r OptMeritDumpFile
OptTagDomainName -> r OptDomainName
OptTagSwapServer -> r OptSwapServer
OptTagRootPath -> r OptRootPath
OptTagExtensionsPath -> r OptExtensionsPath
OptTagEnableIPForwarding -> r OptEnableIPForwarding
OptTagEnableNonLocalSourceRouting -> r OptEnableNonLocalSourceRouting
OptTagPolicyFilters -> r OptPolicyFilters
OptTagMaximumDatagramReassemblySize -> r OptMaximumDatagramReassemblySize
OptTagDefaultTTL -> r OptDefaultTTL
OptTagPathMTUAgingTimeout -> r OptPathMTUAgingTimeout
OptTagPathMTUPlateauTable -> r OptPathMTUPlateauTable
OptTagInterfaceMTU -> r OptInterfaceMTU
OptTagAllSubnetsAreLocal -> r OptAllSubnetsAreLocal
OptTagBroadcastAddress -> r OptBroadcastAddress
OptTagPerformMaskDiscovery -> r OptPerformMaskDiscovery
OptTagShouldSupplyMasks -> r OptShouldSupplyMasks
OptTagShouldPerformRouterDiscovery -> r OptShouldPerformRouterDiscovery
OptTagRouterSolicitationAddress -> r OptRouterSolicitationAddress
OptTagStaticRoutes -> r OptStaticRoutes
OptTagShouldNegotiateArpTrailers -> r OptShouldNegotiateArpTrailers
OptTagArpCacheTimeout -> r OptArpCacheTimeout
OptTagUseRFC1042EthernetEncapsulation -> r OptUseRFC1042EthernetEncapsulation
OptTagTcpDefaultTTL -> r OptTcpDefaultTTL
OptTagTcpKeepaliveInterval -> r OptTcpKeepaliveInterval
OptTagTcpKeepaliveUseGarbage -> r OptTcpKeepaliveUseGarbage
OptTagNisDomainName -> r OptNisDomainName
OptTagNisServers -> r OptNisServers
OptTagNtpServers -> r OptNtpServers
OptTagVendorSpecific -> r OptVendorSpecific
OptTagNetBiosNameServers -> r OptNetBiosNameServers
OptTagNetBiosDistributionServers -> r OptNetBiosDistributionServers
OptTagNetBiosNodeType -> r OptNetBiosNodeType
OptTagNetBiosScope -> r OptNetBiosScope
OptTagXWindowsFontServer -> r OptXWindowsFontServer
OptTagXWindowsDisplayManagers -> r OptXWindowsDisplayManagers
OptTagNisPlusDomain -> r OptNisPlusDomain
OptTagNisPlusServers -> r OptNisPlusServers
OptTagSmtpServers -> r OptSmtpServers
OptTagPopServers -> r OptPopServers
OptTagNntpServers -> r OptNntpServers
OptTagWwwServers -> r OptWwwServers
OptTagFingerServers -> r OptFingerServers
OptTagIrcServers -> r OptIrcServers
OptTagStreetTalkServers -> r OptStreetTalkServers
OptTagStreetTalkDirectoryAssistanceServers -> r OptStreetTalkDirectoryAssistanceServers
OptTagFQDN -> r OptFQDN
OptTagRequestIPAddress -> r OptRequestIPAddress
OptTagIPAddressLeaseTime -> r OptIPAddressLeaseTime
OptTagOverload -> r OptOverload
OptTagTftpServer -> r OptTftpServer
OptTagBootfileName -> r OptBootfileName
OptTagMessageType -> r OptMessageType
OptTagServerIdentifier -> r OptServerIdentifier
OptTagParameterRequestList -> r OptParameterRequestList
OptTagErrorMessage -> r OptErrorMessage
OptTagMaxDHCPMessageSize -> r OptMaxDHCPMessageSize
OptTagRenewalTime -> r OptRenewalTime
OptTagRebindingTime -> r OptRebindingTime
OptTagVendorClass -> r OptVendorClass
OptTagClientIdentifier -> r OptClientIdentifier
OptTagNetWareDomainName -> r OptNetWareDomainName
OptTagNetWareInfo -> r OptNetWareInfo
OptTagAutoconfiguration -> r OptAutoconfiguration
putDhcp4Option :: Dhcp4Option -> Put
putDhcp4Option opt =
let p tag val = do putAtom (KnownTag tag); putOption val in
case opt of
OptSubnetMask mask -> p OptTagSubnetMask mask
OptTimeOffset offset -> p OptTagTimeOffset offset
OptRouters routers -> p OptTagRouters routers
OptTimeServers servers -> p OptTagTimeServers servers
OptIEN116NameServers servers -> p OptTagIEN116NameServers servers
OptNameServers servers -> p OptTagNameServers servers
OptLogServers servers -> p OptTagLogServers servers
OptCookieServers servers -> p OptTagCookieServers servers
OptLPRServers servers -> p OptTagLPRServers servers
OptImpressServers servers -> p OptTagImpressServers servers
OptResourceLocationServers servers -> p OptTagResourceLocationServers servers
OptHostName hostname -> p OptTagHostName hostname
OptBootFileSize sz -> p OptTagBootFileSize sz
OptMeritDumpFile file -> p OptTagMeritDumpFile file
OptDomainName domainname -> p OptTagDomainName domainname
OptSwapServer server -> p OptTagSwapServer server
OptRootPath path -> p OptTagRootPath path
OptExtensionsPath path -> p OptTagExtensionsPath path
OptEnableIPForwarding enabled -> p OptTagEnableIPForwarding enabled
OptEnableNonLocalSourceRouting enab -> p OptTagEnableNonLocalSourceRouting enab
OptPolicyFilters filters -> p OptTagPolicyFilters filters
OptMaximumDatagramReassemblySize n -> p OptTagMaximumDatagramReassemblySize n
OptDefaultTTL ttl -> p OptTagDefaultTTL ttl
OptPathMTUAgingTimeout timeout -> p OptTagPathMTUAgingTimeout timeout
OptPathMTUPlateauTable mtus -> p OptTagPathMTUPlateauTable mtus
OptInterfaceMTU mtu -> p OptTagInterfaceMTU mtu
OptAllSubnetsAreLocal arelocal -> p OptTagAllSubnetsAreLocal arelocal
OptBroadcastAddress addr -> p OptTagBroadcastAddress addr
OptPerformMaskDiscovery perform -> p OptTagPerformMaskDiscovery perform
OptShouldSupplyMasks should -> p OptTagShouldSupplyMasks should
OptShouldPerformRouterDiscovery b -> p OptTagShouldPerformRouterDiscovery b
OptRouterSolicitationAddress addr -> p OptTagRouterSolicitationAddress addr
OptStaticRoutes routes -> p OptTagStaticRoutes routes
OptShouldNegotiateArpTrailers b -> p OptTagShouldNegotiateArpTrailers b
OptArpCacheTimeout timeout -> p OptTagArpCacheTimeout timeout
OptUseRFC1042EthernetEncapsulation b-> p OptTagUseRFC1042EthernetEncapsulation b
OptTcpDefaultTTL ttl -> p OptTagTcpDefaultTTL ttl
OptTcpKeepaliveInterval interval -> p OptTagTcpKeepaliveInterval interval
OptTcpKeepaliveUseGarbage use -> p OptTagTcpKeepaliveUseGarbage use
OptNisDomainName domainname -> p OptTagNisDomainName domainname
OptNisServers servers -> p OptTagNisServers servers
OptNtpServers servers -> p OptTagNtpServers servers
OptVendorSpecific bs -> p OptTagVendorSpecific bs
OptNetBiosNameServers servers -> p OptTagNetBiosNameServers servers
OptNetBiosDistributionServers srvs -> p OptTagNetBiosDistributionServers srvs
OptNetBiosNodeType node -> p OptTagNetBiosNodeType node
OptNetBiosScope scope -> p OptTagNetBiosScope scope
OptXWindowsFontServer servers -> p OptTagXWindowsFontServer servers
OptXWindowsDisplayManagers servers -> p OptTagXWindowsDisplayManagers servers
OptNisPlusDomain domain -> p OptTagNisPlusDomain domain
OptNisPlusServers servers -> p OptTagNisPlusServers servers
OptSmtpServers servers -> p OptTagSmtpServers servers
OptPopServers servers -> p OptTagPopServers servers
OptNntpServers servers -> p OptTagNntpServers servers
OptWwwServers servers -> p OptTagWwwServers servers
OptFingerServers servers -> p OptTagFingerServers servers
OptIrcServers servers -> p OptTagIrcServers servers
OptStreetTalkServers servers -> p OptTagStreetTalkServers servers
OptStreetTalkDirectoryAssistanceServers servers -> p OptTagStreetTalkDirectoryAssistanceServers servers
OptFQDN fqdn -> p OptTagFQDN fqdn
OptRequestIPAddress addr -> p OptTagRequestIPAddress addr
OptIPAddressLeaseTime time -> p OptTagIPAddressLeaseTime time
OptOverload overload -> p OptTagOverload overload
OptTftpServer server -> p OptTagTftpServer server
OptBootfileName filename -> p OptTagBootfileName filename
OptMessageType t -> p OptTagMessageType t
OptServerIdentifier server -> p OptTagServerIdentifier server
OptParameterRequestList ps -> p OptTagParameterRequestList ps
OptErrorMessage msg -> p OptTagErrorMessage msg
OptMaxDHCPMessageSize maxsz -> p OptTagMaxDHCPMessageSize maxsz
OptRenewalTime time -> p OptTagRenewalTime time
OptRebindingTime time -> p OptTagRebindingTime time
OptVendorClass str -> p OptTagVendorClass str
OptClientIdentifier client -> p OptTagClientIdentifier client
OptNetWareDomainName name -> p OptTagNetWareDomainName name
OptNetWareInfo info -> p OptTagNetWareInfo info
OptAutoconfiguration autoconf -> p OptTagAutoconfiguration autoconf
-----------------------------------------------------------------------
-- Message Type type and operations -----------------------------------
-----------------------------------------------------------------------
data Dhcp4MessageType
= Dhcp4Discover
| Dhcp4Offer
| Dhcp4Request
| Dhcp4Decline
| Dhcp4Ack
| Dhcp4Nak
| Dhcp4Release
| Dhcp4Inform
deriving (Eq,Show)
instance Option Dhcp4MessageType where
getOption = defaultFixedGetOption
putOption = defaultFixedPutOption
instance CodecAtom Dhcp4MessageType where
getAtom = do
b <- getAtom
case b :: Word8 of
1 -> return Dhcp4Discover
2 -> return Dhcp4Offer
3 -> return Dhcp4Request
4 -> return Dhcp4Decline
5 -> return Dhcp4Ack
6 -> return Dhcp4Nak
7 -> return Dhcp4Release
8 -> return Dhcp4Inform
_ -> fail ("Unknown DHCP Message Type 0x" ++ showHex b "")
putAtom t = putAtom $ case t of
Dhcp4Discover -> 1 :: Word8
Dhcp4Offer -> 2
Dhcp4Request -> 3
Dhcp4Decline -> 4
Dhcp4Ack -> 5
Dhcp4Nak -> 6
Dhcp4Release -> 7
Dhcp4Inform -> 8
atomSize _ = 1
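-- Illustrative round trip (added here, not part of the original module):
-- encoding a message type with 'putAtom' and decoding it with 'getAtom'
-- recovers the original constructor.
messageTypeRoundTrips :: Bool
messageTypeRoundTrips =
  runGet (getAtom :: Get Dhcp4MessageType) (runPut (putAtom Dhcp4Discover))
    == Right Dhcp4Discover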
-----------------------------------------------------------------------
-- Control tag type and operations ------------------------------------
-----------------------------------------------------------------------
data ControlTag
= ControlPad
| ControlEnd
deriving (Eq, Show)
putControlOption :: ControlTag -> Put
putControlOption opt = case opt of
ControlPad -> putAtom (KnownTag OptTagPad)
ControlEnd -> putAtom (KnownTag OptTagEnd)
-----------------------------------------------------------------------
-- Option tag type and operations -------------------------------------
-----------------------------------------------------------------------
data Dhcp4OptionTag
= OptTagPad
| OptTagEnd
| OptTagSubnetMask
| OptTagTimeOffset
| OptTagRouters
| OptTagTimeServers
| OptTagIEN116NameServers
| OptTagNameServers
| OptTagLogServers
| OptTagCookieServers
| OptTagLPRServers
| OptTagImpressServers
| OptTagResourceLocationServers
| OptTagHostName
| OptTagBootFileSize
| OptTagMeritDumpFile
| OptTagDomainName
| OptTagSwapServer
| OptTagRootPath
| OptTagExtensionsPath
| OptTagEnableIPForwarding
| OptTagEnableNonLocalSourceRouting
| OptTagPolicyFilters
| OptTagMaximumDatagramReassemblySize
| OptTagDefaultTTL
| OptTagPathMTUAgingTimeout
| OptTagPathMTUPlateauTable
| OptTagInterfaceMTU
| OptTagAllSubnetsAreLocal
| OptTagBroadcastAddress
| OptTagPerformMaskDiscovery
| OptTagShouldSupplyMasks
| OptTagShouldPerformRouterDiscovery
| OptTagRouterSolicitationAddress
| OptTagStaticRoutes
| OptTagShouldNegotiateArpTrailers
| OptTagArpCacheTimeout
| OptTagUseRFC1042EthernetEncapsulation
| OptTagTcpDefaultTTL
| OptTagTcpKeepaliveInterval
| OptTagTcpKeepaliveUseGarbage
| OptTagNisDomainName
| OptTagNisServers
| OptTagNtpServers
| OptTagVendorSpecific
| OptTagNetBiosNameServers
| OptTagNetBiosDistributionServers
| OptTagNetBiosNodeType
| OptTagNetBiosScope
| OptTagXWindowsFontServer
| OptTagXWindowsDisplayManagers
| OptTagNisPlusDomain
| OptTagNisPlusServers
| OptTagSmtpServers
| OptTagPopServers
| OptTagNntpServers
| OptTagWwwServers
| OptTagFingerServers
| OptTagIrcServers
| OptTagStreetTalkServers
| OptTagStreetTalkDirectoryAssistanceServers
| OptTagFQDN
| OptTagRequestIPAddress
| OptTagIPAddressLeaseTime
| OptTagOverload
| OptTagTftpServer
| OptTagBootfileName
| OptTagMessageType
| OptTagServerIdentifier
| OptTagParameterRequestList
| OptTagErrorMessage
| OptTagMaxDHCPMessageSize
| OptTagRenewalTime
| OptTagRebindingTime
| OptTagVendorClass
| OptTagClientIdentifier
| OptTagNetWareDomainName
| OptTagNetWareInfo
| OptTagAutoconfiguration
deriving (Show,Eq)
data OptionTagOrError = UnknownTag Word8 | KnownTag Dhcp4OptionTag
deriving (Show,Eq)
getOptionTag :: Get OptionTagOrError
getOptionTag = f =<< getWord8
where
r = return . KnownTag
f 0 = r OptTagPad
f 1 = r OptTagSubnetMask
f 2 = r OptTagTimeOffset
f 3 = r OptTagRouters
f 4 = r OptTagTimeServers
f 5 = r OptTagIEN116NameServers
f 6 = r OptTagNameServers
f 7 = r OptTagLogServers
f 8 = r OptTagCookieServers
f 9 = r OptTagLPRServers
f 10 = r OptTagImpressServers
f 11 = r OptTagResourceLocationServers
f 12 = r OptTagHostName
f 13 = r OptTagBootFileSize
f 14 = r OptTagMeritDumpFile
f 15 = r OptTagDomainName
f 16 = r OptTagSwapServer
f 17 = r OptTagRootPath
f 18 = r OptTagExtensionsPath
f 19 = r OptTagEnableIPForwarding
f 20 = r OptTagEnableNonLocalSourceRouting
f 21 = r OptTagPolicyFilters
f 22 = r OptTagMaximumDatagramReassemblySize
f 23 = r OptTagDefaultTTL
f 24 = r OptTagPathMTUAgingTimeout
f 25 = r OptTagPathMTUPlateauTable
f 26 = r OptTagInterfaceMTU
f 27 = r OptTagAllSubnetsAreLocal
f 28 = r OptTagBroadcastAddress
f 29 = r OptTagPerformMaskDiscovery
f 30 = r OptTagShouldSupplyMasks
f 31 = r OptTagShouldPerformRouterDiscovery
f 32 = r OptTagRouterSolicitationAddress
f 33 = r OptTagStaticRoutes
f 34 = r OptTagShouldNegotiateArpTrailers
f 35 = r OptTagArpCacheTimeout
f 36 = r OptTagUseRFC1042EthernetEncapsulation
f 37 = r OptTagTcpDefaultTTL
f 38 = r OptTagTcpKeepaliveInterval
f 39 = r OptTagTcpKeepaliveUseGarbage
f 40 = r OptTagNisDomainName
f 41 = r OptTagNisServers
f 42 = r OptTagNtpServers
f 43 = r OptTagVendorSpecific
f 44 = r OptTagNetBiosNameServers
f 45 = r OptTagNetBiosDistributionServers
f 46 = r OptTagNetBiosNodeType
f 47 = r OptTagNetBiosScope
f 48 = r OptTagXWindowsFontServer
f 49 = r OptTagXWindowsDisplayManagers
f 50 = r OptTagRequestIPAddress
f 51 = r OptTagIPAddressLeaseTime
f 52 = r OptTagOverload
f 53 = r OptTagMessageType
f 54 = r OptTagServerIdentifier
f 55 = r OptTagParameterRequestList
f 56 = r OptTagErrorMessage
f 57 = r OptTagMaxDHCPMessageSize
f 58 = r OptTagRenewalTime
f 59 = r OptTagRebindingTime
f 60 = r OptTagVendorClass
f 61 = r OptTagClientIdentifier
f 62 = r OptTagNetWareDomainName
f 63 = r OptTagNetWareInfo
f 64 = r OptTagNisPlusDomain
f 65 = r OptTagNisPlusServers
f 66 = r OptTagTftpServer
f 67 = r OptTagBootfileName
f 69 = r OptTagSmtpServers
f 70 = r OptTagPopServers
f 71 = r OptTagNntpServers
f 72 = r OptTagWwwServers
f 73 = r OptTagFingerServers
f 74 = r OptTagIrcServers
f 75 = r OptTagStreetTalkServers
f 76 = r OptTagStreetTalkDirectoryAssistanceServers
f 81 = r OptTagFQDN
f 116 = r OptTagAutoconfiguration
f 255 = r OptTagEnd
f t = return (UnknownTag t)
putOptionTag :: OptionTagOrError -> Put
putOptionTag (UnknownTag t) = putAtom t
putOptionTag (KnownTag t) = putAtom (f t)
where
f :: Dhcp4OptionTag -> Word8
f OptTagPad = 0
f OptTagEnd = 255
f OptTagSubnetMask = 1
f OptTagTimeOffset = 2
f OptTagRouters = 3
f OptTagTimeServers = 4
f OptTagIEN116NameServers = 5
f OptTagNameServers = 6
f OptTagLogServers = 7
f OptTagCookieServers = 8
f OptTagLPRServers = 9
f OptTagImpressServers = 10
f OptTagResourceLocationServers = 11
f OptTagHostName = 12
f OptTagBootFileSize = 13
f OptTagMeritDumpFile = 14
f OptTagDomainName = 15
f OptTagSwapServer = 16
f OptTagRootPath = 17
f OptTagExtensionsPath = 18
f OptTagEnableIPForwarding = 19
f OptTagEnableNonLocalSourceRouting = 20
f OptTagPolicyFilters = 21
f OptTagMaximumDatagramReassemblySize = 22
f OptTagDefaultTTL = 23
f OptTagPathMTUAgingTimeout = 24
f OptTagPathMTUPlateauTable = 25
f OptTagInterfaceMTU = 26
f OptTagAllSubnetsAreLocal = 27
f OptTagBroadcastAddress = 28
f OptTagPerformMaskDiscovery = 29
f OptTagShouldSupplyMasks = 30
f OptTagShouldPerformRouterDiscovery = 31
f OptTagRouterSolicitationAddress = 32
f OptTagStaticRoutes = 33
f OptTagShouldNegotiateArpTrailers = 34
f OptTagArpCacheTimeout = 35
f OptTagUseRFC1042EthernetEncapsulation = 36
f OptTagTcpDefaultTTL = 37
f OptTagTcpKeepaliveInterval = 38
f OptTagTcpKeepaliveUseGarbage = 39
f OptTagNisDomainName = 40
f OptTagNisServers = 41
f OptTagNtpServers = 42
f OptTagVendorSpecific = 43
f OptTagNetBiosNameServers = 44
f OptTagNetBiosDistributionServers = 45
f OptTagNetBiosNodeType = 46
f OptTagNetBiosScope = 47
f OptTagXWindowsFontServer = 48
f OptTagXWindowsDisplayManagers = 49
f OptTagRequestIPAddress = 50
f OptTagIPAddressLeaseTime = 51
f OptTagOverload = 52
f OptTagMessageType = 53
f OptTagServerIdentifier = 54
f OptTagParameterRequestList = 55
f OptTagErrorMessage = 56
f OptTagMaxDHCPMessageSize = 57
f OptTagRenewalTime = 58
f OptTagRebindingTime = 59
f OptTagVendorClass = 60
f OptTagClientIdentifier = 61
f OptTagNetWareDomainName = 62
f OptTagNetWareInfo = 63
f OptTagNisPlusDomain = 64
f OptTagNisPlusServers = 65
f OptTagTftpServer = 66
f OptTagBootfileName = 67
f OptTagSmtpServers = 69
f OptTagPopServers = 70
f OptTagNntpServers = 71
f OptTagWwwServers = 72
f OptTagFingerServers = 73
f OptTagIrcServers = 74
f OptTagStreetTalkServers = 75
f OptTagStreetTalkDirectoryAssistanceServers = 76
f OptTagFQDN = 81
f OptTagAutoconfiguration = 116
-----------------------------------------------------------------------
-- NetBIOS node type and operations -----------------------------------
-----------------------------------------------------------------------
data NetBiosNodeType
= BNode
| PNode
| MNode
| HNode
deriving (Show,Eq)
instance Option NetBiosNodeType where
getOption = defaultFixedGetOption
putOption = defaultFixedPutOption
instance CodecAtom NetBiosNodeType where
getAtom = do
b <- getAtom
case b :: Word8 of
0x1 -> return BNode
0x2 -> return PNode
0x4 -> return MNode
0x8 -> return HNode
_ -> fail "Unknown NetBIOS node type"
putAtom t = putAtom $ case t of
BNode -> 0x1 :: Word8
PNode -> 0x2
MNode -> 0x4
HNode -> 0x8
atomSize _ = 1
-----------------------------------------------------------------------
-- Overload option type and operations --------------------------------
-----------------------------------------------------------------------
data OverloadOption
= UsedFileField
| UsedSNameField
| UsedBothFields
deriving (Show, Eq)
instance Option OverloadOption where
getOption = defaultFixedGetOption
putOption = defaultFixedPutOption
instance CodecAtom OverloadOption where
getAtom = do b <- getAtom
case b :: Word8 of
1 -> return UsedFileField
2 -> return UsedSNameField
3 -> return UsedBothFields
_ -> fail ("Bad overload value 0x" ++ showHex b "")
putAtom t = putAtom $ case t of
UsedFileField -> 1 :: Word8
UsedSNameField -> 2
UsedBothFields -> 3
atomSize _ = atomSize (undefined :: Word8)
-----------------------------------------------------------------------
-- Options list operations --------------------------------------------
-----------------------------------------------------------------------
getDhcp4Options :: ByteString -> ByteString
-> Get (String, String, [Dhcp4Option])
getDhcp4Options sname file = do
MagicCookie <- getAtom
options0 <- remainingAsOptions
case lookupOverload options0 of
Nothing -> return (nullTerminated sname, nullTerminated file, options0)
Just UsedFileField -> do
options1 <- localParse file remainingAsOptions
let options = options0 ++ options1
NVTAsciiString fileString
= fromMaybe (NVTAsciiString "") (lookupFile options)
return (nullTerminated sname, fileString, options)
Just UsedSNameField -> do
options1 <- localParse sname remainingAsOptions
let options = options0 ++ options1
NVTAsciiString snameString
= fromMaybe (NVTAsciiString "") (lookupSname options)
return (snameString, nullTerminated file, options)
Just UsedBothFields -> do
-- The file field MUST be interpreted for options before the sname field.
-- RFC 2131, Section 4.1, Page 24
options1 <- localParse file remainingAsOptions
options2 <- localParse sname remainingAsOptions
let options = options0 ++ options1 ++ options2
NVTAsciiString snameString
= fromMaybe (NVTAsciiString "") (lookupSname options)
NVTAsciiString fileString
= fromMaybe (NVTAsciiString "") (lookupFile options)
return (snameString, fileString, options)
where
remainingAsOptions = scrubControls =<< repeatedly getDhcp4Option
localParse bs m = case runGet m bs of
Right x -> return x
Left err -> fail err
putDhcp4Options :: [Dhcp4Option] -> Put
putDhcp4Options opts =
do putAtom MagicCookie
traverse_ putDhcp4Option opts
putControlOption ControlEnd
scrubControls :: (A.Applicative m, Monad m)
=> [Either ControlTag Dhcp4Option] -> m [Dhcp4Option]
scrubControls [] =
fail "No END option found"
scrubControls (Left ControlPad : xs) =
scrubControls xs
scrubControls (Left ControlEnd : xs) =
do traverse_ eatPad xs
return []
scrubControls (Right o : xs) =
do os <- scrubControls xs
return (o:os)
-- | 'eatPad' fails on any non 'ControlPad' option with an error message.
eatPad :: Monad m => Either ControlTag Dhcp4Option -> m ()
eatPad (Left ControlPad) = return ()
eatPad _ = fail "Unexpected option after END option"
replicateA :: A.Applicative f => Int -> f a -> f [a]
replicateA n f = T.sequenceA (replicate n f)
repeatedly :: Get a -> Get [a]
repeatedly m = go []
where
go acc =
do done <- isEmpty
if done then return (reverse acc)
else do a <- m
go (a:acc)
nullTerminated :: ByteString -> String
nullTerminated = takeWhile (/= '\NUL') . BS8.unpack
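-- Illustrative example (not part of the original module): conversion stops at
-- the first NUL byte.
--
--   nullTerminated (BS8.pack "boot\NUL\NUL") == "boot"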
lookupOverload :: [Dhcp4Option] -> Maybe OverloadOption
lookupOverload = foldr f Nothing
where f (OptOverload o) _ = Just o
f _ a = a
lookupFile :: [Dhcp4Option] -> Maybe NVTAsciiString
lookupFile = foldr f Nothing
where f (OptBootfileName fn) _ = Just fn
f _ a = a
lookupSname :: [Dhcp4Option] -> Maybe NVTAsciiString
lookupSname = foldr f Nothing
where f (OptTftpServer n) _ = Just n
f _ a = a
lookupParams :: [Dhcp4Option] -> Maybe [OptionTagOrError]
lookupParams = foldr f Nothing
where f (OptParameterRequestList n) _ = Just n
f _ a = a
lookupMessageType :: [Dhcp4Option] -> Maybe Dhcp4MessageType
lookupMessageType = foldr f Nothing
where f (OptMessageType n) _ = Just n
f _ a = a
lookupRequestAddr :: [Dhcp4Option] -> Maybe IP4
lookupRequestAddr = foldr f Nothing
where f (OptRequestIPAddress n) _ = Just n
f _ a = a
lookupLeaseTime :: [Dhcp4Option] -> Maybe Word32
lookupLeaseTime = foldr f Nothing
where f (OptIPAddressLeaseTime t) _ = Just t
f _ a = a
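-- Example of the lookup helpers above (illustrative, not from the original
-- source); each scans the option list for the first matching option:
--
--   lookupLeaseTime [OptIPAddressLeaseTime 3600] == Just 3600
--   lookupLeaseTime []                           == Nothing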
-----------------------------------------------------------------------
-- Protected parser and unparser monad --------------------------------
-----------------------------------------------------------------------
class Option a where
getOption :: Get a
putOption :: a -> Put
instance CodecAtom a => Option [a] where
getOption = do
let (n, m) = getRecord
len <- getLen
let (count, remainder) = divMod len n
unless (remainder == 0) (fail ("Length was not a multiple of " ++ show n))
unless (count > 0) (fail "Minimum length not met")
replicateA count $ label "List of fixed-length values" $ isolate n m
putOption xs = do putLen (atomSize (head xs) * length xs)
traverse_ putAtom xs
instance (CodecAtom a, CodecAtom b) => Option (a,b) where
getOption = defaultFixedGetOption
putOption = defaultFixedPutOption
instance Option Bool where
getOption = defaultFixedGetOption
putOption = defaultFixedPutOption
instance Option Word8 where
getOption = defaultFixedGetOption
putOption = defaultFixedPutOption
instance Option Word16 where
getOption = defaultFixedGetOption
putOption = defaultFixedPutOption
instance Option Word32 where
getOption = defaultFixedGetOption
putOption = defaultFixedPutOption
instance Option IP4 where
getOption = defaultFixedGetOption
putOption = defaultFixedPutOption
instance Option SubnetMask where
getOption = defaultFixedGetOption
putOption = defaultFixedPutOption
instance Option ByteString where
getOption = do len <- getLen
getByteString len
putOption bs = do putLen (BS.length bs)
putByteString bs
defaultFixedGetOption :: CodecAtom a => Get a
defaultFixedGetOption = fixedLen n m
where (n,m) = getRecord
defaultFixedPutOption :: CodecAtom a => a -> Put
defaultFixedPutOption x = do
putLen (atomSize x)
putAtom x
fixedLen :: Int -> Get a -> Get a
fixedLen expectedLen m = do
len <- getLen
unless (len == expectedLen) (fail "Bad length on \"fixed-length\" option.")
label "Fixed length field" (isolate expectedLen m)
getRecord :: CodecAtom a => (Int, Get a)
getRecord = (atomSize undef, m)
where
(undef, m) = (undefined, getAtom) :: CodecAtom a => (a, Get a)
instance CodecAtom OptionTagOrError where
getAtom = getOptionTag
putAtom x = putOptionTag x
atomSize _ = 1
newtype NVTAsciiString = NVTAsciiString String
deriving (Eq, Show)
instance Option NVTAsciiString where
getOption = do len <- getLen
bs <- getByteString len
return (NVTAsciiString (nullTerminated bs))
putOption (NVTAsciiString str) = do
putLen (length str)
putByteString (BS8.pack str)
getLen :: Get Int
getLen = fromIntegral `fmap` getWord8
putLen :: Int -> Put
putLen n = putWord8 (fromIntegral n)
| GaloisInc/HaNS | src/Hans/IP4/Dhcp/Options.hs | bsd-3-clause | 35,326 | 187 | 21 | 10,135 | 7,323 | 3,623 | 3,700 | 772 | 80 |
{-# LANGUAGE MagicHash #-}
module Test005 where
import RIO
import Prelude (print)
import Kask.Time
import GHC.Exts
fib1 :: Integer -> Integer
fib1 n
| n == 0 || n == 1 = n
| otherwise = fib1 (n - 1) + fib1 (n - 2)
fib2 :: Int -> Int
fib2 n
| n == 0 || n == 1 = n
| otherwise = fib2 (n - 1) + fib2 (n - 2)
fib3 :: Int# -> Int#
fib3 n# = case n# of
0# -> 0#
1# -> 1#
_ -> fib3 (n# -# 1#) +# fib3 (n# -# 2#)
test1 :: IO ()
test1 = do
v1 <- logging "fib1 took " (withMsecs (fib1 40)) " msecs"
print v1
v2 <- logging "fib2 took " (withMsecs (fib2 40)) " msecs"
print v2
let s = fib3 40#
v3 <- logging "fib3 took " (withMsecs (I# s)) " msecs"
print v3
| kongra/kask-base | app/Test005.hs | bsd-3-clause | 682 | 0 | 12 | 189 | 348 | 170 | 178 | 28 | 3 |
{-# LANGUAGE ScopedTypeVariables #-}
-- |Utilities for filling 'BufferObject's.
module Graphics.GLUtil.BufferObjects where
import Data.Word (Word32)
import Graphics.Rendering.OpenGL
import Foreign.ForeignPtr
import Foreign.Ptr
import Foreign.Storable
import Data.Array.Storable
import qualified Data.Vector.Storable as V
import Data.ByteString (ByteString, useAsCStringLen)
-- |Allocate and fill a 'BufferObject' from a list of 'Storable's.
makeBuffer :: Storable a => BufferTarget -> [a] -> IO BufferObject
makeBuffer target elems = makeBufferLen target (length elems) elems
-- |Allocate and fill a 'BufferObject' from a list of 'Storable's
-- whose length is explicitly given. This is useful when the list is
-- of known length, as it avoids a traversal to find the length.
makeBufferLen :: forall a. Storable a =>
BufferTarget -> Int -> [a] -> IO BufferObject
makeBufferLen target len elems =
do [buffer] <- genObjectNames 1
bindBuffer target $= Just buffer
let n = fromIntegral $ len * sizeOf (undefined::a)
arr <- newListArray (0, len - 1) elems
withStorableArray arr $ \ptr ->
bufferData target $= (n, ptr, StaticDraw)
return buffer
-- |@replaceBuffer target elements@ replaces the buffer data attached
-- to the buffer object currently bound to @target@ with the supplied
-- list. Any previous data is deleted.
replaceBuffer :: forall a. Storable a => BufferTarget -> [a] -> IO ()
replaceBuffer target elems = do arr <- newListArray (0, len - 1) elems
withStorableArray arr $ \ptr ->
bufferData target $= (n, ptr, StaticDraw)
where len = length elems
n = fromIntegral $ len * sizeOf (undefined::a)
-- |Allocate and fill a 'BufferObject' with the given number of bytes
-- from the supplied pointer.
fromPtr :: BufferTarget -> Int -> Ptr a -> IO BufferObject
fromPtr target numBytes ptr =
do [buffer] <- genObjectNames 1
bindBuffer target $= Just buffer
bufferData target $= (fromIntegral numBytes, ptr, StaticDraw)
return buffer
-- |Fill a buffer with a 'ByteString'.
fromByteString :: BufferTarget -> ByteString -> IO BufferObject
fromByteString target b = useAsCStringLen b (uncurry . flip $ fromPtr target)
-- |Fill a buffer with data from a 'ForeignPtr'. The application
-- @fromForeignPtr target len fptr@ fills a @target@ 'BufferTarget'
-- with @len@ elements starting from @fptr@.
fromForeignPtr :: forall a. Storable a =>
BufferTarget -> Int -> ForeignPtr a -> IO BufferObject
fromForeignPtr target len fptr = withForeignPtr fptr $ fromPtr target numBytes
where numBytes = sizeOf (undefined::a) * len
-- |Fill a buffer with data from a 'V.Vector'.
fromVector :: forall a. Storable a =>
BufferTarget -> V.Vector a -> IO BufferObject
fromVector target v = V.unsafeWith v $ fromPtr target numBytes
where numBytes = fromIntegral $ V.length v * sizeOf (undefined::a)
-- |@replaceVector target v@ replaces the buffer data attached to the
-- buffer object currently bound to @target@ with the supplied
-- 'V.Vector'. Any previous data is deleted.
replaceVector :: forall a. Storable a => BufferTarget -> V.Vector a -> IO ()
replaceVector target v = V.unsafeWith v $ \ptr ->
bufferData target $= (numBytes, ptr, StaticDraw)
where numBytes = fromIntegral $ V.length v * sizeOf (undefined::a)
-- |Produce a 'Ptr' value to be used as an offset of the given number
-- of bytes.
offsetPtr :: Int -> Ptr a
offsetPtr = wordPtrToPtr . fromIntegral
-- |A zero-offset 'Ptr'.
offset0 :: Ptr a
offset0 = offsetPtr 0
-- | A class for things we know how to serialize into an OpenGL
-- buffer.
class BufferSource v where
fromSource :: BufferTarget -> v -> IO BufferObject
instance Storable a => BufferSource [a] where
fromSource = makeBuffer
instance Storable a => BufferSource (V.Vector a) where
fromSource = fromVector
-- | Create an 'ElementArrayBuffer' from a source of 'Word32's.
bufferIndices :: BufferSource (v Word32) => v Word32 -> IO BufferObject
bufferIndices = fromSource ElementArrayBuffer
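-- A minimal usage sketch (not part of the original module; 'positions' and
-- 'indices' are placeholder names):
--
-- > do vbo <- fromSource ArrayBuffer (positions :: V.Vector Float)
-- >    ebo <- bufferIndices (indices :: V.Vector Word32)
-- >    bindBuffer ArrayBuffer $= Just vbo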
| coghex/abridgefaraway | src/GLUtil/BufferObjects.hs | bsd-3-clause | 4,143 | 0 | 12 | 854 | 978 | 506 | 472 | 60 | 1 |
{-# LANGUAGE CPP, TemplateHaskell #-}
module Main where
#if METHOD == 1
import Data.Set.BUSplay
#else
import Data.Set.Splay
#endif
import Prelude hiding (minimum, maximum)
import Test.Framework.TH.Prime
import Test.Framework.Providers.DocTest.Prime
import Test.Framework.Providers.HUnit
import Test.HUnit
main :: IO ()
main = $(defaultMainGenerator)
----------------------------------------------------------------
doc_test :: DocTest
#if METHOD == 1
doc_test = docTest ["../Data/Set/BUSplay.hs"] ["-i.."]
#else
doc_test = docTest ["../Data/Set/Splay.hs"] ["-i.."]
#endif
----------------------------------------------------------------
(@?==) :: Eq a => Splay a -> Splay a -> Assertion
t @?== a = assertBool "" $ t === a
----------------------------------------------------------------
t_splay :: Splay Int
t_splay = i
where
a = Node Leaf 5 Leaf
b = Node Leaf 4 a
c = Node Leaf 3 b
d = Node Leaf 2 c
e = Node d 6 Leaf
f = Node Leaf 1 e
g = Node f 7 Leaf
h = Node g 8 Leaf
i = Node h 9 Leaf
a_splay :: Splay Int
a_splay = a
where
c = Node Leaf 3 Leaf
b = Node c 4 Leaf
d = Node Leaf 2 b
f = Node Leaf 1 d
e = Node Leaf 6 Leaf
g = Node e 7 Leaf
i = Node Leaf 9 Leaf
h = Node g 8 i
a = Node f 5 h
case_splay :: Assertion
case_splay = snd (member 5 t_splay) @?== a_splay
----------------------------------------------------------------
t_zigzig :: Splay Int
t_zigzig = g
where
a = Node Leaf 1 Leaf
b = Node a 2 Leaf
c = Node b 3 Leaf
d = Node c 4 Leaf
e = Node d 5 Leaf
f = Node e 6 Leaf
g = Node f 7 Leaf
a_zigzig :: Splay Int
a_zigzig = a
where
c = Node Leaf 3 Leaf
b = Node Leaf 2 c
e = Node Leaf 5 Leaf
d = Node b 4 e
g = Node Leaf 7 Leaf
f = Node d 6 g
a = Node Leaf 1 f
case_zigzig :: Assertion
case_zigzig = snd (member 1 t_zigzig) @?== a_zigzig
----------------------------------------------------------------
t_zigzig_zig :: Splay Int
t_zigzig_zig = l
where
Node l _ _ = t_zigzig
a_zigzig_zig :: Splay Int
a_zigzig_zig = a
where
#if METHOD == 1
c = Node Leaf 3 Leaf
b = Node Leaf 2 c
e = Node Leaf 5 Leaf
d = Node b 4 e
f = Node d 6 Leaf
a = Node Leaf 1 f
#else
b = Node Leaf 2 Leaf
d = Node Leaf 4 Leaf
c = Node b 3 d
f = Node Leaf 6 Leaf
e = Node c 5 f
a = Node Leaf 1 e
#endif
case_zigzig_zig :: Assertion
case_zigzig_zig = snd (member 1 t_zigzig_zig) @?== a_zigzig_zig
----------------------------------------------------------------
t_zigzag :: Splay Int
t_zigzag = g
where
a = Node Leaf 4 Leaf
b = Node Leaf 3 a
c = Node b 5 Leaf
d = Node Leaf 2 c
e = Node d 6 Leaf
f = Node Leaf 1 e
g = Node f 7 Leaf
a_zigzag :: Splay Int
a_zigzag = a
where
b = Node Leaf 3 Leaf
d = Node Leaf 2 b
f = Node Leaf 1 d
c = Node Leaf 5 Leaf
e = Node c 6 Leaf
g = Node e 7 Leaf
a = Node f 4 g
case_zigzag :: Assertion
case_zigzag = snd (member 4 t_zigzag) @?== a_zigzag
----------------------------------------------------------------
t_zigzag_zig :: Splay Int
t_zigzag_zig = l
where
Node l _ _ = t_zigzag
-- Surprise: TD and BU are the same!
a_zigzag_zig :: Splay Int
a_zigzag_zig = a
where
b = Node Leaf 3 Leaf
d = Node Leaf 2 b
f = Node Leaf 1 d
c = Node Leaf 5 Leaf
e = Node c 6 Leaf
a = Node f 4 e
case_zigzag_zig :: Assertion
case_zigzag_zig = snd (member 4 t_zigzag_zig) @?== a_zigzag_zig
| kazu-yamamoto/llrbtree | test/SplaySet.hs | bsd-3-clause | 3,582 | 0 | 8 | 1,027 | 1,154 | 603 | 551 | 104 | 1 |
{-#LANGUAGE NoImplicitPrelude #-}
{-#LANGUAGE OverloadedStrings #-}
{-#LANGUAGE TypeFamilies #-}
{-#LANGUAGE MultiParamTypeClasses #-}
{-#LANGUAGE FlexibleInstances #-}
{-#LANGUAGE FlexibleContexts #-}
{-#LANGUAGE LambdaCase #-}
{-#LANGUAGE DeriveGeneric #-}
{-#LANGUAGE DeriveFunctor #-}
{-#LANGUAGE TypeApplications #-}
-- | Types for and operations on backend data.
module Web.Sprinkles.Backends.Data
( BackendData (..)
, BackendMeta (..)
, BackendSource (..)
, Verification (..)
, RawBytes (..)
, toBackendData
, Items (..)
, reduceItems
, addBackendDataChildren
, rawToLBS
, rawFromLBS
, serializeBackendSource
, deserializeBackendSource
)
where
import Web.Sprinkles.Prelude
import Text.Ginger (ToGVal (..), GVal, Run (..), dict, (~>))
import qualified Text.Ginger as Ginger
import Data.Aeson as JSON
import Data.Aeson.TH as JSON
import Data.Yaml as YAML
import qualified Data.Serialize as Cereal
import Data.Serialize (Serialize)
import Foreign.C.Types (CTime (..))
import Network.Mime (MimeType)
import Data.Default (Default (..))
import Data.Time.Clock.POSIX (POSIXTime, posixSecondsToUTCTime)
import Data.Time (UTCTime, LocalTime, utc, utcToLocalTime)
import Data.Scientific (Scientific)
import qualified Data.ByteString.Char8 as BS8
import qualified Data.ByteString.Lazy.Char8 as LBS8
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import Text.Printf (printf)
import Data.Foldable (Foldable (foldMap))
import Control.Monad.Except (throwError)
import Web.Sprinkles.Backends.Spec
-- | Extract raw integer value from a 'CTime'
unCTime :: CTime -> Int
unCTime (CTime i) = fromIntegral i
-- | The shapes of data that can be returned from a backend query.
data Items a = NotFound -- ^ Nothing was found
| SingleItem a -- ^ A single item was requested, and this is it
| MultiItem [a] -- ^ Multiple items were requested, here they are
deriving (Functor)
instance Foldable Items where
foldMap f NotFound = mempty
foldMap f (SingleItem x) = f x
foldMap f (MultiItem xs) = mconcat $ map f xs
-- | Transform a raw list of results into an 'Items' value. This allows us
-- later to distinguish between Nothing Found vs. Empty List, and between
-- Single Item Requested And Found vs. Many Items Requested, One Found. This
-- is needed such that when a single item is requested, it gets converted to
-- 'GVal' and JSON as a scalar, while when we request many items and receive
-- one, it becomes a singleton list.
reduceItems :: FetchMode -> [a] -> Items a
reduceItems FetchOne [] = NotFound
reduceItems FetchOne (x:_) = SingleItem x
reduceItems FetchAll xs = MultiItem xs
reduceItems (FetchN n) xs = MultiItem $ take n xs
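-- Illustrative behaviour (added note, not from the original source); 'Items'
-- has no 'Show' instance here, so these are written as equations rather than
-- doctests:
--
--   reduceItems FetchOne   []        ~ NotFound
--   reduceItems FetchOne   [x, y]    ~ SingleItem x
--   reduceItems (FetchN 2) [x, y, z] ~ MultiItem [x, y]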
instance ToGVal m a => ToGVal m (Items a) where
toGVal NotFound = def
toGVal (SingleItem x) = toGVal x
toGVal (MultiItem xs) = toGVal xs
instance ToJSON a => ToJSON (Items a) where
toJSON NotFound = Null
toJSON (SingleItem x) = toJSON x
toJSON (MultiItem xs) = toJSON xs
data RawBytes =
RawBytes
{ rbLength :: IO Integer
, rbGetRange :: Integer -> Integer -> IO LByteString
}
rawFromLBS :: LByteString -> RawBytes
rawFromLBS b =
RawBytes
{ rbLength = return . fromIntegral $ length b
, rbGetRange = \start len ->
return
. take (fromIntegral len)
. drop (fromIntegral start)
$ b
}
rawToLBS :: RawBytes -> IO LByteString
rawToLBS r = do
len <- rbLength r
rbGetRange r 0 len
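-- Round-trip sketch (illustrative, not from the original source): wrapping a
-- lazy ByteString and reading it back yields the same bytes.
--
--   rawToLBS (rawFromLBS "hello")  -- returns "hello" in IO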
rawToGVal :: MonadIO m => RawBytes -> GVal (Run p m h)
rawToGVal raw =
dict
[ ("length", Ginger.fromFunction (gfnLength raw))
, ("read", Ginger.fromFunction (gfnRead raw))
, ("store", Ginger.fromFunction (gfnStore raw))
]
where
gfnLength :: MonadIO m => RawBytes -> [(Maybe Text, GVal (Run p m h))] -> Run p m h (GVal (Run p m h))
gfnLength raw args = do
length <- liftIO (rbLength raw)
return . toGVal $ length
gfnRead :: MonadIO m => RawBytes -> [(Maybe Text, GVal (Run p m h))] -> Run p m h (GVal (Run p m h))
gfnRead raw args = do
inputLength <- liftIO (rbLength raw)
let extracted =
Ginger.extractArgsDefL
[ ("start", def)
, ("length", toGVal inputLength)
]
args
case extracted of
Right [startG, lengthG] -> do
let start = fromMaybe 0 $ asInteger startG
length = fromMaybe inputLength $ asInteger lengthG
bytes <- liftIO (rbGetRange raw start length)
return $ (toGVal bytes) { Ginger.asBytes = Just (LBS.toStrict bytes) }
_ -> throwError $ Ginger.ArgumentsError (Just "RawBytes.read") ""
gfnStore :: MonadIO m => RawBytes -> [(Maybe Text, GVal (Run p m h))] -> Run p m h (GVal (Run p m h))
gfnStore raw args = do
let extracted =
Ginger.extractArgsDefL
[ ("filename", "stored")
]
args
case extracted of
Right [filenameG] -> liftIO $ do
let filename = unpack . Ginger.asText $ filenameG
len <- rbLength raw
bytes <- rbGetRange raw 0 len
LBS8.writeFile filename bytes
return . toGVal $ True
_ -> throwError $ Ginger.ArgumentsError (Just "RawBytes.store") ""
asInteger :: GVal m -> Maybe Integer
asInteger = fmap round . Ginger.asNumber
instance MonadIO m => ToGVal (Run p m h) RawBytes where
toGVal = rawToGVal
-- | A parsed record from a query result.
data BackendData p m h =
BackendData
{ bdJSON :: JSON.Value -- ^ Result body as JSON
, bdGVal :: GVal (Run p m h) -- ^ Result body as GVal
, bdRaw :: RawBytes -- ^ Raw result body source
, bdMeta :: BackendMeta -- ^ Meta-information
, bdChildren :: HashMap Text (BackendData p m h) -- ^ Child documents
, bdVerification :: Verification
}
data Verification
= Trusted
| VerifyCSRF
deriving (Show, Eq, Enum, Ord, Bounded)
-- | A raw (unparsed) record from a query result.
data BackendSource =
BackendSource
{ bsMeta :: BackendMeta
, bsSource :: RawBytes
, bsVerification :: Verification
}
deriving (Generic)
data SerializableBackendSource =
SerializableBackendSource
{ sbsMeta :: BackendMeta
, sbsSource :: LByteString
}
deriving (Generic)
instance Serialize SerializableBackendSource where
serializeBackendSource :: BackendSource -> IO SerializableBackendSource
serializeBackendSource bs = do
srcBytes <- rawToLBS . bsSource $ bs
return $ SerializableBackendSource (bsMeta bs) srcBytes
deserializeBackendSource :: SerializableBackendSource -> BackendSource
deserializeBackendSource sbs =
BackendSource (sbsMeta sbs) (rawFromLBS $ sbsSource sbs) Trusted
-- | Wrap a parsed backend value in a 'BackendData' structure. The original
-- raw 'BackendSource' value is needed alongside the parsed value, because the
-- resulting structure contains both the 'BackendMeta' and the raw (unparsed)
-- data from it.
toBackendData :: (ToJSON a, ToGVal (Run p m h) a) => BackendSource -> a -> BackendData p m h
toBackendData src val =
BackendData
{ bdJSON = toJSON val
, bdGVal = toGVal val
, bdRaw = bsSource src
, bdMeta = bsMeta src
, bdChildren = mapFromList []
, bdVerification = bsVerification src
}
addBackendDataChildren :: HashMap Text (BackendData p m h)
-> BackendData p m h
-> BackendData p m h
addBackendDataChildren children bd =
bd { bdChildren = children <> bdChildren bd }
instance ToJSON (BackendData p m h) where
toJSON = bdJSON
instance MonadIO m => ToGVal (Run p m h) (BackendData p m h) where
toGVal bd =
let baseVal = bdGVal bd
baseLookup = fromMaybe (const def) $ Ginger.asLookup baseVal
baseDictItems = Ginger.asDictItems baseVal
children = bdChildren bd
childrenG = toGVal children
in baseVal
{ Ginger.asLookup = Just $ \case
"props" -> return . toGVal . bdMeta $ bd
"children" -> return childrenG
"bytes" -> return . toGVal . bdRaw $ bd
k -> baseLookup k
, Ginger.asDictItems =
(("props" ~> bdMeta bd):) .
(("bytes" ~> bdRaw bd):) .
(("children", childrenG):) <$> baseDictItems
}
-- | Metadata for a backend query result.
data BackendMeta =
BackendMeta
{ bmMimeType :: MimeType
, bmMTime :: Maybe POSIXTime -- ^ Last modification time, if available
, bmName :: Text -- ^ Human-friendly name
, bmPath :: Text -- ^ Path, according to the semantics of the backend (file path or URI)
, bmSize :: Maybe Integer -- ^ Size of the raw source, in bytes, if available
}
deriving (Show, Generic)
instance Serialize BackendMeta where
put bm = do
Cereal.put $ bmMimeType bm
Cereal.put . fmap fromEnum $ bmMTime bm
Cereal.put . encodeUtf8 $ bmName bm
Cereal.put . encodeUtf8 $ bmPath bm
Cereal.put $ bmSize bm
get =
BackendMeta <$> Cereal.get
<*> (fmap toEnum <$> Cereal.get)
<*> (decodeUtf8 <$> Cereal.get)
<*> (decodeUtf8 <$> Cereal.get)
<*> Cereal.get
mtimeFlavors :: BackendMeta -> (Maybe POSIXTime, Maybe Scientific, Maybe LocalTime)
mtimeFlavors bm =
let mtime = bmMTime bm
in ( mtime
, realToFrac <$> mtime :: Maybe Scientific
, utcToLocalTime utc . posixSecondsToUTCTime <$> mtime
)
instance ToJSON BackendMeta where
toJSON bm =
let (mtime, mtimeSci, mtimeUTC) = mtimeFlavors bm
in JSON.object
[ "mimeType" .= decodeUtf8 @Text (bmMimeType bm)
, "mtime" .= mtimeSci
, "mtimeUTC" .= mtimeUTC
, "name" .= bmName bm
, "path" .= bmPath bm
, "size" .= bmSize bm
]
instance ToGVal m BackendMeta where
toGVal bm =
let (mtime, mtimeSci, mtimeUTC) = mtimeFlavors bm
in Ginger.dict
[ "type" ~> decodeUtf8 @Text (bmMimeType bm)
, "mtime" ~> mtimeSci
, "mtimeUTC" ~> mtimeUTC
, "name" ~> bmName bm
, "path" ~> bmPath bm
, "size" ~> bmSize bm
]
| tdammers/templar | src/Web/Sprinkles/Backends/Data.hs | bsd-3-clause | 10,824 | 1 | 20 | 3,248 | 2,821 | 1,502 | 1,319 | 240 | 3 |
import Control.Monad
import Language.Haskell.Interpreter
main :: IO ()
main = do r <- runInterpreter testHint
case r of
Left err -> printInterpreterError err
Right () -> putStrLn "that's all folks"
-- observe that Interpreter () is an alias for InterpreterT IO ()
testHint :: Interpreter ()
testHint =
do
say "Load SomeModule.hs"
loadModules ["SomeModule.hs"]
--
say "Put the Prelude, Data.Map and *SomeModule in scope"
say "Data.Map is qualified as M!"
setTopLevelModules ["SomeModule"]
setImportsQ [("Prelude", Nothing), ("Data.Map", Just "M")]
--
say "Now we can query the type of an expression"
let expr1 = "M.singleton (f, g, h, 42)"
say $ "e.g. typeOf " ++ expr1
say =<< typeOf expr1
--
say $ "Observe that f, g and h are defined in SomeModule.hs, " ++
"but f is not exported. Let's check it..."
exports <- getModuleExports "SomeModule"
say (show exports)
--
say "We can also evaluate an expression; the result will be a string"
let expr2 = "length $ concat [[f,g],[h]]"
say $ concat ["e.g. eval ", show expr1]
a <- eval expr2
say (show a)
--
say "Or we can interpret it as a proper, say, int value!"
a_int <- interpret expr2 (as :: Int)
say (show a_int)
--
say "This works for any monomorphic type, even for function types"
let expr3 = "\\(Just x) -> succ x"
say $ "e.g. we interpret " ++ expr3 ++
" with type Maybe Int -> Int and apply it on Just 7"
fun <- interpret expr3 (as :: Maybe Int -> Int)
say . show $ fun (Just 7)
--
say "And sometimes we can even use the type system to infer the expected type (eg Maybe Bool -> Bool)!"
bool_val <- (interpret expr3 infer `ap` (return $ Just False))
say (show $ not bool_val)
--
say "Here we evaluate an expression of type string, that when evaluated (again) leads to a string"
res <- interpret "head $ map show [\"Worked!\", \"Didn't work\"]" infer >>= flip interpret infer
say res
say :: String -> Interpreter ()
say = liftIO . putStrLn
printInterpreterError :: InterpreterError -> IO ()
printInterpreterError e = putStrLn $ "Ups... " ++ (show e)
| hakaru-dev/hint-exts | examples/example.hs | bsd-3-clause | 2,309 | 0 | 12 | 662 | 538 | 250 | 288 | 48 | 2 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[Foreign]{Foreign calls}
-}
{-# LANGUAGE DeriveDataTypeable #-}
module ForeignCall (
ForeignCall(..), isSafeForeignCall,
Safety(..), playSafe, playInterruptible,
CExportSpec(..), CLabelString, isCLabelString, pprCLabelString,
CCallSpec(..),
CCallTarget(..), isDynamicTarget,
CCallConv(..), defaultCCallConv, ccallConvToInt, ccallConvAttribute,
Header(..), CType(..),
) where
import FastString
import Binary
import Outputable
import Module
import BasicTypes ( SourceText )
import Data.Char
import Data.Data
{-
************************************************************************
* *
\subsubsection{Data types}
* *
************************************************************************
-}
newtype ForeignCall = CCall CCallSpec
deriving Eq
{-! derive: Binary !-}
isSafeForeignCall :: ForeignCall -> Bool
isSafeForeignCall (CCall (CCallSpec _ _ safe)) = playSafe safe
-- We may need more clues to distinguish foreign calls
-- but this simple printer will do for now
instance Outputable ForeignCall where
ppr (CCall cc) = ppr cc
data Safety
= PlaySafe -- Might invoke Haskell GC, or do a call back, or
-- switch threads, etc. So make sure things are
-- tidy before the call. Additionally, in the threaded
-- RTS we arrange for the external call to be executed
-- by a separate OS thread, i.e., _concurrently_ to the
-- execution of other Haskell threads.
| PlayInterruptible -- Like PlaySafe, but additionally
-- the worker thread running this foreign call may
-- be unceremoniously killed, so it must be scheduled
-- on an unbound thread.
| PlayRisky -- None of the above can happen; the call will return
-- without interacting with the runtime system at all
deriving ( Eq, Show, Data, Typeable )
-- Show used just for Show Lex.Token, I think
{-! derive: Binary !-}
instance Outputable Safety where
ppr PlaySafe = ptext (sLit "safe")
ppr PlayInterruptible = ptext (sLit "interruptible")
ppr PlayRisky = ptext (sLit "unsafe")
playSafe :: Safety -> Bool
playSafe PlaySafe = True
playSafe PlayInterruptible = True
playSafe PlayRisky = False
playInterruptible :: Safety -> Bool
playInterruptible PlayInterruptible = True
playInterruptible _ = False
{-
************************************************************************
* *
\subsubsection{Calling C}
* *
************************************************************************
-}
data CExportSpec
= CExportStatic -- foreign export ccall foo :: ty
CLabelString -- C Name of exported function
CCallConv
deriving (Data, Typeable)
{-! derive: Binary !-}
data CCallSpec
= CCallSpec CCallTarget -- What to call
CCallConv -- Calling convention to use.
Safety
deriving( Eq )
{-! derive: Binary !-}
-- The call target:
-- | How to call a particular function in C-land.
data CCallTarget
-- An "unboxed" ccall# to named function in a particular package.
= StaticTarget
CLabelString -- C-land name of label.
(Maybe PackageKey) -- What package the function is in.
-- If Nothing, then it's taken to be in the current package.
-- Note: This information is only used for PrimCalls on Windows.
-- See CLabel.labelDynamic and CoreToStg.coreToStgApp
-- for the difference in representation between PrimCalls
-- and ForeignCalls. If the CCallTarget is representing
-- a regular ForeignCall then it's safe to set this to Nothing.
-- The first argument of the import is the name of a function pointer (an Addr#).
-- Used when importing a label as "foreign import ccall "dynamic" ..."
Bool -- True => really a function
-- False => a value; only
-- allowed in CAPI imports
| DynamicTarget
deriving( Eq, Data, Typeable )
{-! derive: Binary !-}
isDynamicTarget :: CCallTarget -> Bool
isDynamicTarget DynamicTarget = True
isDynamicTarget _ = False
{-
Stuff to do with calling convention:
ccall: Caller allocates parameters, *and* deallocates them.
stdcall: Caller allocates parameters, callee deallocates.
Function name has @N after it, where N is number of arg bytes
e.g. _Foo@8. This convention is x86 (win32) specific.
See: http://www.programmersheaven.com/2/Calling-conventions
-}
-- any changes here should be replicated in the CallConv type in template haskell
data CCallConv = CCallConv | CApiConv | StdCallConv | PrimCallConv | JavaScriptCallConv
deriving (Eq, Data, Typeable)
{-! derive: Binary !-}
instance Outputable CCallConv where
ppr StdCallConv = ptext (sLit "stdcall")
ppr CCallConv = ptext (sLit "ccall")
ppr CApiConv = ptext (sLit "capi")
ppr PrimCallConv = ptext (sLit "prim")
ppr JavaScriptCallConv = ptext (sLit "javascript")
defaultCCallConv :: CCallConv
defaultCCallConv = CCallConv
ccallConvToInt :: CCallConv -> Int
ccallConvToInt StdCallConv = 0
ccallConvToInt CCallConv = 1
ccallConvToInt CApiConv = panic "ccallConvToInt CApiConv"
ccallConvToInt (PrimCallConv {}) = panic "ccallConvToInt PrimCallConv"
ccallConvToInt JavaScriptCallConv = panic "ccallConvToInt JavaScriptCallConv"
{-
Generate the gcc attribute corresponding to the given
calling convention (used by PprAbsC):
-}
ccallConvAttribute :: CCallConv -> SDoc
ccallConvAttribute StdCallConv = text "__attribute__((__stdcall__))"
ccallConvAttribute CCallConv = empty
ccallConvAttribute CApiConv = empty
ccallConvAttribute (PrimCallConv {}) = panic "ccallConvAttribute PrimCallConv"
ccallConvAttribute JavaScriptCallConv = panic "ccallConvAttribute JavaScriptCallConv"
type CLabelString = FastString -- A C label, completely unencoded
pprCLabelString :: CLabelString -> SDoc
pprCLabelString lbl = ftext lbl
isCLabelString :: CLabelString -> Bool -- Checks to see if this is a valid C label
isCLabelString lbl
= all ok (unpackFS lbl)
where
ok c = isAlphaNum c || c == '_' || c == '.'
-- The '.' appears in e.g. "foo.so" in the
        -- module part of an ExtName. Maybe it should be separate
-- Printing into C files:
instance Outputable CExportSpec where
ppr (CExportStatic str _) = pprCLabelString str
instance Outputable CCallSpec where
ppr (CCallSpec fun cconv safety)
= hcat [ ifPprDebug callconv, ppr_fun fun ]
where
callconv = text "{-" <> ppr cconv <> text "-}"
gc_suf | playSafe safety = text "_GC"
| otherwise = empty
ppr_fun (StaticTarget fn mPkgId isFun)
= text (if isFun then "__pkg_ccall"
else "__pkg_ccall_value")
<> gc_suf
<+> (case mPkgId of
Nothing -> empty
Just pkgId -> ppr pkgId)
<+> pprCLabelString fn
ppr_fun DynamicTarget
= text "__dyn_ccall" <> gc_suf <+> text "\"\""
-- The filename for a C header file
newtype Header = Header FastString
deriving (Eq, Data, Typeable)
instance Outputable Header where
ppr (Header h) = quotes $ ppr h
-- | A C type, used in CAPI FFI calls
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen' @'{-\# CTYPE'@,
-- 'ApiAnnotation.AnnHeader','ApiAnnotation.AnnVal',
-- 'ApiAnnotation.AnnClose' @'\#-}'@,
-- For details on above see note [Api annotations] in ApiAnnotation
data CType = CType SourceText -- Note [Pragma source text] in BasicTypes
(Maybe Header) -- header to include for this type
FastString -- the type itself
deriving (Data, Typeable)
instance Outputable CType where
ppr (CType _ mh ct) = hDoc <+> ftext ct
where hDoc = case mh of
Nothing -> empty
Just h -> ppr h
{-
************************************************************************
* *
\subsubsection{Misc}
* *
************************************************************************
-}
{-* Generated by DrIFT-v1.0 : Look, but Don't Touch. *-}
instance Binary ForeignCall where
put_ bh (CCall aa) = put_ bh aa
get bh = do aa <- get bh; return (CCall aa)
instance Binary Safety where
put_ bh PlaySafe = do
putByte bh 0
put_ bh PlayInterruptible = do
putByte bh 1
put_ bh PlayRisky = do
putByte bh 2
get bh = do
h <- getByte bh
case h of
0 -> do return PlaySafe
1 -> do return PlayInterruptible
_ -> do return PlayRisky
instance Binary CExportSpec where
put_ bh (CExportStatic aa ab) = do
put_ bh aa
put_ bh ab
get bh = do
aa <- get bh
ab <- get bh
return (CExportStatic aa ab)
instance Binary CCallSpec where
put_ bh (CCallSpec aa ab ac) = do
put_ bh aa
put_ bh ab
put_ bh ac
get bh = do
aa <- get bh
ab <- get bh
ac <- get bh
return (CCallSpec aa ab ac)
instance Binary CCallTarget where
put_ bh (StaticTarget aa ab ac) = do
putByte bh 0
put_ bh aa
put_ bh ab
put_ bh ac
put_ bh DynamicTarget = do
putByte bh 1
get bh = do
h <- getByte bh
case h of
0 -> do aa <- get bh
ab <- get bh
ac <- get bh
return (StaticTarget aa ab ac)
_ -> do return DynamicTarget
instance Binary CCallConv where
put_ bh CCallConv = do
putByte bh 0
put_ bh StdCallConv = do
putByte bh 1
put_ bh PrimCallConv = do
putByte bh 2
put_ bh CApiConv = do
putByte bh 3
put_ bh JavaScriptCallConv = do
putByte bh 4
get bh = do
h <- getByte bh
case h of
0 -> do return CCallConv
1 -> do return StdCallConv
2 -> do return PrimCallConv
3 -> do return CApiConv
_ -> do return JavaScriptCallConv
instance Binary CType where
put_ bh (CType s mh fs) = do put_ bh s
put_ bh mh
put_ bh fs
get bh = do s <- get bh
mh <- get bh
fs <- get bh
return (CType s mh fs)
instance Binary Header where
put_ bh (Header h) = put_ bh h
get bh = do h <- get bh
return (Header h)
| christiaanb/ghc | compiler/prelude/ForeignCall.hs | bsd-3-clause | 11,568 | 0 | 15 | 3,944 | 2,107 | 1,058 | 1,049 | 199 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Text.Parsec.String
-- Copyright : (c) Paolo Martini 2007
-- License : BSD-style (see the file libraries/parsec/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Make Strings an instance of 'Stream' with 'Char' token type.
--
-----------------------------------------------------------------------------
module Text.Parsec.String
( Parser, GenParser, parseFromFile
) where
import Text.Parsec.Error
import Text.Parsec.Prim
type Parser = Parsec String ()
type GenParser tok st = Parsec [tok] st
-- | @parseFromFile p filePath@ runs a string parser @p@ on the
-- input read from @filePath@ using 'Prelude.readFile'. Returns either a 'ParseError'
-- ('Left') or a value of type @a@ ('Right').
--
-- > main = do{ result <- parseFromFile numbers "digits.txt"
-- > ; case result of
-- > Left err -> print err
-- > Right xs -> print (sum xs)
-- > }
parseFromFile :: Parser a -> String -> IO (Either ParseError a)
parseFromFile p fname
= do input <- readFile fname
return (runP p () fname input)
| scott-fleischman/parsec | Text/Parsec/String.hs | bsd-2-clause | 1,256 | 0 | 10 | 281 | 148 | 90 | 58 | 10 | 1 |
module Fields where
data Point = P {x,y::Int} | Q {z::Int}
origin = P { x=0, y=0 }
h = P {x=1,y=1}
v = origin {y=1}
u = P {}
sq x = x*x
dist1 p = sq (x p) + sq (y p)
dist2 P{x=x,y=y} = sq x+sq y
--data Z = Z {z::Int}
data Thread s = T { name::String, status::s}
data Running = Running
data Runanble = Runnable (IO())
makeRunning t = t{status=Running}
| forste/haReFork | tools/hs2alfa/tests/Fields.hs | bsd-3-clause | 361 | 0 | 9 | 86 | 231 | 133 | 98 | 13 | 1 |
{-# LANGUAGE ViewPatterns, GADTs, FlexibleInstances, UndecidableInstances,
CPP #-}
#if __GLASGOW_HASKELL__ <= 708
{-# LANGUAGE OverlappingInstances #-}
{-# OPTIONS_GHC -fno-warn-unrecognised-pragmas #-}
#endif
{-# OPTIONS_GHC -fno-warn-orphans #-}
-----------------------------------------------------------------------------
-- |
-- Module : Language.Glambda.Pretty
-- Copyright : (C) 2015 Richard Eisenberg
-- License : BSD-style (see LICENSE)
-- Maintainer : Richard Eisenberg ([email protected])
-- Stability : experimental
--
-- Pretty-printing expressions. This allows reduction of code duplication
-- between unchecked and checked expressions.
--
----------------------------------------------------------------------------
module Language.Glambda.Pretty (
PrettyExp(..), defaultPretty,
Coloring, defaultColoring,
prettyVar, prettyLam, prettyApp, prettyArith, prettyIf, prettyFix
) where
import Language.Glambda.Token
import Language.Glambda.Type
import Language.Glambda.Util
import Text.PrettyPrint.ANSI.Leijen
lamPrec, appPrec, appLeftPrec, appRightPrec, ifPrec :: Prec
lamPrec = 1
appPrec = 9
appLeftPrec = 8.9
appRightPrec = 9
ifPrec = 1
opPrec, opLeftPrec, opRightPrec :: ArithOp ty -> Prec
opPrec (precInfo -> (x, _, _)) = x
opLeftPrec (precInfo -> (_, x, _)) = x
opRightPrec (precInfo -> (_, _, x)) = x
-- | Returns (overall, left, right) precedences for an 'ArithOp'
precInfo :: ArithOp ty -> (Prec, Prec, Prec)
precInfo Plus = (5, 4.9, 5)
precInfo Minus = (5, 4.9, 5)
precInfo Times = (6, 5.9, 6)
precInfo Divide = (6, 5.9, 6)
precInfo Mod = (6, 5.9, 6)
precInfo Less = (4, 4, 4)
precInfo LessE = (4, 4, 4)
precInfo Greater = (4, 4, 4)
precInfo GreaterE = (4, 4, 4)
precInfo Equals = (4, 4, 4)
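-- Added note (not from the original source): e.g. 'Times' binds tighter than
-- 'Plus' (6 vs. 5), and its slightly lower left precedence (5.9) means
-- left-nested chains print without parentheses, i.e. the operators render as
-- left-associative.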
-- | A function that changes a 'Doc's color
type ApplyColor = Doc -> Doc
-- | Information about coloring in de Bruijn indexes and binders
data Coloring = Coloring [ApplyColor]
[ApplyColor] -- ^ a stream of remaining colors to use,
-- and the colors used for bound variables
-- | A 'Coloring' for an empty context
defaultColoring :: Coloring
defaultColoring = Coloring all_colors []
where
all_colors = red : green : yellow : blue :
magenta : cyan : all_colors
-- | A class for expressions that can be pretty-printed
class Pretty exp => PrettyExp exp where
prettyExp :: Coloring -> Prec -> exp -> Doc
-- | Convenient implementation of 'pretty'
defaultPretty :: PrettyExp exp => exp -> Doc
defaultPretty = nest 2 . prettyExp defaultColoring topPrec
-- | Print a variable
prettyVar :: Coloring -> Int -> Doc
prettyVar (Coloring _ bound) n = (nthDefault id n bound) (char '#' <> int n)
-- | Print a lambda expression
prettyLam :: PrettyExp exp => Coloring -> Prec -> Maybe Ty -> exp -> Doc
prettyLam (Coloring (next : supply) existing) prec m_ty body
= maybeParens (prec >= lamPrec) $
fillSep [ char 'λ' <> next (char '#') <>
maybe empty (\ty -> text ":" <> pretty ty) m_ty <> char '.'
, prettyExp (Coloring supply (next : existing)) topPrec body ]
prettyLam _ _ _ _ = error "Infinite supply of colors ran out"
-- | Print an application
prettyApp :: (PrettyExp exp1, PrettyExp exp2)
=> Coloring -> Prec -> exp1 -> exp2 -> Doc
prettyApp coloring prec e1 e2
= maybeParens (prec >= appPrec) $
fillSep [ prettyExp coloring appLeftPrec e1
, prettyExp coloring appRightPrec e2 ]
-- | Print an arithmetic expression
prettyArith :: (PrettyExp exp1, PrettyExp exp2)
=> Coloring -> Prec -> exp1 -> ArithOp ty -> exp2 -> Doc
prettyArith coloring prec e1 op e2
= maybeParens (prec >= opPrec op) $
fillSep [ prettyExp coloring (opLeftPrec op) e1 <+> pretty op
, prettyExp coloring (opRightPrec op) e2 ]
-- | Print a conditional
prettyIf :: (PrettyExp exp1, PrettyExp exp2, PrettyExp exp3)
=> Coloring -> Prec -> exp1 -> exp2 -> exp3 -> Doc
prettyIf coloring prec e1 e2 e3
= maybeParens (prec >= ifPrec) $
fillSep [ text "if" <+> prettyExp coloring topPrec e1
, text "then" <+> prettyExp coloring topPrec e2
, text "else" <+> prettyExp coloring topPrec e3 ]
-- | Print a @fix@
prettyFix :: PrettyExp exp => Coloring -> Prec -> exp -> Doc
prettyFix coloring prec e
= maybeParens (prec >= appPrec) $
text "fix" <+> prettyExp coloring topPrec e
| ajnsit/glambda | src/Language/Glambda/Pretty.hs | bsd-3-clause | 4,454 | 0 | 14 | 981 | 1,188 | 651 | 537 | 77 | 1 |
module T6082_RULE where
-- Should warn
foo1 x = x
{-# RULES "foo1" forall x. foo1 x = x #-}
-- Should warn
foo2 x = x
{-# INLINE foo2 #-}
{-# RULES "foo2" forall x. foo2 x = x #-}
-- Should not warn
foo3 x = x
{-# NOINLINE foo3 #-}
{-# RULES "foo3" forall x. foo3 x = x #-}
| urbanslug/ghc | testsuite/tests/simplCore/should_compile/T6082-RULE.hs | bsd-3-clause | 278 | 0 | 5 | 67 | 36 | 23 | 13 | 9 | 1 |
module LilRender.Image.STB (stbLoadImage, stbWritePNG, stbWriteBMP, stbWriteTGA) where
import LilRender.Color
import LilRender.Image.Immutable
import qualified Data.Vector.Storable as V
import Foreign.C
import Foreign.ForeignPtr
import Foreign.Marshal
import Foreign.Ptr
import Foreign.Storable
foreign import ccall "stb/stb_image.h stbi_load" stbi_load :: CString -> Ptr CInt -> Ptr CInt -> Ptr CInt -> CInt -> IO (Ptr CUChar)
foreign import ccall "stb/stb_image.h &stbi_image_free" stbi_image_free :: FunPtr (Ptr CUChar -> IO ())
foreign import ccall "stb/stb_image_write.h stbi_write_png" stbi_write_png :: CString -> CInt -> CInt -> CInt -> Ptr CUChar -> CInt -> IO CInt
foreign import ccall "stb/stb_image_write.h stbi_write_bmp" stbi_write_bmp :: CString -> CInt -> CInt -> CInt -> Ptr CUChar -> IO CInt
foreign import ccall "stb/stb_image_write.h stbi_write_tga" stbi_write_tga :: CString -> CInt -> CInt -> CInt -> Ptr CUChar -> IO CInt
stbLoadImage :: FilePath -> IO Image
stbLoadImage path = do
cPath <- newCString path
widthPtr <- new 0
heightPtr <- new 0
nComponentsPtr <- new 0
dataPtr <- stbi_load cPath widthPtr heightPtr nComponentsPtr 3 -- 3 bytes per pixel (assumption!)
dataForeignPtr <- newForeignPtr stbi_image_free dataPtr
width <- fromIntegral <$> peek widthPtr :: IO Int
height <- fromIntegral <$> peek heightPtr :: IO Int
let storage = V.unsafeFromForeignPtr0 dataForeignPtr (width * height * 3)
free cPath
free widthPtr
free heightPtr
free nComponentsPtr
return $ Image (V.unsafeCast storage :: V.Vector RGBColor) width height
stbWritePNG :: FilePath -> Image -> IO ()
stbWritePNG path (Image storage width height) = do
cPath <- newCString path
let w = fromIntegral width :: CInt
let h = fromIntegral height :: CInt
withForeignPtr (fst $ V.unsafeToForeignPtr0 storage) (\pixBuf ->
stbi_write_png cPath w h 3 (castPtr pixBuf) (w * 3) -- bytes per row
)
free cPath
stbWriteBMP :: FilePath -> Image -> IO ()
stbWriteBMP path (Image storage width height) = do
cPath <- newCString path
let w = fromIntegral width :: CInt
let h = fromIntegral height :: CInt
withForeignPtr (fst $ V.unsafeToForeignPtr0 storage) $ stbi_write_bmp cPath w h 3 . castPtr
free cPath
stbWriteTGA :: FilePath -> Image -> IO ()
stbWriteTGA path (Image storage width height) = do
cPath <- newCString path
let w = fromIntegral width :: CInt
let h = fromIntegral height :: CInt
withForeignPtr (fst $ V.unsafeToForeignPtr0 storage) $ stbi_write_tga cPath w h 3 . castPtr
free cPath
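-- A minimal round-trip sketch (not part of the original module; the file
-- names are placeholders):
--
--   main :: IO ()
--   main = do
--     img <- stbLoadImage "input.png"
--     stbWritePNG "copy.png" img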
| SASinestro/lil-render | src/LilRender/Image/STB.hs | isc | 2,632 | 0 | 13 | 530 | 853 | 411 | 442 | 52 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Y2016.M11.D08.Solution where
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.Maybe (fromJust)
-- below import available from cabal (aeson)
import Data.Aeson
{--
So, last week we looked at a graph of twitter-users. NOICE! If you look at the
source code that generated the graph, it's a python script that makes a
request to the twitter API to get a list of the followers of the graphed
account.
Well. I happen to have a twitter API account and twitter application. Why don't
I just access these data myself and then we can work on the raw twitter (JSON)
data themselves.
Indeed!
So, today's Haskell exercise. In this directory, or at the URL:
https://github.com/geophf/1HaskellADay/blob/master/exercises/HAD/Y2016/M11/D08/1haskfollowersids.json.gz
is a gzipped list of twitter ids that follow 1HaskellADay as JSON.
I was able to get this JSON data with the following REST GET query:
https://api.twitter.com/1.1/followers/ids?screen_name=1HaskellADay
via my (o)authenticated twitter application.
Read in the JSON (after gunzipping it) and answer the below questions
--}
type TwitterId = Integer
data Tweeps = TIds { tweeps :: [TwitterId] } deriving Show
instance FromJSON Tweeps where
parseJSON (Object o) = TIds <$> o .: "ids"
followers :: FilePath -> IO Tweeps
followers = fmap (fromJust . decode) . BL.readFile
{--
1. How many followers does 1HaskellADay have?
*Y2016.M11.D08.Solution> let had = it
*Y2016.M11.D08.Solution> length (tweeps had) ~> 2118
2. What is the max TwitterID? Is it an Int-value? (ooh, tricky)
*Y2016.M11.D08.Solution> maximum (tweeps had) ~> 794184637249626112
3. What is the min TwitterID?
*Y2016.M11.D08.Solution> minimum (tweeps had) ~> 150
Don't answer this:
4. Trick question: what is 1HaskellADay's twitter ID?
--}
{--
We'll be looking at how to get screen_name from twitter id and twitter id from
screen_name throughout this week, as well as looking at social networks of
followers who follow follwers who ...
How shall we do that? The Twitter API allows summary queries, as the JSON
examined here, as well as in-depth queries, given a screen_name (from which you
can obtain the twitter ID) or a twitter id (from which you can obtain the
screen_name) and followers, as you saw in today's query.
--}
| geophf/1HaskellADay | exercises/HAD/Y2016/M11/D08/Solution.hs | mit | 2,314 | 0 | 9 | 377 | 128 | 77 | 51 | 11 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Control.Monad
import Data.Maybe
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TLE
import Options.Applicative
import Path
import System.Directory
import System.Process hiding (cwd)
import System.Exit
import Network.WebSockets
import AIChallenger.Channel
import AIChallenger.Exception
import AIChallenger.Types
main :: IO ()
main = do
cfg <- getConfigFromCommandlineFlags
runClient
(cfgServerHost cfg)
(cfgServerPort cfg)
("/play/" <> cfgName cfg)
(app (toFilePath (cfgLocalExecutable cfg)))
app :: FilePath -> Connection -> IO ()
app exe conn = waitForGame
where
waitForGame = do
putStrLn "Waiting for a game"
msg <- receiveDataMessage conn
case msg of
Text "GameStart" -> do
(Just hIn, Just hOut, _, procHandle) <-
createProcess
(proc exe [])
{ std_out = CreatePipe
, std_in = CreatePipe
, close_fds = True
, create_group = True
}
putStrLn "Game started"
play (inChannelFromHandle hOut) (outChannelFromHandle hIn)
putStrLn "Game finished"
catchAll (terminateProcess procHandle)
catchAll (void $ waitForProcess procHandle)
waitForGame
_ -> die ("Unexpected message from server: " <> show msg)
play fromBot toBot =
let go = do
msg <- receiveDataMessage conn
case msg of
Text "GameOver" -> return ()
Text "." -> do
Right () <- sendLine toBot "."
Right orders <- chReadLinesUntilDot fromBot
mapM_ (sendTextData conn) orders
sendDataMessage conn (Text ".")
go
Text x -> do
Right () <- sendLine toBot (TL.toStrict (TLE.decodeUtf8 x))
go
_ -> die ("Unexpected message from server: " <> show msg)
in go
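-- Summary of the wire protocol implemented above (added note, inferred from
-- the code rather than from a separate spec): the server sends "GameStart" to
-- begin a match, then "." to request the bot's orders for a turn; the client
-- relays the bot's lines back until the bot's terminating ".", and "GameOver"
-- ends the game. Any other text line from the server is forwarded verbatim to
-- the bot's stdin.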
data Config = Config
{ cfgServerHost :: String
, cfgServerPort :: Int
, cfgName :: String
, cfgLocalExecutable :: Path Abs File
} deriving Show
getConfigFromCommandlineFlags :: IO Config
getConfigFromCommandlineFlags = do
cwd <- parseAbsDir =<< getCurrentDirectory
let parseFileName fn = parseAbsFile fn <|> fmap (cwd </>) (parseRelFile fn)
config <- execParser (info (parseConfig parseFileName) (progDesc "ai-challenger-remote-play"))
let exe = toFilePath (cfgLocalExecutable config)
isExecutable <- executable <$> getPermissions exe
when (not isExecutable) $
die (exe <> " is not an executable file")
return config
parseConfig :: (String -> Maybe (Path Abs File)) -> Parser Config
parseConfig parseFileName = Config
<$> strOption (long "host")
<*> option auto (long "port")
<*> strOption (long "name")
<*> fmap
(fromMaybe (error "bad executable name") . parseFileName)
(strOption (long "executable")) | ethercrow/ai-challenger | remote-play/RemotePlay.hs | mit | 3,262 | 0 | 24 | 1,133 | 862 | 422 | 440 | 83 | 5 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import qualified Graphics.UI.FLTK.LowLevel.FL as FL
import Graphics.UI.FLTK.LowLevel.Fl_Types
import Graphics.UI.FLTK.LowLevel.FLTKHS
buttonCb :: Ref Button -> IO ()
buttonCb b' = do
l' <- getLabel b'
if (l' == "Hello world")
then setLabel b' "Goodbye world"
else setLabel b' "Hello world"
ui :: IO ()
ui = do
window <- windowNew
(Size (Width 115) (Height 100))
Nothing
Nothing
begin window
b' <- buttonNew
(Rectangle (Position (X 10) (Y 30)) (Size (Width 95) (Height 30)))
(Just "Hello world")
setLabelsize b' (FontSize 10)
setCallback b' buttonCb
end window
showWidget window
main :: IO ()
main = ui >> FL.run >> FL.flush
replMain :: IO ()
replMain = ui >> FL.replRun
| deech/fltkhs-hello-world | src/hello-world.hs | mit | 795 | 0 | 14 | 185 | 298 | 151 | 147 | 29 | 2 |
{-# LANGUAGE DeriveGeneric, GeneralizedNewtypeDeriving, OverloadedStrings #-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
module Imvu.Network.IstatdClient.Types
( Name
, Packet (..)
, PacketType (..)
, nameFromBytes
, bytesFromName
) where
import Control.DeepSeq (NFData)
import Data.Hashable (Hashable)
import Data.Semigroup (Semigroup ((<>), sconcat))
import Data.String (IsString (fromString))
import Data.Word (Word8)
import GHC.Generics (Generic)
import qualified Data.ByteString as BS
import qualified Data.List.NonEmpty as NonEmpty
newtype Name = Name { bytesFromName :: BS.ByteString }
deriving (Show, Eq, Ord, Generic, NFData)
data PacketType = Increment | Record
deriving (Show, Generic)
data Packet = Packet
{ packetType :: !PacketType
, packetName :: !Name
, packetSuffixes :: ![Name]
, packetValue :: !Double
} deriving (Show, Generic)
instance NFData Packet
instance NFData PacketType
instance Hashable Name
instance IsString Name where
fromString str = case nameFromBytes $ fromString str of
Just name -> name
Nothing -> error $ "counter name " ++ show str ++ " is invalid!"
{-# INLINE fromString #-}
instance Semigroup Name where
Name a <> Name b = Name $ BS.concat [a, ".", b]
{-# INLINE (<>) #-}
sconcat = Name . BS.intercalate "." . map bytesFromName . NonEmpty.toList
{-# INLINE sconcat #-}
nameFromBytes :: BS.ByteString -> Maybe Name
nameFromBytes bytes = if valid then Just $ Name bytes else Nothing
where
valid = BS.all validByte bytes &&
not adjacentPeriods &&
not leadingPeriod &&
not trailingPeriod
validByte byte =
isAsciiPeriod byte ||
isAsciiAlphaNum byte ||
isAsciiSeparator byte
adjacentPeriods = ".." `BS.isInfixOf` bytes
(leadingPeriod, trailingPeriod) = if not $ BS.null bytes
then (asciiPeriod == BS.head bytes, asciiPeriod == BS.last bytes)
else (False, False)
{-# INLINE nameFromBytes #-}
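-- Illustrative examples (not part of the original module):
--
--   bytesFromName ("gauges" <> "cpu") == "gauges.cpu"
--   nameFromBytes "foo..bar"          == Nothing   -- adjacent periods
--   nameFromBytes ".foo"              == Nothing   -- leading period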
asciiPeriod :: Word8
asciiPeriod = 46
isAsciiPeriod :: Word8 -> Bool
isAsciiPeriod = (== asciiPeriod)
{-# INLINE isAsciiPeriod #-}
isAsciiSeparator :: Word8 -> Bool
isAsciiSeparator byte =
byte == 95 || -- underscore
byte == 45 -- hyphen-minus
{-# INLINE isAsciiSeparator #-}
isAsciiAlphaNum :: Word8 -> Bool
isAsciiAlphaNum byte =
(byte >= 65 && byte <= 90) || -- A-Z
(byte >= 97 && byte <= 122) || -- a-z
(byte >= 48 && byte <= 57) -- 0-9
{-# INLINE isAsciiAlphaNum #-}
| imvu-open/hs-istatd-client | Imvu/Network/IstatdClient/Types.hs | mit | 2,527 | 0 | 11 | 570 | 690 | 384 | 306 | 78 | 3 |
module Parser where
import Diagrams
import Text.Parsec
import Text.Parsec.String
-- keywords = ["assign", "deref", "new", "while", "ifzero"]
-- keyword :: Parser String
-- keyword = choice $ map string keywords
ident :: Parser String
ident = (:) <$> letter <*> many alphaNum
number :: Parser Integer
number = read <$> many1 digit
numberExpr :: Parser Term
numberExpr = Number <$> number
var :: Parser Term
var = Var <$> ident
binOp :: Parser Term
binOp = parserZero -- ToDo
-- binOp = do
-- spaces
-- char '+'
-- spaces
-- lhs <- expr
-- spaces
-- rhs <- expr
-- spaces
-- return $ BinOp Plus lhs rhs
-- binOp = spaces *> char '+' *> return (BinOp Plus) <* spaces <*> expr *> spaces <*> expr
abs' :: Parser Term
abs' = parserZero -- ToDo
ifZero :: Parser Term
ifZero = parserZero -- ToDo
while :: Parser Term
while = parserZero -- ToDo
-- while = do
-- string "while"
-- spaces
-- c <- expr
-- spaces
-- string "do"
-- spaces
-- b <- expr
-- return $ While c b
deref :: Parser Term
deref = parserZero -- ToDo
assign :: Parser Term
assign = parserZero -- ToDo
new :: Parser Term
new = parserZero -- ToDo
parens :: Parser a -> Parser a
parens = between (char '(') (char ')')
expr :: Parser Term
expr = chainl1 expr' f where
f = do
spaces
char '+'
spaces
return $ BinOp Plus
expr' :: Parser Term
expr' = chainl1 expr'' (spaces *> return App <* spaces)
expr'' :: Parser Term
expr'' = parens expr <|> ifZero <|> while <|> deref <|>
assign <|> new <|> abs' <|> var <|> numberExpr
program :: Parser Term
program = foldr1 (BinOp Seq) <$>
(spaces *> sepBy1 expr (spaces *> char ';' <* spaces) <* eof)
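-- Rough usage sketch (not from the original file): with the grammar as it
-- stands (several productions are still parserZero ToDos), a simple input
-- parses as
--
--   parse program "" "1+2; f x"
--     ~ Right (BinOp Seq (BinOp Plus (Number 1) (Number 2))
--                        (App (Var "f") (Var "x")))
--
-- Note the lack of spaces around '+': the application separator in expr'
-- consumes trailing whitespace, which can interfere with spaced operators.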
| nightuser/summer-school-on-game-semantics-sra-compiler | app/Parser.hs | mit | 1,703 | 0 | 13 | 418 | 438 | 239 | 199 | 43 | 1 |
module Acronyms where
getAcronym xs = [ x | x <- xs, elem x ['A'..'Z'] ]
main :: IO ()
main = do
putStrLn ( getAcronym "Three Letter Acronym" )
putStrLn ( getAcronym "National Aeronautics and Space Administration" )
| Lyapunov/haskell-programming-from-first-principles | chapter_9/acronyms.hs | mit | 224 | 0 | 9 | 47 | 76 | 38 | 38 | 6 | 1 |
module InfoSpec (spec) where
import Test.Hspec
import System.Process
import Info (formatInfo)
import Interpreter (ghc)
spec :: Spec
spec = do
describe "formatInfo" $ do
it "formats --info output" $ do
info <- readProcess ghc ["--info"] ""
formatInfo (read info) `shouldBe` info
| sol/doctest | test/InfoSpec.hs | mit | 340 | 0 | 16 | 103 | 100 | 53 | 47 | 11 | 1 |
-- Problems/Problem015.hs
module Problems.Problem015 (p15) where
import Helpers.Numbers
main = print p15
p15 :: Integer
p15 = fact 40 `div` (fact 20 * fact 20)
| Sgoettschkes/learning | haskell/ProjectEuler/src/Problems/Problem015.hs | mit | 163 | 0 | 8 | 28 | 57 | 32 | 25 | 5 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module LambdaCmsOrg.Tutorial.Models where
import Data.Text (Text)
import Data.Time (UTCTime)
import Data.Typeable (Typeable)
import Database.Persist.Quasi
import Text.Markdown
import Yesod
import Yesod.Text.Markdown ()
share [mkPersist sqlSettings, mkMigrate "migrateLambdaCmsOrgTutorial"]
$(persistFileWith lowerCaseSettings "config/models")
| lambdacms/lambdacms.org | lambdacmsorg-tutorial/LambdaCmsOrg/Tutorial/Models.hs | mit | 766 | 0 | 8 | 246 | 96 | 58 | 38 | 17 | 0 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.SVGAnimatedNumberList
(js_getBaseVal, getBaseVal, js_getAnimVal, getAnimVal,
SVGAnimatedNumberList, castToSVGAnimatedNumberList,
gTypeSVGAnimatedNumberList)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
foreign import javascript unsafe "$1[\"baseVal\"]" js_getBaseVal ::
JSRef SVGAnimatedNumberList -> IO (JSRef SVGNumberList)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGAnimatedNumberList.baseVal Mozilla SVGAnimatedNumberList.baseVal documentation>
getBaseVal ::
(MonadIO m) => SVGAnimatedNumberList -> m (Maybe SVGNumberList)
getBaseVal self
= liftIO
((js_getBaseVal (unSVGAnimatedNumberList self)) >>= fromJSRef)
foreign import javascript unsafe "$1[\"animVal\"]" js_getAnimVal ::
JSRef SVGAnimatedNumberList -> IO (JSRef SVGNumberList)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGAnimatedNumberList.animVal Mozilla SVGAnimatedNumberList.animVal documentation>
getAnimVal ::
(MonadIO m) => SVGAnimatedNumberList -> m (Maybe SVGNumberList)
getAnimVal self
= liftIO
((js_getAnimVal (unSVGAnimatedNumberList self)) >>= fromJSRef) | plow-technologies/ghcjs-dom | src/GHCJS/DOM/JSFFI/Generated/SVGAnimatedNumberList.hs | mit | 1,958 | 12 | 11 | 252 | 471 | 287 | 184 | 33 | 1 |
module Language.Erlang.Algebra where
import Language.Erlang.Syntax
data ErlangAlgebra
pr -- 1. result for program
at -- 2. result for attribute
fn -- 3. result for function
bv -- 4. result for basic value
iop -- 5. result for infix op
exp -- 6. result for expressions
mat -- 7. result for match
pat -- 8. result for patterns
gua -- 9. result for guards
= ErlangAlgebra {
programF :: at -> [at] -> [at] -> [at] -> [fn] -> pr
, moduleF :: String -> at
, exportF :: [String] -> at
, importF :: String -> at
, defineF :: String -> bv -> at
, functionF :: String -> [pat] -> exp -> fn
, atomicLiteralF :: String -> bv
, stringLiteralF :: String -> bv
, numberLiteralF :: Integer -> bv
, processLiteralF :: String -> bv
, opLTF :: iop
, opLEqF :: iop
, opGTF :: iop
, opGEqF :: iop
, opEqF :: iop
, opNEqF :: iop
, opLAndF :: iop
, opLOrF :: iop
, opMulF :: iop
, opDivF :: iop
, opModF :: iop
, opSubF :: iop
, opBAndF :: iop
, opBXorF :: iop
, opBOrF :: iop
, opAddF :: iop
, infixExpF :: iop -> exp -> exp -> exp
, modExpF :: String -> String -> exp
, applyF :: exp -> [exp] -> exp
, callF :: exp -> exp -> exp
, caseF :: exp -> [mat] -> exp
, funAnonF :: [pat] -> exp -> exp
, receiveF :: [mat] -> exp
, ifF :: [mat] -> exp
, sendF :: exp -> exp -> exp
, seqF :: exp -> exp -> exp
, assignF :: pat -> exp -> exp
, expTF :: [exp] -> exp
, expLF :: [exp] -> exp
, expValF :: bv -> exp
, expVarF :: String -> exp
, recordCreateF :: String -> [(String, exp)] -> exp
, coercionF :: exp -> exp
, matchF :: pat -> Maybe gua -> exp -> mat
, patVarF :: String -> pat
, patTF :: [pat] -> pat
, patLF :: [pat] -> pat
, patValF :: bv -> pat
, guardValF :: bv -> gua
, guardVarF :: String -> gua
, guardCallF :: gua -> [gua] -> gua
, guardTF :: [gua] -> gua
, guardLF :: [gua] -> gua
}
| arnihermann/timedreb2erl | src/Language/Erlang/Algebra.hs | mit | 1,929 | 0 | 14 | 554 | 656 | 403 | 253 | 66 | 0 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TypeOperators #-}
module DSL
( FuckDSL(..)
, module Fuckdown
) where
import Coproduct
import Fuckdown
-- | The Brainfuck DSL as a functor coproduct of the different actions
-- available in Brainfuck.
newtype FuckDSL a
= FuckDSL
{ getDSL ::
( GoLeft
:+: GoRight
:+: Inc
:+: Dec
:+: Input
:+: Output
:+: Loop FuckDSL
) a
}
deriving (Functor)
instance Loop FuckDSL :<: FuckDSL where
inject = FuckDSL . inject
instance Output :<: FuckDSL where
inject = FuckDSL . InL . InR
instance Input :<: FuckDSL where
inject = FuckDSL . InL . InL . InR
instance Dec :<: FuckDSL where
inject = FuckDSL . InL . InL . InL . InR
instance Inc :<: FuckDSL where
inject = FuckDSL . InL . InL . InL . InL . InR
instance GoRight :<: FuckDSL where
inject = FuckDSL . InL . InL . InL . InL . InL . InR
instance GoLeft :<: FuckDSL where
inject = FuckDSL . InL . InL . InL . InL . InL . InL
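-- For reference (an illustrative sketch, not part of the exported API): each
-- 'inject' instance above wraps a constructor at its position in the
-- coproduct; e.g. for 'Inc':
--
-- > inject x == FuckDSL (InL (InL (InL (InL (InR x)))))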
| djeik/fuckdown2 | src/DSL.hs | mit | 1,159 | 0 | 15 | 351 | 299 | 163 | 136 | 35 | 0 |
{-# LANGUAGE PackageImports #-}
{-# OPTIONS_GHC -fno-warn-dodgy-exports -fno-warn-unused-imports #-}
-- | Reexports "Control.Monad.Compat"
-- from a globally unique namespace.
module Control.Monad.Compat.Repl (
module Control.Monad.Compat
) where
import "this" Control.Monad.Compat
| haskell-compat/base-compat | base-compat/src/Control/Monad/Compat/Repl.hs | mit | 284 | 0 | 5 | 31 | 28 | 21 | 7 | 5 | 0 |
{-# LANGUAGE OverloadedStrings,FlexibleInstances,QuasiQuotes,TemplateHaskell #-}
module Main where
import Prelude hiding(catch,lookup)
import Control.Applicative ((<$>))
import Control.Monad (liftM)
import Control.Exception
import Data.Foldable (fold)
import Data.Monoid (mappend,mempty)
import Data.Text
import Data.Time
import Data.Map (lookup)
import System.IO
import System.Locale (defaultTimeLocale)
import Network.HTTP.Conduit
import Data.Maybe
import qualified Data.Vector as DV
import Data.Aeson
import qualified Data.Aeson.Types as DAT
import qualified Data.ByteString.Char8 as BC
import Text.Regex
import Text.Pandoc
import Text.Pandoc.Builder
import Text.Hamlet
import Text.Blaze.Html.Renderer.String
import Types
import System.Console.GetOpt.Simple
type LoginName = String
type RepositoryName = String
{- Format: <user or orgs>/<repo> -}
getRepos :: Maybe FilePath -> IO [RepositoryName]
getRepos f = liftM Prelude.lines $ readFile $ fromMaybe "repositories" f
getPassword :: IO String
getPassword = do
putStr "Password: "
hFlush stdout
pass <- withEcho False getLine
putChar '\n'
return pass
withEcho :: Bool -> IO a -> IO a
withEcho echo action = do
old <- hGetEcho stdin
bracket_ (hSetEcho stdin echo) (hSetEcho stdin old) action
{- Format: login:pass -}
getAuth :: Maybe LoginName -> IO (Maybe BasicAuth)
getAuth Nothing = return Nothing
getAuth (Just l) = getPassword >>= (\p -> return $ Just $ BasicAuth l p)
{- Fetch the repository events JSON
   and parse it into pandoc Blocks -}
fetch :: Maybe BasicAuth -> String -> IO Blocks
fetch mauth url = do
(opts, _) <- getOptsArgs (makeOptions options) [] []
let filterDate = (lookup "date" opts >>= parseTime defaultTimeLocale "%F") :: Maybe Day
filterAuthor = liftM pack $ lookup "author" opts
request <- parseUrlWithAuth (toEventsApiUrl url) mauth
withManager $ \manager -> do
Response _ _ _ src <- httpLbs request manager
let objects = (fromJust (decode src :: Maybe Array))
events = DV.map (DAT.parseMaybe parseJSON) objects :: DV.Vector (Maybe Event)
bs = DV.map (\e -> toBlock (fromJust e) filterAuthor) $ DV.filter (\e -> isJust e && e `compareDate` filterDate) events
bs' = header 2 (str url) <> DV.foldl1 (\acc b -> acc <> b) bs
return bs'
where
compareDate Nothing _ = False
compareDate (Just event) mDate = (localDay $ utcToJstTime $ createdAt event) `justEql` mDate
parseUrlWithAuth u Nothing = parseUrl u
parseUrlWithAuth u (Just (BasicAuth l p)) = applyBasicAuth (BC.pack l) (BC.pack p) <$> parseUrl u
{- Compare only when the Maybe is not Nothing; otherwise always return True -}
justEql :: (Eq a) => a -> Maybe a -> Bool
justEql _ Nothing = True
justEql a (Just b) = a == b
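-- For example (illustrative only):
--
-- > justEql 1 Nothing  == True
-- > justEql 1 (Just 1) == True
-- > justEql 1 (Just 2) == False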
toBlock :: Event -> Maybe Text -> Blocks
toBlock (PushEvent _ r cs t) author =
case commitList of
[] -> mempty
xs -> para (str ("PushEvent to " ++ unpack r ++ " at " ++ show (utcToJstTime t))) <>
bulletList xs
where
commitList = flip Prelude.map (DV.toList $ DV.filter (\c -> name c `justEql` author) cs) $ \c ->
plain (str (unpack $ name c `mappend` ": " `mappend` comment c `mappend` " : ") `mappend` link (commitUrl c) "Go To Commit" (linkStr c))
where
linkStr = str . Prelude.take 7 . unpack . sha
utcToJstTime :: UTCTime -> LocalTime
utcToJstTime = utcToLocalTime (hoursToTimeZone 9)
{- Format: <user or orgs>/<repo> -}
toEventsApiUrl :: String -> String
toEventsApiUrl s = "https://api.github.com/repos/" ++ s ++ "/events"
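-- For example (illustrative only):
--
-- > toEventsApiUrl "joker1007/github-push-reporter"
-- >   == "https://api.github.com/repos/joker1007/github-push-reporter/events"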
options :: [(FlagMaker, String, Mode, String)]
options = [ (arg, "repositories", Optional, "Target Repositories (separated by comma)"),
(arg, "login", Optional, "Github login ID"),
(arg, "file", Optional, "Repository config (per line '<user>/<repository name>'"),
(arg, "output-format", Optional, "Output file format"),
(arg, "date", Optional, "Filter by date"),
(arg, "author", Optional, "Filter by author")
]
main :: IO()
main = do
(opts, _) <- getOptsArgs (makeOptions options) [] []
let maybeRepositories = lookup "repositories" opts
maybeLogin = lookup "login" opts
maybeConf = lookup "file" opts
format = lookup "output-format" opts
case maybeRepositories of
Nothing -> do
repos <- getRepos maybeConf
process repos maybeLogin $ outputFormat $ fromMaybe "html" format
Just r -> do
repos <- return $ splitRegex (mkRegex ",") r
process repos maybeLogin $ outputFormat $ fromMaybe "html" format
process :: [String] -> Maybe LoginName -> OutputFormat -> IO ()
process repos l f = do
auth <- getAuth l
bss <- mapM (fetch auth) repos
bs <- return $ fold bss
html <- return $ write (doc bs) f
withFile ("report" ++ formatExt f) WriteMode (\h -> hPutStr h html)
putStrLn $ "Output report" ++ formatExt f
write :: Pandoc -> OutputFormat -> String
write d HtmlFormat = writeHtmlString defaultWriterOptions {writerStandalone = True, writerTemplate = template} $ d
where template = renderHtml $(shamletFile "template/layout.hamlet")
write d MarkdownFormat = writeMarkdown defaultWriterOptions $ d
write d RSTFormat = writeRST defaultWriterOptions $ d
write d MediaWikiFormat = writeMediaWiki defaultWriterOptions $ d
| joker1007/github-push-reporter | src/Main.hs | mit | 5,260 | 0 | 20 | 1,036 | 1,780 | 921 | 859 | 112 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module ReactHaskell.Types where
import Data.Aeson
import Database.PostgreSQL.Simple.FromRow
import Database.PostgreSQL.Simple.ToField
import Database.PostgreSQL.Simple.ToRow
data Todo = Todo
{ todoId :: Integer
, todoDescription :: String
} deriving (Eq, Show)
instance FromRow Todo where
fromRow = Todo <$> field <*> field
instance ToRow Todo where
toRow Todo{..} = [ toField todoId
, toField todoDescription
]
instance FromJSON Todo where
parseJSON (Object o) = Todo <$> o .: "id"
<*> o .: "description"
parseJSON _ = mempty
instance ToJSON Todo where
toJSON Todo{..} = object [ "id" .= todoId
, "description" .= todoDescription
]
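-- For reference (illustrative only): with the instances above, a 'Todo'
-- round-trips through JSON of the shape
--
-- > {"id": 1, "description": "write docs"}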
| sestrella/react-haskell | src/ReactHaskell/Types.hs | mit | 915 | 0 | 9 | 312 | 206 | 114 | 92 | 23 | 0 |
module BlocVoting.Tally.Vote where
import BlocVoting.Tally.Voter
data Vote = Vote {
voteScalar :: Int
, voter :: Voter
, height :: Integer
, superseded :: Bool
}
deriving (Show, Eq)
| XertroV/blocvoting | src/BlocVoting/Tally/Vote.hs | mit | 196 | 0 | 8 | 43 | 57 | 36 | 21 | 8 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Data.API.CrunchBase.Person
( Person(..)
, Degree(..)
, Relationship(..)
, Milestone(..)
, Investment(..)
) where
import Data.API.CrunchBase.Response
import Data.API.CrunchBase.PersonQuery (PersonPermalink(..))
import qualified Data.API.CrunchBase.SearchResult as S
import Data.API.CrunchBase.Image
import Data.API.CrunchBase.FundingRound
import Data.API.CrunchBase.VideoEmbed
import Data.API.CrunchBase.ExternalLink
import Data.Time.FuzzyDate
import Data.Aeson
import Data.Aeson.Types (Parser)
import Data.Text (Text)
import Control.Applicative
data Person = Person { firstName :: Text
, lastName :: Text
, permalink :: PersonPermalink
, crunchbaseUrl :: Text
, homepageUrl :: Maybe Text
, birthplace :: Maybe Text
, twitterUsername :: Maybe Text
, blogUrl :: Maybe Text
, blogFeedUrl :: Maybe Text
, affiliationName :: Maybe Text
, bornDate :: Maybe FuzzyDate
, tagList :: Maybe Text
, aliasList :: Maybe Text
, createdAt :: Maybe Text
, updatedAt :: Maybe Text
, overview :: Maybe Text
, image :: Maybe Image
, degrees :: [Degree]
, relationships :: [Relationship]
, investments :: [Investment]
, milestones :: [Milestone]
, videoEmbeds :: [VideoEmbed]
, externalLinks :: [ExternalLink]
, webPresences :: [ExternalLink]
} deriving (Eq, Show)
instance FromJSON Person where
parseJSON (Object o) = Person
<$> o .: "first_name"
<*> o .: "last_name"
<*> o .: "permalink"
<*> o .: "crunchbase_url"
<*> o .:- "homepage_url"
<*> o .:- "birthplace"
<*> o .:- "twitter_username"
<*> o .:- "blog_url"
<*> o .:- "blog_feed_url"
<*> o .:- "affiliation_name"
<*> mkDate o "born_year" "born_month" "born_day"
<*> o .:- "tag_list"
<*> o .:- "alias_list"
<*> o .:- "created_at"
<*> o .:- "updated_at"
<*> o .:- "overview"
<*> o .:? "image"
<*> o .: "degrees"
<*> o .: "relationships"
<*> o .: "investments"
<*> o .: "milestones"
<*> o .: "video_embeds"
<*> o .: "external_links"
<*> o .: "web_presences"
data Degree = Degree { degreeType :: Maybe Text
, subject :: Maybe Text
, institution :: Maybe Text
, graduatedDate :: Maybe FuzzyDate
} deriving (Eq, Show)
instance FromJSON Degree where
parseJSON (Object o) = Degree
<$> o .:- "degree_type"
<*> o .:- "subject"
<*> o .:- "institution"
<*> mkDate o "graduated_year" "graduated_month" "graduated_day"
data Relationship = Relationship { isPast :: Maybe Bool
, title :: Maybe Text
, firm :: S.SearchResult
} deriving (Eq, Show)
instance FromJSON Relationship where
parseJSON (Object o) = Relationship
<$> o .:? "is_past"
<*> o .:- "title"
<*> ((o .: "firm") >>= mkFirm)
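-- | Parse the "firm" object of a relationship into a search result,
-- dispatching on its "type_of_entity" field (only "company" and
-- "financial_org" are handled below).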
mkFirm :: Value -> Parser S.SearchResult
mkFirm v@(Object o) = o .: "type_of_entity" >>= flip mkFirm' v
mkFirm' :: Text -> Value -> Parser S.SearchResult
mkFirm' "company" = S.mkCompany
mkFirm' "financial_org" = S.mkFinancialOrganization
data Investment = Investment { fundingRound :: FundingRound
} deriving (Eq, Show)
instance FromJSON Investment where
parseJSON (Object o) = Investment <$> o .: "funding_round"
data Milestone = Milestone { description :: Text
, stonedDate :: Maybe FuzzyDate
, sourceUrl :: Maybe Text
, sourceText :: Maybe Text
, sourceDescription :: Maybe Text
, stonedValue :: Maybe Value
, stonedValueType :: Maybe Value
, stonedAcquirer :: Maybe Value
} deriving (Eq, Show)
instance FromJSON Milestone where
parseJSON (Object o) = Milestone
<$> o .: "description"
<*> mkDate o "stoned_year" "stoned_month" "stoned_day"
<*> o .:- "source_url"
<*> o .:- "source_text"
<*> o .:- "source_description"
<*> o .:? "stoned_value"
<*> o .:? "stoned_value_type"
<*> o .:? "stoned_acquirer"
| whittle/crunchbase | Data/API/CrunchBase/Person.hs | mit | 5,526 | 0 | 52 | 2,493 | 1,122 | 624 | 498 | 118 | 1 |
module Encode
( packWithEncoding
, unpackWithEncoding
, fromStrict' ) where
import Data.Text.Encoding
import qualified Data.Text as T
import qualified Data.ByteString as P (ByteString) -- type name only
import qualified Data.ByteString as S
import qualified Data.ByteString.Internal as S
import qualified Data.ByteString.Unsafe as S
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Internal as BI
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Data.ByteString.Lazy.Internal as BLI
import Prelude
-- | Convert String to ByteString(Strict) with Encoding.
packWithEncoding :: String -> B.ByteString
packWithEncoding = encodeUtf8 . T.pack
-- | Convert ByteString to String with Encoding
unpackWithEncoding :: B.ByteString -> String
unpackWithEncoding = T.unpack . decodeUtf8
-- | /O(1)/ Convert a strict 'ByteString' into a lazy 'ByteString'.
--
-- Sadly, a hack...
--
-- With bytestring 0.9.x, no Lazy \<-\> Strict conversion function is provided,
-- so this is written by referring to bytestring-0.10.x.
fromStrict' :: P.ByteString -> BL.ByteString
fromStrict' bs | S.null bs = BLI.Empty
| otherwise = BLI.Chunk bs BLI.Empty | cosmo0920/file-monitor-hs | Encode.hs | mit | 1,195 | 0 | 9 | 176 | 215 | 139 | 76 | 22 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-docdb-dbcluster.html
module Stratosphere.Resources.DocDBDBCluster where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.Tag
-- | Full data type definition for DocDBDBCluster. See 'docDBDBCluster' for a
-- more convenient constructor.
data DocDBDBCluster =
DocDBDBCluster
{ _docDBDBClusterAvailabilityZones :: Maybe (ValList Text)
, _docDBDBClusterBackupRetentionPeriod :: Maybe (Val Integer)
, _docDBDBClusterDBClusterIdentifier :: Maybe (Val Text)
, _docDBDBClusterDBClusterParameterGroupName :: Maybe (Val Text)
, _docDBDBClusterDBSubnetGroupName :: Maybe (Val Text)
, _docDBDBClusterEngineVersion :: Maybe (Val Text)
, _docDBDBClusterKmsKeyId :: Maybe (Val Text)
, _docDBDBClusterMasterUserPassword :: Maybe (Val Text)
, _docDBDBClusterMasterUsername :: Maybe (Val Text)
, _docDBDBClusterPort :: Maybe (Val Integer)
, _docDBDBClusterPreferredBackupWindow :: Maybe (Val Text)
, _docDBDBClusterPreferredMaintenanceWindow :: Maybe (Val Text)
, _docDBDBClusterSnapshotIdentifier :: Maybe (Val Text)
, _docDBDBClusterStorageEncrypted :: Maybe (Val Bool)
, _docDBDBClusterTags :: Maybe [Tag]
, _docDBDBClusterVpcSecurityGroupIds :: Maybe (ValList Text)
} deriving (Show, Eq)
instance ToResourceProperties DocDBDBCluster where
toResourceProperties DocDBDBCluster{..} =
ResourceProperties
{ resourcePropertiesType = "AWS::DocDB::DBCluster"
, resourcePropertiesProperties =
hashMapFromList $ catMaybes
[ fmap (("AvailabilityZones",) . toJSON) _docDBDBClusterAvailabilityZones
, fmap (("BackupRetentionPeriod",) . toJSON) _docDBDBClusterBackupRetentionPeriod
, fmap (("DBClusterIdentifier",) . toJSON) _docDBDBClusterDBClusterIdentifier
, fmap (("DBClusterParameterGroupName",) . toJSON) _docDBDBClusterDBClusterParameterGroupName
, fmap (("DBSubnetGroupName",) . toJSON) _docDBDBClusterDBSubnetGroupName
, fmap (("EngineVersion",) . toJSON) _docDBDBClusterEngineVersion
, fmap (("KmsKeyId",) . toJSON) _docDBDBClusterKmsKeyId
, fmap (("MasterUserPassword",) . toJSON) _docDBDBClusterMasterUserPassword
, fmap (("MasterUsername",) . toJSON) _docDBDBClusterMasterUsername
, fmap (("Port",) . toJSON) _docDBDBClusterPort
, fmap (("PreferredBackupWindow",) . toJSON) _docDBDBClusterPreferredBackupWindow
, fmap (("PreferredMaintenanceWindow",) . toJSON) _docDBDBClusterPreferredMaintenanceWindow
, fmap (("SnapshotIdentifier",) . toJSON) _docDBDBClusterSnapshotIdentifier
, fmap (("StorageEncrypted",) . toJSON) _docDBDBClusterStorageEncrypted
, fmap (("Tags",) . toJSON) _docDBDBClusterTags
, fmap (("VpcSecurityGroupIds",) . toJSON) _docDBDBClusterVpcSecurityGroupIds
]
}
-- | Constructor for 'DocDBDBCluster' containing required fields as arguments.
docDBDBCluster
:: DocDBDBCluster
docDBDBCluster =
DocDBDBCluster
{ _docDBDBClusterAvailabilityZones = Nothing
, _docDBDBClusterBackupRetentionPeriod = Nothing
, _docDBDBClusterDBClusterIdentifier = Nothing
, _docDBDBClusterDBClusterParameterGroupName = Nothing
, _docDBDBClusterDBSubnetGroupName = Nothing
, _docDBDBClusterEngineVersion = Nothing
, _docDBDBClusterKmsKeyId = Nothing
, _docDBDBClusterMasterUserPassword = Nothing
, _docDBDBClusterMasterUsername = Nothing
, _docDBDBClusterPort = Nothing
, _docDBDBClusterPreferredBackupWindow = Nothing
, _docDBDBClusterPreferredMaintenanceWindow = Nothing
, _docDBDBClusterSnapshotIdentifier = Nothing
, _docDBDBClusterStorageEncrypted = Nothing
, _docDBDBClusterTags = Nothing
, _docDBDBClusterVpcSecurityGroupIds = Nothing
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-docdb-dbcluster.html#cfn-docdb-dbcluster-availabilityzones
ddbdbcAvailabilityZones :: Lens' DocDBDBCluster (Maybe (ValList Text))
ddbdbcAvailabilityZones = lens _docDBDBClusterAvailabilityZones (\s a -> s { _docDBDBClusterAvailabilityZones = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-docdb-dbcluster.html#cfn-docdb-dbcluster-backupretentionperiod
ddbdbcBackupRetentionPeriod :: Lens' DocDBDBCluster (Maybe (Val Integer))
ddbdbcBackupRetentionPeriod = lens _docDBDBClusterBackupRetentionPeriod (\s a -> s { _docDBDBClusterBackupRetentionPeriod = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-docdb-dbcluster.html#cfn-docdb-dbcluster-dbclusteridentifier
ddbdbcDBClusterIdentifier :: Lens' DocDBDBCluster (Maybe (Val Text))
ddbdbcDBClusterIdentifier = lens _docDBDBClusterDBClusterIdentifier (\s a -> s { _docDBDBClusterDBClusterIdentifier = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-docdb-dbcluster.html#cfn-docdb-dbcluster-dbclusterparametergroupname
ddbdbcDBClusterParameterGroupName :: Lens' DocDBDBCluster (Maybe (Val Text))
ddbdbcDBClusterParameterGroupName = lens _docDBDBClusterDBClusterParameterGroupName (\s a -> s { _docDBDBClusterDBClusterParameterGroupName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-docdb-dbcluster.html#cfn-docdb-dbcluster-dbsubnetgroupname
ddbdbcDBSubnetGroupName :: Lens' DocDBDBCluster (Maybe (Val Text))
ddbdbcDBSubnetGroupName = lens _docDBDBClusterDBSubnetGroupName (\s a -> s { _docDBDBClusterDBSubnetGroupName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-docdb-dbcluster.html#cfn-docdb-dbcluster-engineversion
ddbdbcEngineVersion :: Lens' DocDBDBCluster (Maybe (Val Text))
ddbdbcEngineVersion = lens _docDBDBClusterEngineVersion (\s a -> s { _docDBDBClusterEngineVersion = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-docdb-dbcluster.html#cfn-docdb-dbcluster-kmskeyid
ddbdbcKmsKeyId :: Lens' DocDBDBCluster (Maybe (Val Text))
ddbdbcKmsKeyId = lens _docDBDBClusterKmsKeyId (\s a -> s { _docDBDBClusterKmsKeyId = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-docdb-dbcluster.html#cfn-docdb-dbcluster-masteruserpassword
ddbdbcMasterUserPassword :: Lens' DocDBDBCluster (Maybe (Val Text))
ddbdbcMasterUserPassword = lens _docDBDBClusterMasterUserPassword (\s a -> s { _docDBDBClusterMasterUserPassword = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-docdb-dbcluster.html#cfn-docdb-dbcluster-masterusername
ddbdbcMasterUsername :: Lens' DocDBDBCluster (Maybe (Val Text))
ddbdbcMasterUsername = lens _docDBDBClusterMasterUsername (\s a -> s { _docDBDBClusterMasterUsername = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-docdb-dbcluster.html#cfn-docdb-dbcluster-port
ddbdbcPort :: Lens' DocDBDBCluster (Maybe (Val Integer))
ddbdbcPort = lens _docDBDBClusterPort (\s a -> s { _docDBDBClusterPort = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-docdb-dbcluster.html#cfn-docdb-dbcluster-preferredbackupwindow
ddbdbcPreferredBackupWindow :: Lens' DocDBDBCluster (Maybe (Val Text))
ddbdbcPreferredBackupWindow = lens _docDBDBClusterPreferredBackupWindow (\s a -> s { _docDBDBClusterPreferredBackupWindow = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-docdb-dbcluster.html#cfn-docdb-dbcluster-preferredmaintenancewindow
ddbdbcPreferredMaintenanceWindow :: Lens' DocDBDBCluster (Maybe (Val Text))
ddbdbcPreferredMaintenanceWindow = lens _docDBDBClusterPreferredMaintenanceWindow (\s a -> s { _docDBDBClusterPreferredMaintenanceWindow = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-docdb-dbcluster.html#cfn-docdb-dbcluster-snapshotidentifier
ddbdbcSnapshotIdentifier :: Lens' DocDBDBCluster (Maybe (Val Text))
ddbdbcSnapshotIdentifier = lens _docDBDBClusterSnapshotIdentifier (\s a -> s { _docDBDBClusterSnapshotIdentifier = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-docdb-dbcluster.html#cfn-docdb-dbcluster-storageencrypted
ddbdbcStorageEncrypted :: Lens' DocDBDBCluster (Maybe (Val Bool))
ddbdbcStorageEncrypted = lens _docDBDBClusterStorageEncrypted (\s a -> s { _docDBDBClusterStorageEncrypted = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-docdb-dbcluster.html#cfn-docdb-dbcluster-tags
ddbdbcTags :: Lens' DocDBDBCluster (Maybe [Tag])
ddbdbcTags = lens _docDBDBClusterTags (\s a -> s { _docDBDBClusterTags = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-docdb-dbcluster.html#cfn-docdb-dbcluster-vpcsecuritygroupids
ddbdbcVpcSecurityGroupIds :: Lens' DocDBDBCluster (Maybe (ValList Text))
ddbdbcVpcSecurityGroupIds = lens _docDBDBClusterVpcSecurityGroupIds (\s a -> s { _docDBDBClusterVpcSecurityGroupIds = a })
| frontrowed/stratosphere | library-gen/Stratosphere/Resources/DocDBDBCluster.hs | mit | 9,040 | 0 | 14 | 968 | 1,551 | 875 | 676 | 100 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
module Keystone.Web.Project.Types
where
import Data.Data (Typeable)
data ProjectCreateRequest = ProjectCreateRequest
{ description :: Maybe String
, domainId :: Maybe String
, name :: String
, enabled :: Maybe Bool
} deriving (Show, Read, Eq, Ord, Typeable)
| VictorDenisov/keystone | src/Keystone/Web/Project/Types.hs | gpl-2.0 | 440 | 0 | 9 | 186 | 84 | 49 | 35 | 9 | 0 |
module Text.Pit.Types (
-- general compiler data
Message,
showMessage,
-- Lexer
TokenType(..),
Token,
getTokenType,
getTokenPosition,
-- Parser
Term(..),
Definition(..),
Statement(..),
showTerm,
showDefinition,
showStatement,
-- Unification
Substitution,
emptySubstitution,
showSubstitution,
lookupSubstitution,
) where
import Data.List (intercalate, isPrefixOf)
import Data.Maybe (isNothing, listToMaybe, catMaybes)
import Prelude hiding (showList)
import qualified Data.Map as Map -- TODO: use Strict version as per [recommendation](http://hackage.haskell.org/package/containers-0.5.5.1/docs/Data-Map-Lazy.html)?
import qualified Data.Set as Set
-- A message is a string accompanied by the line and column where it originated
type Message = (String, Int, Int)
showMessage :: Message -> String
showMessage (s, l, c) = show l ++ ":" ++ show c ++ ": " ++ s
-- The token types understood by the lexer
data TokenType = TParenOpen -- (
| TParenClose -- )
| TComma -- ,
| TDot -- .
| TImpliedBy -- :-
| TQuery -- ?-
| TEqual -- =
| TBracketOpen -- [
| TBracketClose -- ]
| TBar -- |
| TOr -- ;
| TWhiteSpace -- anything accepted by isSpace and comments
| TVariable String -- A variable name matching [A-Z][A-Za-z0-9_]* | _[A-Za-z0-9_]\+
| TWildCard -- _
| TAtom String -- An atom matching '\([^']\|''\)*' | [a-z][A-Za-z0-9_]* | graphic\+
-- where graphic characters are one of #&*+-./:<=>?@\^~
-- See: http://www.amzi.com/manuals/amzi/pro/ref_terms.htm
| TEnd -- End of the stream
deriving (Show, Eq)
-- A token is a token type along with information such as line number and column number
type Token = (TokenType, Int, Int)
getTokenType :: Token -> TokenType
getTokenType (t, _, _) = t
getTokenPosition :: Token -> String
getTokenPosition (_, l, c) = show l ++ ":" ++ show c
-- A term is a structure, variable, wildcard or a list.
data Term = Structure String [Term] -- A structure is an atom with a set of terms as arguments
-- Note: A list is a structure with name "[" and contains a head and a tail
| Assignable String String -- An assignable is either a variable or a wildcard ("_")
-- Note: The second string is used during (co)SLD to give new names to variables for unification
deriving (Show, Eq)
-- A definition is a combination of tokens that defines an atom; it consists of requisites (terms or unifications) that are "and"ed or "or"ed together.
data Definition = Requisite Term | Unification Term Term | Or Definition Definition | And Definition Definition | Fact
deriving (Show, Eq)
-- A statement is either a declaration or a query, which is one processing unit
data Statement = Declaration Term Definition -- A declaration is a definition for a structure. Term is used here for head,
-- but other types of terms are invalid
| Query Definition -- A query has the same form of a definition
deriving (Show, Eq)
showList :: [Term] -> String
showList [] = ""
showList [x, Structure "[" []] = showTerm x
showList [x, Structure "[" xs] = showTerm x ++ ", " ++ showList xs
showList [x, y] = showTerm x ++ "|" ++ showTerm y
showList ts = error $ "internal error: bad list format (list arguments are always 0 or 2, but\n"
++ " this list has " ++ show (length ts) ++ " arguments.\n"
++ " The list arguments are: " ++ show ts
showTerm :: Term -> String
showTerm (Structure "[" ts) = "[" ++ showList ts ++ "]"
showTerm (Structure s []) = s
showTerm (Structure s ts) = s ++ "(" ++ (intercalate ", " . map showTerm $ ts) ++ ")"
showTerm (Assignable _ s) = s
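-- For example (illustrative only, using the list encoding described above):
--
-- > showTerm (Structure "f" [Assignable "X" "X"])                 == "f(X)"
-- > showTerm (Structure "[" [Structure "a" [], Structure "[" []]) == "[a]"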
showDefinition :: Definition -> String
showDefinition (Requisite t) = showTerm t
showDefinition (Unification t1 t2) = showTerm t1 ++ " = " ++ showTerm t2
showDefinition (Or d1 d2) = "(" ++ showDefinition d1 ++ "; " ++ showDefinition d2 ++ ")"
showDefinition (And d1 d2) = showDefinition d1 ++ ", " ++ showDefinition d2
showDefinition Fact = ""
showStatement :: Statement -> String
showStatement s = case s of
Declaration t Fact -> showTerm t
Declaration t d -> showTerm t ++ " :- " ++ showDefinition d
Query d -> "?- " ++ showDefinition d
++ "."
-- A substitution resulting from unification maps each set of equivalent variables to the Structure term (if any) they are bound to
type Substitution = Map.Map (Set.Set String) (Maybe Term)
emptySubstitution :: Substitution
emptySubstitution = Map.empty
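-- For instance (a sketch of the intended encoding): a substitution in which X
-- and Y are equivalent and bound to the atom @a@ could be represented as
--
-- > Map.fromList [(Set.fromList ["X", "Y"], Just (Structure "a" []))]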
showSubstitution :: Substitution -> String
showSubstitution = (\x -> if null x then "true" else x)
. intercalate ",\n"
-- convert equivalent variables to chain of equals and possibly the last one equal to the resulting structure
. map (\(k, v) -> intercalate ", " . filter (not . null) $ chainEqual k:lastEqual (last k) v)
. varsToShow
where
chainEqual :: [String] -> String
chainEqual k = intercalate ", " . map (\(x, y) -> x ++ " = " ++ y) $ zip k (tail k)
lastEqual :: String -> (Maybe Term) -> [String]
lastEqual _ Nothing = []
lastEqual v (Just t) = [v ++ " = " ++ showTerm t]
getTermVars :: Term -> Set.Set String
getTermVars (Assignable _ s) = if isPrefixOf "$" s then Set.singleton s else Set.empty
getTermVars (Structure _ args) = foldr Set.union Set.empty . map getTermVars $ args
onlyExternalVars :: Substitution -> Set.Set String
-- filter out wildcards and internally renamed variables and then filter out empty resulting sets or sets with only one unexpanded single variable.
-- Put all the remaining variables together
onlyExternalVars = foldr (\(k, _) a -> Set.union k a) Set.empty
. filter (\(k, v) -> not $ Set.null k || Set.size k == 1 && isNothing v)
. Map.foldrWithKey (\k v a -> (Set.filter (not . isPrefixOf "$") k, v):a) []
necessaryVars :: Substitution -> Set.Set String -> Set.Set String -> Set.Set String
-- take the onlyExternalVars, and check what internal variables they depend on. Then for the newly found variables repeat the
-- process until no new variables are found.
necessaryVars subst vars result = if Set.null news then result else newsRecursive
where
terms = catMaybes . Map.foldrWithKey (\_ v a -> v:a) [] . Map.filterWithKey (\k _ -> not . Set.null $ Set.intersection k vars) $ subst
deps = foldr Set.union Set.empty . map getTermVars $ terms
news = Set.difference deps result
newsRecursive = necessaryVars subst news (Set.union result news)
varsToShow :: Substitution -> [([String], Maybe Term)]
-- reduce the substitutions to a list, filtering out unnecessary variables and then
-- filter out empty resulting sets or sets with only one unexpanded single variable.
varsToShow subst = filter (\(k, v) -> not $ null k || length k == 1 && isNothing v)
. Map.foldrWithKey (\k v a -> (Set.toAscList . Set.filter (\s -> Set.member s necessaries) $ k, v):a) [] $ subst
where
exts = onlyExternalVars subst
necessaries = necessaryVars subst exts exts
lookupSubstitution :: String -> Substitution -> Maybe (Set.Set String, Maybe Term)
lookupSubstitution s = listToMaybe . Map.toList . Map.filterWithKey (\k _ -> Set.member s k)
| ShabbyX/pit | src/Text/Pit/Types.hs | gpl-2.0 | 8,287 | 0 | 20 | 2,653 | 1,904 | 1,025 | 879 | 113 | 6 |
{-# LANGUAGE Arrows, FlexibleContexts, NoMonomorphismRestriction, RankNTypes, TypeOperators #-}
{- The Muddy Children puzzle in circuits.
- Copyright : (C)opyright 2009-2011 peteg42 at gmail dot com
- License : GPL (see COPYING for details)
-
- ghci -package ADHOC MuddyChildren_clock.hs
-
- dot kautos
- test_children_forget
-
-}
module MuddyChildren_clock where
-------------------------------------------------------------------
-- Dependencies.
-------------------------------------------------------------------
import Prelude hiding ( id, (.) )
import ADHOC
import ADHOC.NonDet
import ADHOC.ModelChecker.CTL
import ADHOC.Knowledge
-------------------------------------------------------------------
-- Parameters.
-------------------------------------------------------------------
type NumChildren = Three
num_children :: Integer
num_children = c2num (undefined :: NumChildren)
childName :: Integer -> String
childName i = "child_" ++ show i
dirtyP :: Integer -> String
dirtyP i = "child_" ++ show i ++ "_is_dirty"
dirtyProbe :: ArrowProbe (~>) (B (~>)) => Integer -> B (~>) ~> B (~>)
dirtyProbe i = probeA (dirtyP i)
-------------------------------------------------------------------
-- The children.
-------------------------------------------------------------------
-- | Child /i/ can observe the dirtiness of all the other children,
-- but not herself.
--
-- Each child simply runs a KBP.
childAs = mkSizedListf (\i -> (childName i, childInitObs i, childA i))
where
childA i = kTest (childName i `knows_hat` dirtyP i)
childInitObs i = second (mapSLn (\j -> if i == j then zeroA else id))
-------------------------------------------------------------------
-- Muddy children top-level: environment/father.
-------------------------------------------------------------------
environment = proc () ->
do -- Initially: non-deterministically choose some children to be
-- dirty and fix these for all time.
d <- nondetLatchAC trueA -< ()
mapSLn dirtyProbe -< d
anyDirty <- disjoinSL -< d
-- Initially the children say they do not know whether they are
-- muddy (they have not been asked).
rec acts <- (| delayAC (replicateSL <<< falseA -< ())
(| (broadcast childAs)
(returnA -< (anyDirty, d `asLengthOf` acts))
(returnA -< acts) |) |)
idSL (undefined :: NumChildren) -< d
returnA -< (anyDirty, d, acts)
-------------------------------------------------------------------
-- Propositions.
-------------------------------------------------------------------
all_children_say_yesp = proc (anyDirty, d, acts) ->
conjoinSL -< acts
props = proc x ->
do all_children_say_yesv <- all_children_say_yesp -< x
returnA -< (all_children_say_yesv)
cprops = environment >>> props
-------------------------------------------------------------------
-- Synthesis and model checking.
-------------------------------------------------------------------
-- Synthesis using the clock semantics
-- Just (kautos, m, all_children_say_yes) = clockSynth MinNone cprops
-- Just (kautos, m, all_children_say_yes) = clockSynth MinBisim cprops
Just (kautos, m, all_children_say_yes) = clockSynth MinSTAMINA cprops
ctlM = mkCTLModel m
-- | The muddiness of the children is constant.
test_muddiness_constant =
isOK (mc ctlM (conjoin [ ag (p <-> ax p)
| i <- [1 .. num_children]
, let p = probe (dirtyP i) ]))
-- | All children eventually say 'yes'. Not true for the clock semantics.
test_children_eventually_say_yes =
isFailure (mc ctlM (af (prop all_children_say_yes)))
-- | Clock semantics: once the children say 'yes', they immediately
-- forget they knew their muddiness.
test_children_forget =
isOK (mc ctlM (ag (prop all_children_say_yes --> ax (ax (neg (prop all_children_say_yes))))))
| peteg/ADHOC | Apps/MuddyChildren/MuddyChildren_clock.hs | gpl-2.0 | 3,937 | 7 | 18 | 728 | 719 | 394 | 325 | -1 | -1 |
module ProgramOptions
(
getHomeDir,
checkHomeDir
)
where
import System.Environment
import System.Directory
import System.FilePath
getHomeDir :: IO FilePath
getHomeDir = do
userHomeDir <- getHomeDirectory
return $ userHomeDir </> ".SrcGen"
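-- For example (illustrative only): on a typical Linux system, 'getHomeDir'
-- yields something like "/home/alice/.SrcGen".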
checkHomeDir :: IO ()
checkHomeDir = do
homeDir <- getHomeDir
createDirectoryIfMissing True homeDir
| thomkoehler/SrcGen | src/ProgramOptions.hs | gpl-2.0 | 390 | 0 | 8 | 92 | 88 | 46 | 42 | 15 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for ganeti-htools.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Test.Ganeti.OpCodes
( testOpCodes
, OpCodes.OpCode(..)
) where
import Test.HUnit as HUnit
import Test.QuickCheck as QuickCheck
import Control.Applicative
import Control.Monad
import Data.Char
import Data.List
import qualified Data.Map as Map
import qualified Text.JSON as J
import Text.Printf (printf)
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Test.Ganeti.Types ()
import Test.Ganeti.Query.Language ()
import Ganeti.BasicTypes
import qualified Ganeti.Constants as C
import qualified Ganeti.OpCodes as OpCodes
import Ganeti.Types
import Ganeti.OpParams
import Ganeti.JSON
{-# ANN module "HLint: ignore Use camelCase" #-}
-- * Arbitrary instances
instance (Ord k, Arbitrary k, Arbitrary a) => Arbitrary (Map.Map k a) where
arbitrary = Map.fromList <$> arbitrary
arbitraryOpTagsGet :: Gen OpCodes.OpCode
arbitraryOpTagsGet = do
kind <- arbitrary
  OpCodes.OpTagsGet kind <$> arbitrary <*> genOpCodesTagName kind
arbitraryOpTagsSet :: Gen OpCodes.OpCode
arbitraryOpTagsSet = do
kind <- arbitrary
OpCodes.OpTagsSet kind <$> genTags <*> genOpCodesTagName kind
arbitraryOpTagsDel :: Gen OpCodes.OpCode
arbitraryOpTagsDel = do
kind <- arbitrary
OpCodes.OpTagsDel kind <$> genTags <*> genOpCodesTagName kind
$(genArbitrary ''OpCodes.ReplaceDisksMode)
$(genArbitrary ''DiskAccess)
$(genArbitrary ''ImportExportCompression)
instance Arbitrary OpCodes.DiskIndex where
arbitrary = choose (0, C.maxDisks - 1) >>= OpCodes.mkDiskIndex
instance Arbitrary INicParams where
arbitrary = INicParams <$> genMaybe genNameNE <*> genMaybe genName <*>
genMaybe genNameNE <*> genMaybe genNameNE <*>
genMaybe genNameNE <*> genMaybe genName <*>
genMaybe genNameNE
instance Arbitrary IDiskParams where
arbitrary = IDiskParams <$> arbitrary <*> arbitrary <*>
genMaybe genNameNE <*> genMaybe genNameNE <*>
genMaybe genNameNE <*> genMaybe genNameNE <*>
genMaybe genNameNE <*> arbitrary <*> genAndRestArguments
instance Arbitrary RecreateDisksInfo where
arbitrary = oneof [ pure RecreateDisksAll
, RecreateDisksIndices <$> arbitrary
, RecreateDisksParams <$> arbitrary
]
instance Arbitrary DdmOldChanges where
arbitrary = oneof [ DdmOldIndex <$> arbitrary
, DdmOldMod <$> arbitrary
]
instance (Arbitrary a) => Arbitrary (SetParamsMods a) where
arbitrary = oneof [ pure SetParamsEmpty
, SetParamsDeprecated <$> arbitrary
, SetParamsNew <$> arbitrary
]
instance Arbitrary ExportTarget where
arbitrary = oneof [ ExportTargetLocal <$> genNodeNameNE
, ExportTargetRemote <$> pure []
]
instance Arbitrary OpCodes.OpCode where
arbitrary = do
op_id <- elements OpCodes.allOpIDs
case op_id of
"OP_TEST_DELAY" ->
OpCodes.OpTestDelay <$> arbitrary <*> arbitrary <*>
genNodeNamesNE <*> return Nothing <*> arbitrary <*> arbitrary
"OP_INSTANCE_REPLACE_DISKS" ->
OpCodes.OpInstanceReplaceDisks <$> genFQDN <*> return Nothing <*>
arbitrary <*> arbitrary <*> arbitrary <*> genDiskIndices <*>
genMaybe genNodeNameNE <*> return Nothing <*> genMaybe genNameNE
"OP_INSTANCE_FAILOVER" ->
OpCodes.OpInstanceFailover <$> genFQDN <*> return Nothing <*>
arbitrary <*> arbitrary <*> genMaybe genNodeNameNE <*>
return Nothing <*> arbitrary <*> arbitrary <*> genMaybe genNameNE
"OP_INSTANCE_MIGRATE" ->
OpCodes.OpInstanceMigrate <$> genFQDN <*> return Nothing <*>
arbitrary <*> arbitrary <*> genMaybe genNodeNameNE <*>
return Nothing <*> arbitrary <*> arbitrary <*> arbitrary <*>
genMaybe genNameNE <*> arbitrary
"OP_TAGS_GET" ->
arbitraryOpTagsGet
"OP_TAGS_SEARCH" ->
OpCodes.OpTagsSearch <$> genNameNE
"OP_TAGS_SET" ->
arbitraryOpTagsSet
"OP_TAGS_DEL" ->
arbitraryOpTagsDel
"OP_CLUSTER_POST_INIT" -> pure OpCodes.OpClusterPostInit
"OP_CLUSTER_RENEW_CRYPTO" -> pure OpCodes.OpClusterRenewCrypto
"OP_CLUSTER_DESTROY" -> pure OpCodes.OpClusterDestroy
"OP_CLUSTER_QUERY" -> pure OpCodes.OpClusterQuery
"OP_CLUSTER_VERIFY" ->
OpCodes.OpClusterVerify <$> arbitrary <*> arbitrary <*>
genListSet Nothing <*> genListSet Nothing <*> arbitrary <*>
genMaybe genNameNE
"OP_CLUSTER_VERIFY_CONFIG" ->
OpCodes.OpClusterVerifyConfig <$> arbitrary <*> arbitrary <*>
genListSet Nothing <*> arbitrary
"OP_CLUSTER_VERIFY_GROUP" ->
OpCodes.OpClusterVerifyGroup <$> genNameNE <*> arbitrary <*>
arbitrary <*> genListSet Nothing <*> genListSet Nothing <*> arbitrary
"OP_CLUSTER_VERIFY_DISKS" -> pure OpCodes.OpClusterVerifyDisks
"OP_GROUP_VERIFY_DISKS" ->
OpCodes.OpGroupVerifyDisks <$> genNameNE
"OP_CLUSTER_REPAIR_DISK_SIZES" ->
OpCodes.OpClusterRepairDiskSizes <$> genNodeNamesNE
"OP_CLUSTER_CONFIG_QUERY" ->
OpCodes.OpClusterConfigQuery <$> genFieldsNE
"OP_CLUSTER_RENAME" ->
OpCodes.OpClusterRename <$> genNameNE
"OP_CLUSTER_SET_PARAMS" ->
OpCodes.OpClusterSetParams
<$> arbitrary -- force
<*> emptyMUD -- hv_state
<*> emptyMUD -- disk_state
<*> arbitrary -- vg_name
<*> genMaybe arbitrary -- enabled_hypervisors
<*> genMaybe genEmptyContainer -- hvparams
<*> emptyMUD -- beparams
<*> genMaybe genEmptyContainer -- os_hvp
<*> genMaybe genEmptyContainer -- osparams
<*> genMaybe genEmptyContainer -- osparams_private_cluster
<*> genMaybe genEmptyContainer -- diskparams
<*> genMaybe arbitrary -- candidate_pool_size
<*> genMaybe arbitrary -- max_running_jobs
<*> arbitrary -- uid_pool
<*> arbitrary -- add_uids
<*> arbitrary -- remove_uids
<*> arbitrary -- maintain_node_health
<*> arbitrary -- prealloc_wipe_disks
<*> arbitrary -- nicparams
<*> emptyMUD -- ndparams
<*> emptyMUD -- ipolicy
<*> arbitrary -- drbd_helper
<*> arbitrary -- default_iallocator
<*> emptyMUD -- default_iallocator_params
<*> genMaybe genMacPrefix -- mac_prefix
<*> arbitrary -- master_netdev
<*> arbitrary -- master_netmask
<*> arbitrary -- reserved_lvs
<*> arbitrary -- hidden_os
<*> arbitrary -- blacklisted_os
<*> arbitrary -- use_external_mip_script
<*> arbitrary -- enabled_disk_templates
<*> arbitrary -- modify_etc_hosts
<*> genMaybe genName -- file_storage_dir
<*> genMaybe genName -- shared_file_storage_dir
<*> genMaybe genName -- gluster_file_storage_dir
<*> arbitrary -- instance_communication_network
<*> arbitrary -- zeroing_image
"OP_CLUSTER_REDIST_CONF" -> pure OpCodes.OpClusterRedistConf
"OP_CLUSTER_ACTIVATE_MASTER_IP" ->
pure OpCodes.OpClusterActivateMasterIp
"OP_CLUSTER_DEACTIVATE_MASTER_IP" ->
pure OpCodes.OpClusterDeactivateMasterIp
"OP_QUERY" ->
OpCodes.OpQuery <$> arbitrary <*> arbitrary <*> arbitrary <*>
pure Nothing
"OP_QUERY_FIELDS" ->
OpCodes.OpQueryFields <$> arbitrary <*> arbitrary
"OP_OOB_COMMAND" ->
OpCodes.OpOobCommand <$> genNodeNamesNE <*> return Nothing <*>
arbitrary <*> arbitrary <*> arbitrary <*>
(arbitrary `suchThat` (>0))
"OP_NODE_REMOVE" ->
OpCodes.OpNodeRemove <$> genNodeNameNE <*> return Nothing
"OP_NODE_ADD" ->
OpCodes.OpNodeAdd <$> genNodeNameNE <*> emptyMUD <*> emptyMUD <*>
genMaybe genNameNE <*> genMaybe genNameNE <*> arbitrary <*>
genMaybe genNameNE <*> arbitrary <*> arbitrary <*> emptyMUD
"OP_NODE_QUERYVOLS" ->
OpCodes.OpNodeQueryvols <$> arbitrary <*> genNodeNamesNE
"OP_NODE_QUERY_STORAGE" ->
OpCodes.OpNodeQueryStorage <$> arbitrary <*> arbitrary <*>
genNodeNamesNE <*> genMaybe genNameNE
"OP_NODE_MODIFY_STORAGE" ->
OpCodes.OpNodeModifyStorage <$> genNodeNameNE <*> return Nothing <*>
arbitrary <*> genMaybe genNameNE <*> pure emptyJSObject
"OP_REPAIR_NODE_STORAGE" ->
OpCodes.OpRepairNodeStorage <$> genNodeNameNE <*> return Nothing <*>
arbitrary <*> genMaybe genNameNE <*> arbitrary
"OP_NODE_SET_PARAMS" ->
OpCodes.OpNodeSetParams <$> genNodeNameNE <*> return Nothing <*>
arbitrary <*> emptyMUD <*> emptyMUD <*> arbitrary <*> arbitrary <*>
arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*>
genMaybe genNameNE <*> emptyMUD <*> arbitrary
"OP_NODE_POWERCYCLE" ->
OpCodes.OpNodePowercycle <$> genNodeNameNE <*> return Nothing <*>
arbitrary
"OP_NODE_MIGRATE" ->
OpCodes.OpNodeMigrate <$> genNodeNameNE <*> return Nothing <*>
arbitrary <*> arbitrary <*> genMaybe genNodeNameNE <*>
return Nothing <*> arbitrary <*> arbitrary <*> genMaybe genNameNE
"OP_NODE_EVACUATE" ->
OpCodes.OpNodeEvacuate <$> arbitrary <*> genNodeNameNE <*>
return Nothing <*> genMaybe genNodeNameNE <*> return Nothing <*>
genMaybe genNameNE <*> arbitrary
"OP_INSTANCE_CREATE" ->
OpCodes.OpInstanceCreate
<$> genFQDN -- instance_name
<*> arbitrary -- force_variant
<*> arbitrary -- wait_for_sync
<*> arbitrary -- name_check
<*> arbitrary -- ignore_ipolicy
<*> arbitrary -- opportunistic_locking
<*> pure emptyJSObject -- beparams
<*> arbitrary -- disks
<*> arbitrary -- disk_template
<*> arbitrary -- file_driver
<*> genMaybe genNameNE -- file_storage_dir
<*> pure emptyJSObject -- hvparams
<*> arbitrary -- hypervisor
<*> genMaybe genNameNE -- iallocator
<*> arbitrary -- identify_defaults
<*> arbitrary -- ip_check
<*> arbitrary -- conflicts_check
<*> arbitrary -- mode
<*> arbitrary -- nics
<*> arbitrary -- no_install
<*> pure emptyJSObject -- osparams
<*> genMaybe arbitraryPrivateJSObj -- osparams_private
<*> genMaybe arbitraryPrivateJSObj -- osparams_secret
<*> genMaybe genNameNE -- os_type
<*> genMaybe genNodeNameNE -- pnode
<*> return Nothing -- pnode_uuid
<*> genMaybe genNodeNameNE -- snode
<*> return Nothing -- snode_uuid
<*> genMaybe (pure []) -- source_handshake
<*> genMaybe genNodeNameNE -- source_instance_name
<*> arbitrary -- source_shutdown_timeout
<*> genMaybe genNodeNameNE -- source_x509_ca
<*> return Nothing -- src_node
<*> genMaybe genNodeNameNE -- src_node_uuid
<*> genMaybe genNameNE -- src_path
<*> arbitrary -- compress
<*> arbitrary -- start
<*> (genTags >>= mapM mkNonEmpty) -- tags
<*> arbitrary -- instance_communication
"OP_INSTANCE_MULTI_ALLOC" ->
OpCodes.OpInstanceMultiAlloc <$> arbitrary <*> genMaybe genNameNE <*>
pure []
"OP_INSTANCE_REINSTALL" ->
OpCodes.OpInstanceReinstall <$> genFQDN <*> return Nothing <*>
arbitrary <*> genMaybe genNameNE <*> genMaybe (pure emptyJSObject)
<*> genMaybe arbitraryPrivateJSObj <*> genMaybe arbitraryPrivateJSObj
"OP_INSTANCE_REMOVE" ->
OpCodes.OpInstanceRemove <$> genFQDN <*> return Nothing <*>
arbitrary <*> arbitrary
"OP_INSTANCE_RENAME" ->
OpCodes.OpInstanceRename <$> genFQDN <*> return Nothing <*>
genNodeNameNE <*> arbitrary <*> arbitrary
"OP_INSTANCE_STARTUP" ->
OpCodes.OpInstanceStartup <$> genFQDN <*> return Nothing <*>
arbitrary <*> arbitrary <*> pure emptyJSObject <*>
pure emptyJSObject <*> arbitrary <*> arbitrary
"OP_INSTANCE_SHUTDOWN" ->
OpCodes.OpInstanceShutdown <$> genFQDN <*> return Nothing <*>
arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
"OP_INSTANCE_REBOOT" ->
OpCodes.OpInstanceReboot <$> genFQDN <*> return Nothing <*>
arbitrary <*> arbitrary <*> arbitrary
"OP_INSTANCE_MOVE" ->
OpCodes.OpInstanceMove <$> genFQDN <*> return Nothing <*>
arbitrary <*> arbitrary <*> genNodeNameNE <*> return Nothing <*>
arbitrary <*> arbitrary
"OP_INSTANCE_CONSOLE" -> OpCodes.OpInstanceConsole <$> genFQDN <*>
return Nothing
"OP_INSTANCE_ACTIVATE_DISKS" ->
OpCodes.OpInstanceActivateDisks <$> genFQDN <*> return Nothing <*>
arbitrary <*> arbitrary
"OP_INSTANCE_DEACTIVATE_DISKS" ->
OpCodes.OpInstanceDeactivateDisks <$> genFQDN <*> return Nothing <*>
arbitrary
"OP_INSTANCE_RECREATE_DISKS" ->
OpCodes.OpInstanceRecreateDisks <$> genFQDN <*> return Nothing <*>
arbitrary <*> genNodeNamesNE <*> return Nothing <*>
genMaybe genNameNE
"OP_INSTANCE_QUERY_DATA" ->
OpCodes.OpInstanceQueryData <$> arbitrary <*>
genNodeNamesNE <*> arbitrary
"OP_INSTANCE_SET_PARAMS" ->
OpCodes.OpInstanceSetParams
<$> genFQDN -- instance_name
<*> return Nothing -- instance_uuid
<*> arbitrary -- force
<*> arbitrary -- force_variant
<*> arbitrary -- ignore_ipolicy
<*> arbitrary -- nics
<*> arbitrary -- disks
<*> pure emptyJSObject -- beparams
<*> arbitrary -- runtime_mem
<*> pure emptyJSObject -- hvparams
<*> arbitrary -- disk_template
<*> genMaybe genNodeNameNE -- pnode
<*> return Nothing -- pnode_uuid
<*> genMaybe genNodeNameNE -- remote_node
<*> return Nothing -- remote_node_uuid
<*> genMaybe genNameNE -- os_name
<*> pure emptyJSObject -- osparams
<*> genMaybe arbitraryPrivateJSObj -- osparams_private
<*> arbitrary -- wait_for_sync
<*> arbitrary -- offline
<*> arbitrary -- conflicts_check
<*> arbitrary -- hotplug
<*> arbitrary -- hotplug_if_possible
<*> arbitrary -- instance_communication
"OP_INSTANCE_GROW_DISK" ->
OpCodes.OpInstanceGrowDisk <$> genFQDN <*> return Nothing <*>
arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
"OP_INSTANCE_CHANGE_GROUP" ->
OpCodes.OpInstanceChangeGroup <$> genFQDN <*> return Nothing <*>
arbitrary <*> genMaybe genNameNE <*>
genMaybe (resize maxNodes (listOf genNameNE))
"OP_GROUP_ADD" ->
OpCodes.OpGroupAdd <$> genNameNE <*> arbitrary <*>
emptyMUD <*> genMaybe genEmptyContainer <*>
emptyMUD <*> emptyMUD <*> emptyMUD
"OP_GROUP_ASSIGN_NODES" ->
OpCodes.OpGroupAssignNodes <$> genNameNE <*> arbitrary <*>
genNodeNamesNE <*> return Nothing
"OP_GROUP_SET_PARAMS" ->
OpCodes.OpGroupSetParams <$> genNameNE <*> arbitrary <*>
emptyMUD <*> genMaybe genEmptyContainer <*>
emptyMUD <*> emptyMUD <*> emptyMUD
"OP_GROUP_REMOVE" ->
OpCodes.OpGroupRemove <$> genNameNE
"OP_GROUP_RENAME" ->
OpCodes.OpGroupRename <$> genNameNE <*> genNameNE
"OP_GROUP_EVACUATE" ->
OpCodes.OpGroupEvacuate <$> genNameNE <*> arbitrary <*>
genMaybe genNameNE <*> genMaybe genNamesNE
"OP_OS_DIAGNOSE" ->
OpCodes.OpOsDiagnose <$> genFieldsNE <*> genNamesNE
"OP_EXT_STORAGE_DIAGNOSE" ->
        OpCodes.OpExtStorageDiagnose <$> genFieldsNE <*> genNamesNE
"OP_BACKUP_PREPARE" ->
OpCodes.OpBackupPrepare <$> genFQDN <*> return Nothing <*> arbitrary
"OP_BACKUP_EXPORT" ->
OpCodes.OpBackupExport <$> genFQDN <*> return Nothing <*>
arbitrary <*> arbitrary <*> arbitrary <*> return Nothing <*>
arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*>
genMaybe (pure []) <*> genMaybe genNameNE <*> arbitrary <*>
arbitrary <*> arbitrary
"OP_BACKUP_REMOVE" ->
OpCodes.OpBackupRemove <$> genFQDN <*> return Nothing
"OP_TEST_ALLOCATOR" ->
OpCodes.OpTestAllocator <$> arbitrary <*> arbitrary <*>
genNameNE <*> genMaybe (pure []) <*> genMaybe (pure []) <*>
arbitrary <*> genMaybe genNameNE <*>
(genTags >>= mapM mkNonEmpty) <*>
arbitrary <*> arbitrary <*> genMaybe genNameNE <*>
arbitrary <*> genMaybe genNodeNamesNE <*> arbitrary <*>
genMaybe genNamesNE <*> arbitrary <*> arbitrary
"OP_TEST_JQUEUE" ->
OpCodes.OpTestJqueue <$> arbitrary <*> arbitrary <*>
resize 20 (listOf genFQDN) <*> arbitrary
"OP_TEST_DUMMY" ->
OpCodes.OpTestDummy <$> pure J.JSNull <*> pure J.JSNull <*>
pure J.JSNull <*> pure J.JSNull
"OP_NETWORK_ADD" ->
OpCodes.OpNetworkAdd <$> genNameNE <*> genIPv4Network <*>
genMaybe genIPv4Address <*> pure Nothing <*> pure Nothing <*>
genMaybe genMacPrefix <*> genMaybe (listOf genIPv4Address) <*>
arbitrary <*> (genTags >>= mapM mkNonEmpty)
"OP_NETWORK_REMOVE" ->
OpCodes.OpNetworkRemove <$> genNameNE <*> arbitrary
"OP_NETWORK_SET_PARAMS" ->
OpCodes.OpNetworkSetParams <$> genNameNE <*>
genMaybe genIPv4Address <*> pure Nothing <*> pure Nothing <*>
genMaybe genMacPrefix <*> genMaybe (listOf genIPv4Address) <*>
genMaybe (listOf genIPv4Address)
"OP_NETWORK_CONNECT" ->
OpCodes.OpNetworkConnect <$> genNameNE <*> genNameNE <*>
arbitrary <*> genNameNE <*> arbitrary
"OP_NETWORK_DISCONNECT" ->
OpCodes.OpNetworkDisconnect <$> genNameNE <*> genNameNE
"OP_RESTRICTED_COMMAND" ->
OpCodes.OpRestrictedCommand <$> arbitrary <*> genNodeNamesNE <*>
return Nothing <*> genNameNE
_ -> fail $ "Undefined arbitrary for opcode " ++ op_id
-- | Generates one element of a reason trail
genReasonElem :: Gen ReasonElem
genReasonElem = (,,) <$> genFQDN <*> genFQDN <*> arbitrary
-- | Generates a reason trail
genReasonTrail :: Gen ReasonTrail
genReasonTrail = do
size <- choose (0, 10)
vectorOf size genReasonElem
instance Arbitrary OpCodes.CommonOpParams where
arbitrary = OpCodes.CommonOpParams <$> arbitrary <*> arbitrary <*>
arbitrary <*> resize 5 arbitrary <*> genMaybe genName <*>
genReasonTrail
-- * Helper functions
-- | Empty JSObject.
emptyJSObject :: J.JSObject J.JSValue
emptyJSObject = J.toJSObject []
-- | Empty maybe unchecked dictionary.
emptyMUD :: Gen (Maybe (J.JSObject J.JSValue))
emptyMUD = genMaybe $ pure emptyJSObject
-- | Generates an empty container.
genEmptyContainer :: (Ord a) => Gen (GenericContainer a b)
genEmptyContainer = pure . GenericContainer $ Map.fromList []
-- | Generates list of disk indices.
genDiskIndices :: Gen [DiskIndex]
genDiskIndices = do
cnt <- choose (0, C.maxDisks)
genUniquesList cnt arbitrary
-- | Generates a list of node names.
genNodeNames :: Gen [String]
genNodeNames = resize maxNodes (listOf genFQDN)
-- | Generates a list of node names in non-empty string type.
genNodeNamesNE :: Gen [NonEmptyString]
genNodeNamesNE = genNodeNames >>= mapM mkNonEmpty
-- | Gets a node name in non-empty type.
genNodeNameNE :: Gen NonEmptyString
genNodeNameNE = genFQDN >>= mkNonEmpty
-- | Gets a name (non-fqdn) in non-empty type.
genNameNE :: Gen NonEmptyString
genNameNE = genName >>= mkNonEmpty
-- | Gets a list of names (non-fqdn) in non-empty type.
genNamesNE :: Gen [NonEmptyString]
genNamesNE = resize maxNodes (listOf genNameNE)
-- | Returns a list of non-empty fields.
genFieldsNE :: Gen [NonEmptyString]
genFieldsNE = genFields >>= mapM mkNonEmpty
-- | Generate a 3-byte MAC prefix.
genMacPrefix :: Gen NonEmptyString
genMacPrefix = do
octets <- vectorOf 3 $ choose (0::Int, 255)
mkNonEmpty . intercalate ":" $ map (printf "%02x") octets
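-- For example (illustrative only), this yields prefixes such as "0a:1f:c3".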
-- | JSObject of arbitrary data.
--
-- Since JSValue does not implement Arbitrary, I'll simply generate
-- (String, String) objects.
arbitraryPrivateJSObj :: Gen (J.JSObject (Private J.JSValue))
arbitraryPrivateJSObj =
constructor <$> (fromNonEmpty <$> genNameNE)
<*> (fromNonEmpty <$> genNameNE)
where constructor k v = showPrivateJSObject [(k, v)]
-- | Arbitrary instance for MetaOpCode, defined here due to TH ordering.
$(genArbitrary ''OpCodes.MetaOpCode)
-- | Small helper to check for a failed JSON deserialisation
isJsonError :: J.Result a -> Bool
isJsonError (J.Error _) = True
isJsonError _ = False
-- * Test cases
-- | Check that opcode serialization is idempotent.
prop_serialization :: OpCodes.OpCode -> Property
prop_serialization = testSerialisation
-- | Check that Python and Haskell defined the same opcode list.
case_AllDefined :: HUnit.Assertion
case_AllDefined = do
py_stdout <-
runPython "from ganeti import opcodes\n\
\from ganeti import serializer\n\
\import sys\n\
\print serializer.Dump([opid for opid in opcodes.OP_MAPPING])\n"
""
>>= checkPythonResult
py_ops <- case J.decode py_stdout::J.Result [String] of
J.Ok ops -> return ops
J.Error msg ->
HUnit.assertFailure ("Unable to decode opcode names: " ++ msg)
                -- this already raised an exception, but we need it
-- for proper types
>> fail "Unable to decode opcode names"
let hs_ops = sort OpCodes.allOpIDs
extra_py = py_ops \\ hs_ops
extra_hs = hs_ops \\ py_ops
HUnit.assertBool ("Missing OpCodes from the Haskell code:\n" ++
unlines extra_py) (null extra_py)
HUnit.assertBool ("Extra OpCodes in the Haskell code code:\n" ++
unlines extra_hs) (null extra_hs)
-- | Custom HUnit test case that forks a Python process and checks
-- correspondence between Haskell-generated OpCodes and their Python
-- decoded, validated and re-encoded version.
--
-- Note that we have a strange beast here: since launching Python is
-- expensive, we don't do this via a usual QuickProperty, since that's
-- slow (I've tested it, and it's indeed quite slow). Rather, we use a
-- single HUnit assertion, and in it we manually use QuickCheck to
-- generate 500 opcodes times the number of defined opcodes, which
-- then we pass in bulk to Python. The drawbacks to this method are
-- two fold: we cannot control the number of generated opcodes, since
-- HUnit assertions don't get access to the test options, and for the
-- same reason we can't run a repeatable seed. We should probably find
-- a better way to do this, for example by having a
-- separately-launched Python process (if not running the tests would
-- be skipped).
case_py_compat_types :: HUnit.Assertion
case_py_compat_types = do
let num_opcodes = length OpCodes.allOpIDs * 100
opcodes <- genSample (vectorOf num_opcodes
(arbitrary::Gen OpCodes.MetaOpCode))
let with_sum = map (\o -> (OpCodes.opSummary $
OpCodes.metaOpCode o, o)) opcodes
serialized = J.encode opcodes
-- check for non-ASCII fields, usually due to 'arbitrary :: String'
mapM_ (\op -> when (any (not . isAscii) (J.encode op)) .
HUnit.assertFailure $
"OpCode has non-ASCII fields: " ++ show op
) opcodes
py_stdout <-
runPython "from ganeti import opcodes\n\
\from ganeti import serializer\n\
\import sys\n\
\op_data = serializer.Load(sys.stdin.read())\n\
\decoded = [opcodes.OpCode.LoadOpCode(o) for o in op_data]\n\
\for op in decoded:\n\
\ op.Validate(True)\n\
\encoded = [(op.Summary(), op.__getstate__())\n\
\ for op in decoded]\n\
\print serializer.Dump(\
\ encoded,\
\ private_encoder=serializer.EncodeWithPrivateFields)"
serialized
>>= checkPythonResult
let deserialised =
J.decode py_stdout::J.Result [(String, OpCodes.MetaOpCode)]
decoded <- case deserialised of
J.Ok ops -> return ops
J.Error msg ->
HUnit.assertFailure ("Unable to decode opcodes: " ++ msg)
                 -- this already raised an exception, but we need it
-- for proper types
>> fail "Unable to decode opcodes"
HUnit.assertEqual "Mismatch in number of returned opcodes"
(length decoded) (length with_sum)
mapM_ (uncurry (HUnit.assertEqual "Different result after encoding/decoding")
) $ zip with_sum decoded
-- | Custom HUnit test case that forks a Python process and checks
-- correspondence between Haskell OpCodes fields and their Python
-- equivalent.
case_py_compat_fields :: HUnit.Assertion
case_py_compat_fields = do
let hs_fields = sort $ map (\op_id -> (op_id, OpCodes.allOpFields op_id))
OpCodes.allOpIDs
py_stdout <-
runPython "from ganeti import opcodes\n\
\import sys\n\
\from ganeti import serializer\n\
\fields = [(k, sorted([p[0] for p in v.OP_PARAMS]))\n\
\ for k, v in opcodes.OP_MAPPING.items()]\n\
\print serializer.Dump(fields)" ""
>>= checkPythonResult
let deserialised = J.decode py_stdout::J.Result [(String, [String])]
py_fields <- case deserialised of
J.Ok v -> return $ sort v
J.Error msg ->
HUnit.assertFailure ("Unable to decode op fields: " ++ msg)
                   -- this already raised an exception, but we need it
-- for proper types
>> fail "Unable to decode op fields"
HUnit.assertEqual "Mismatch in number of returned opcodes"
(length hs_fields) (length py_fields)
HUnit.assertEqual "Mismatch in defined OP_IDs"
(map fst hs_fields) (map fst py_fields)
mapM_ (\((py_id, py_flds), (hs_id, hs_flds)) -> do
HUnit.assertEqual "Mismatch in OP_ID" py_id hs_id
HUnit.assertEqual ("Mismatch in fields for " ++ hs_id)
py_flds hs_flds
) $ zip hs_fields py_fields
-- | Checks that setOpComment works correctly.
prop_setOpComment :: OpCodes.MetaOpCode -> String -> Property
prop_setOpComment op comment =
let (OpCodes.MetaOpCode common _) = OpCodes.setOpComment comment op
in OpCodes.opComment common ==? Just comment
-- | Tests wrong (negative) disk index.
prop_mkDiskIndex_fail :: QuickCheck.Positive Int -> Property
prop_mkDiskIndex_fail (Positive i) =
case mkDiskIndex (negate i) of
Bad msg -> printTestCase "error message " $
"Invalid value" `isPrefixOf` msg
Ok v -> failTest $ "Succeeded to build disk index '" ++ show v ++
"' from negative value " ++ show (negate i)
-- | Tests a few invalid 'readRecreateDisks' cases.
case_readRecreateDisks_fail :: Assertion
case_readRecreateDisks_fail = do
assertBool "null" $
isJsonError (J.readJSON J.JSNull::J.Result RecreateDisksInfo)
assertBool "string" $
isJsonError (J.readJSON (J.showJSON "abc")::J.Result RecreateDisksInfo)
-- | Tests a few invalid 'readDdmOldChanges' cases.
case_readDdmOldChanges_fail :: Assertion
case_readDdmOldChanges_fail = do
assertBool "null" $
isJsonError (J.readJSON J.JSNull::J.Result DdmOldChanges)
assertBool "string" $
isJsonError (J.readJSON (J.showJSON "abc")::J.Result DdmOldChanges)
-- | Tests a few invalid 'readExportTarget' cases.
case_readExportTarget_fail :: Assertion
case_readExportTarget_fail = do
assertBool "null" $
isJsonError (J.readJSON J.JSNull::J.Result ExportTarget)
assertBool "int" $
isJsonError (J.readJSON (J.showJSON (5::Int))::J.Result ExportTarget)
testSuite "OpCodes"
[ 'prop_serialization
, 'case_AllDefined
, 'case_py_compat_types
, 'case_py_compat_fields
, 'prop_setOpComment
, 'prop_mkDiskIndex_fail
, 'case_readRecreateDisks_fail
, 'case_readDdmOldChanges_fail
, 'case_readExportTarget_fail
]
| kawamuray/ganeti | test/hs/Test/Ganeti/OpCodes.hs | gpl-2.0 | 31,072 | 0 | 50 | 9,172 | 5,525 | 2,780 | 2,745 | 543 | 2 |
-- taken from RosettaCode
-- http://rosettacode.org/wiki/Hostname#Haskell
module GetHostname where
import Foreign.Marshal.Array ( allocaArray0, peekArray0 )
import Foreign.C.Types ( CInt(..), CSize(..) )
import Foreign.C.String ( CString, peekCString )
import Foreign.C.Error ( throwErrnoIfMinus1_ )
getHostname :: IO String
getHostname = do
let size = 256
allocaArray0 size $ \ cstr -> do
throwErrnoIfMinus1_ "getHostname" $ c_gethostname cstr (fromIntegral size)
peekCString cstr
foreign import ccall "gethostname"
c_gethostname :: CString -> CSize -> IO CInt
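-- Illustrative usage (the output naturally depends on the machine it runs on):
--
-- > main :: IO ()
-- > main = getHostname >>= putStrLn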
| obreitwi/dotfiles_desktop | xmonad/lib/GetHostname.hs | gpl-2.0 | 584 | 0 | 14 | 92 | 154 | 86 | 68 | 13 | 1 |
module Logic.PropositionalLogic.ResolutionSpec (spec) where
import Prelude
import Control.Monad (replicateM)
import Data.Text.Arbitrary
import Test.Hspec
import Test.QuickCheck
import Test.QuickCheck.Checkers
import Test.QuickCheck.Classes (monoid)
import TestUtils (batch)
import Logic.PropositionalLogic.Resolution
instance EqProp CNFSentence where
(=-=) = eq
instance Arbitrary CNFSentence where
arbitrary = scale (const 5) $ sized go
where
go :: Int -> Gen CNFSentence
go n = Conjunction <$> replicateM n arbitrary
instance EqProp Disjunction where
(=-=) = eq
instance Arbitrary Disjunction where
arbitrary = scale (const 5) $ sized go
where
go :: Int -> Gen Disjunction
go n = Disjunct <$> replicateM n arbitrary
instance Arbitrary CNFLit where
arbitrary = oneof [
JustLit <$> arbitrary
, NotLit <$> arbitrary
]
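-- Note: the 'CNFSentence' and 'Disjunction' generators above are capped at
-- size 5 via 'scale (const 5)', presumably to keep the monoid-law batches
-- below reasonably fast.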
spec :: Spec
spec = do
describe "CNFSentence" $ do
batch "is a Monoid" $ monoid (undefined :: CNFSentence)
describe "Disjunction" $ do
batch "is a Monoid" $ monoid (undefined :: Disjunction)
| NorfairKing/the-notes | test/Logic/PropositionalLogic/ResolutionSpec.hs | gpl-2.0 | 1,325 | 0 | 12 | 458 | 320 | 170 | 150 | 32 | 1 |
{- |
Module : $Header$
Description : abstract syntax for CASL_DL logic extension of CASL
Copyright : (c) Klaus Luettich, Dominik Luecke, Uni Bremen 2004-2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
Abstract syntax for CASL_DL logic extension of CASL
Only the added syntax is specified
-}
module CASL_DL.AS_CASL_DL where
import Common.Id
import Common.AS_Annotation
import CASL.AS_Basic_CASL
-- DrIFT command
{-! global: GetRange !-}
type DL_BASIC_SPEC = BASIC_SPEC () () DL_FORMULA
type AnDLFORM = Annoted (FORMULA DL_FORMULA)
data CardType = CMin | CMax | CExact deriving (Eq, Ord)
minCardinalityS, maxCardinalityS, cardinalityS :: String
cardinalityS = "cardinality"
minCardinalityS = "minC" ++ tail cardinalityS
maxCardinalityS = "maxC" ++ tail cardinalityS
instance Show CardType where
show ct = case ct of
CMin -> minCardinalityS
CMax -> maxCardinalityS
CExact -> cardinalityS
-- | for a detailed specification of all the components look into the sources
data DL_FORMULA =
Cardinality CardType
PRED_SYMB -- refers to a declared (binary) predicate
(TERM DL_FORMULA)
{- this term is restricted to constructors
denoting a (typed) variable -}
(TERM DL_FORMULA)
{- the second term is restricted to an Application denoting
a literal of type nonNegativeInteger (Nat) -}
(Maybe (FORMULA DL_FORMULA))
-- an optional qualification for the number restriction
Range
-- position of keyword, brackets, parens and comma
deriving (Eq, Ord, Show)
-- TODO: extension via Maybe
caslDLCardTypes :: [CardType]
caslDLCardTypes = [CExact, CMin, CMax]
casl_DL_reserved_words :: [String]
casl_DL_reserved_words = map show caslDLCardTypes
{- parser will need 7 functions: concept1, concept2, concept3, concept4,
classProperty, basicItem, basicSpec -}
| nevrenato/HetsAlloy | CASL_DL/AS_CASL_DL.der.hs | gpl-2.0 | 2,100 | 0 | 10 | 525 | 262 | 150 | 112 | 28 | 1 |
import Control.Parallel
import Control.Monad
import Text.Printf
cutoff = 35
fib' :: Int -> Integer
fib' 0 = 0
fib' 1 = 1
fib' n = fib' (n-1) + fib' (n-2)
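-- Parallel Fibonacci: below the cutoff we fall back to the sequential fib'
-- to avoid sparking tiny work units; above it, `par` sparks the right branch
-- while `pseq` forces the left branch before the two results are summed.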
fib :: Int -> Integer
fib n | n < cutoff = fib' n
| otherwise = r `par` (l `pseq` l + r)
where
l = fib (n-1)
r = fib (n-2)
main = forM_ [0..45] $ \i ->
printf "n=%d => %d\n" i (fib i)
| limonheiro/OS | ex/haskel_fib.hs | gpl-3.0 | 374 | 0 | 9 | 113 | 203 | 106 | 97 | 15 | 1 |
module Main where
main = do
print "" | antonpetkoff/learning | haskell/fmi-fp/lambda.hs | gpl-3.0 | 43 | 0 | 7 | 14 | 15 | 8 | 7 | 3 | 1 |
module Problems46thru50Spec where
import Test.Hspec
import Problems46thru50
spec :: Spec
spec = do
it "can generate a predicate table for a given logical expression in 2\
\variables" $ do
let expected = unlines ["True True True"
,"True False True"
,"False True False"
,"False False False"]
tablePure (\a b -> (and' a (or' a b))) `shouldBe` expected
it "can generate a predicate table for a given logical expression in 2\
     \ variables and predicates as operators" $ do
let expected = unlines ["True True True"
,"True False True"
,"False True False"
,"False False False"]
tablePure (\a b -> a `and'` (a `or'` not' b)) `shouldBe` expected
it "can generate a predicate table for a given logical expression in 2\
\variables" $ do
let actual = tablePure' 3 (\[a,b,c] -> a `and'` (b `or'` c) `equ'` a `and'` b `or'` a `and'` c)
let expected = unlines ["True True True True"
,"True True False True"
,"True False True True"
,"True False False False"
,"False True True False"
,"False True False False"
,"False False True False"
,"False False False False"]
actual `shouldBe` expected
it "can generate n-bit Gray code" $ do
gray 3 `shouldBe` ["000","001","011","010","110","111","101","100"]
| zcesur/h99 | tests/Problems46thru50Spec.hs | gpl-3.0 | 1,665 | 0 | 22 | 678 | 339 | 190 | 149 | 33 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Reseller.Subscriptions.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists subscriptions of a reseller, optionally filtered by a customer
-- name prefix.
--
-- /See:/ <https://developers.google.com/google-apps/reseller/ Enterprise Apps Reseller API Reference> for @reseller.subscriptions.list@.
module Network.Google.Resource.Reseller.Subscriptions.List
(
-- * REST Resource
SubscriptionsListResource
-- * Creating a Request
, subscriptionsList
, SubscriptionsList
-- * Request Lenses
, slCustomerNamePrefix
, slCustomerId
, slCustomerAuthToken
, slPageToken
, slMaxResults
) where
import Network.Google.AppsReseller.Types
import Network.Google.Prelude
-- | A resource alias for @reseller.subscriptions.list@ method which the
-- 'SubscriptionsList' request conforms to.
type SubscriptionsListResource =
"apps" :>
"reseller" :>
"v1" :>
"subscriptions" :>
QueryParam "customerNamePrefix" Text :>
QueryParam "customerId" Text :>
QueryParam "customerAuthToken" Text :>
QueryParam "pageToken" Text :>
QueryParam "maxResults" (Textual Word32) :>
QueryParam "alt" AltJSON :> Get '[JSON] Subscriptions
-- | Lists subscriptions of a reseller, optionally filtered by a customer
-- name prefix.
--
-- /See:/ 'subscriptionsList' smart constructor.
data SubscriptionsList = SubscriptionsList'
{ _slCustomerNamePrefix :: !(Maybe Text)
, _slCustomerId :: !(Maybe Text)
, _slCustomerAuthToken :: !(Maybe Text)
, _slPageToken :: !(Maybe Text)
, _slMaxResults :: !(Maybe (Textual Word32))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'SubscriptionsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'slCustomerNamePrefix'
--
-- * 'slCustomerId'
--
-- * 'slCustomerAuthToken'
--
-- * 'slPageToken'
--
-- * 'slMaxResults'
subscriptionsList
:: SubscriptionsList
subscriptionsList =
SubscriptionsList'
{ _slCustomerNamePrefix = Nothing
, _slCustomerId = Nothing
, _slCustomerAuthToken = Nothing
, _slPageToken = Nothing
, _slMaxResults = Nothing
}
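-- Illustrative request construction, assuming the usual lens operators
-- ('&', '?~') are in scope; the customer id and limit below are made up:
--
-- > subscriptionsList & slCustomerId ?~ "C01234567" & slMaxResults ?~ 50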
-- | Prefix of the customer\'s domain name by which the subscriptions should
-- be filtered. Optional
slCustomerNamePrefix :: Lens' SubscriptionsList (Maybe Text)
slCustomerNamePrefix
= lens _slCustomerNamePrefix
(\ s a -> s{_slCustomerNamePrefix = a})
-- | Id of the Customer
slCustomerId :: Lens' SubscriptionsList (Maybe Text)
slCustomerId
= lens _slCustomerId (\ s a -> s{_slCustomerId = a})
-- | An auth token needed if the customer is not a resold customer of this
-- reseller. Can be generated at
-- https:\/\/www.google.com\/a\/cpanel\/customer-domain\/TransferToken. Optional.
slCustomerAuthToken :: Lens' SubscriptionsList (Maybe Text)
slCustomerAuthToken
= lens _slCustomerAuthToken
(\ s a -> s{_slCustomerAuthToken = a})
-- | Token to specify next page in the list
slPageToken :: Lens' SubscriptionsList (Maybe Text)
slPageToken
= lens _slPageToken (\ s a -> s{_slPageToken = a})
-- | Maximum number of results to return
slMaxResults :: Lens' SubscriptionsList (Maybe Word32)
slMaxResults
= lens _slMaxResults (\ s a -> s{_slMaxResults = a})
. mapping _Coerce
instance GoogleRequest SubscriptionsList where
type Rs SubscriptionsList = Subscriptions
type Scopes SubscriptionsList =
'["https://www.googleapis.com/auth/apps.order",
"https://www.googleapis.com/auth/apps.order.readonly"]
requestClient SubscriptionsList'{..}
= go _slCustomerNamePrefix _slCustomerId
_slCustomerAuthToken
_slPageToken
_slMaxResults
(Just AltJSON)
appsResellerService
where go
= buildClient
(Proxy :: Proxy SubscriptionsListResource)
mempty
| rueshyna/gogol | gogol-apps-reseller/gen/Network/Google/Resource/Reseller/Subscriptions/List.hs | mpl-2.0 | 4,812 | 0 | 17 | 1,117 | 657 | 384 | 273 | 95 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.AdExchangeBuyer2.Types
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.AdExchangeBuyer2.Types
(
-- * Service Configuration
adExchangeBuyer2Service
-- * OAuth Scopes
, adExchangeBuyerScope
-- * HTMLContent
, HTMLContent
, htmlContent
, hcHeight
, hcSnippet
, hcWidth
-- * ListFilteredBidsResponse
, ListFilteredBidsResponse
, listFilteredBidsResponse
, lfbrNextPageToken
, lfbrCreativeStatusRows
-- * ResumeProposalRequest
, ResumeProposalRequest
, resumeProposalRequest
-- * DealProgrammaticCreativeSource
, DealProgrammaticCreativeSource (..)
-- * SecurityContextSecuritiesItem
, SecurityContextSecuritiesItem (..)
-- * DealCreativePreApprovalPolicy
, DealCreativePreApprovalPolicy (..)
-- * NonGuaranteedFixedPriceTerms
, NonGuaranteedFixedPriceTerms
, nonGuaranteedFixedPriceTerms
, ngfptFixedPrices
-- * VideoTargetingTargetedPositionTypesItem
, VideoTargetingTargetedPositionTypesItem (..)
-- * DeliveryControlCreativeBlockingLevel
, DeliveryControlCreativeBlockingLevel (..)
-- * ListDealAssociationsResponse
, ListDealAssociationsResponse
, listDealAssociationsResponse
, ldarNextPageToken
, ldarAssociations
-- * ProposalProposalState
, ProposalProposalState (..)
-- * Image
, Image
, image
, iHeight
, iURL
, iWidth
-- * TechnologyTargeting
, TechnologyTargeting
, technologyTargeting
, ttDeviceCategoryTargeting
, ttOperatingSystemTargeting
, ttDeviceCapabilityTargeting
-- * ProductSyndicationProduct
, ProductSyndicationProduct (..)
-- * ListPublisherProFilesResponse
, ListPublisherProFilesResponse
, listPublisherProFilesResponse
, lppfrNextPageToken
, lppfrPublisherProFiles
-- * NonGuaranteedAuctionTerms
, NonGuaranteedAuctionTerms
, nonGuaranteedAuctionTerms
, ngatReservePricesPerBuyer
, ngatAutoOptimizePrivateAuction
-- * AccountsCreativesCreateDuplicateIdMode
, AccountsCreativesCreateDuplicateIdMode (..)
-- * FilterSetFormatsItem
, FilterSetFormatsItem (..)
-- * PublisherProFileMobileApplication
, PublisherProFileMobileApplication
, publisherProFileMobileApplication
, ppfmaExternalAppId
, ppfmaName
, ppfmaAppStore
-- * CreativeRestrictionsSkippableAdType
, CreativeRestrictionsSkippableAdType (..)
-- * ListClientUsersResponse
, ListClientUsersResponse
, listClientUsersResponse
, lcurNextPageToken
, lcurUsers
-- * BidMetricsRow
, BidMetricsRow
, bidMetricsRow
, bmrBids
, bmrBidsInAuction
, bmrImpressionsWon
, bmrRowDimensions
, bmrMeasurableImpressions
, bmrViewableImpressions
, bmrBilledImpressions
, bmrReachedQueries
-- * ServingRestrictionStatus
, ServingRestrictionStatus (..)
-- * DayPartTargeting
, DayPartTargeting
, dayPartTargeting
, dptTimeZoneType
, dptDayParts
-- * CriteriaTargeting
, CriteriaTargeting
, criteriaTargeting
, ctExcludedCriteriaIds
, ctTargetedCriteriaIds
-- * DealPauseStatus
, DealPauseStatus
, dealPauseStatus
, dpsFirstPausedBy
, dpsBuyerPauseReason
, dpsHasBuyerPaused
, dpsSellerPauseReason
, dpsHasSellerPaused
-- * DisApprovalReason
, DisApprovalReason (..)
-- * FilteredBidDetailRow
, FilteredBidDetailRow
, filteredBidDetailRow
, fbdrDetailId
, fbdrRowDimensions
, fbdrBidCount
, fbdrDetail
-- * PrivateData
, PrivateData
, privateData
, pdReferenceId
-- * ClientRole
, ClientRole (..)
-- * DealCreativeSafeFrameCompatibility
, DealCreativeSafeFrameCompatibility (..)
-- * DeliveryControlDeliveryRateType
, DeliveryControlDeliveryRateType (..)
-- * Empty
, Empty
, empty
-- * ServingContext
, ServingContext
, servingContext
, scPlatform
, scLocation
, scSecurityType
, scAll
, scAuctionType
, scAppType
-- * PublisherProFile
, PublisherProFile
, publisherProFile
, ppfDirectDealsContact
, ppfAudienceDescription
, ppfLogoURL
, ppfOverview
, ppfIsParent
, ppfSamplePageURL
, ppfSeller
, ppfMediaKitURL
, ppfMobileApps
, ppfBuyerPitchStatement
, ppfDisplayName
, ppfPublisherProFileId
, ppfGooglePlusURL
, ppfDomains
, ppfRateCardInfoURL
, ppfTopHeadlines
, ppfProgrammaticDealsContact
-- * AcceptProposalRequest
, AcceptProposalRequest
, acceptProposalRequest
, aprProposalRevision
-- * Size
, Size
, size
, sHeight
, sWidth
-- * ListFilteredBidRequestsResponse
, ListFilteredBidRequestsResponse
, listFilteredBidRequestsResponse
, lfbrrNextPageToken
, lfbrrCalloutStatusRows
-- * AddDealAssociationRequest
, AddDealAssociationRequest
, addDealAssociationRequest
, adarAssociation
-- * CreativeStatusRow
, CreativeStatusRow
, creativeStatusRow
, csrRowDimensions
, csrBidCount
, csrCreativeStatusId
-- * RealtimeTimeRange
, RealtimeTimeRange
, realtimeTimeRange
, rtrStartTimestamp
-- * DealPauseStatusFirstPausedBy
, DealPauseStatusFirstPausedBy (..)
-- * Note
, Note
, note
, nProposalRevision
, nNote
, nNoteId
, nCreatorRole
, nCreateTime
-- * CreativeOpenAuctionStatus
, CreativeOpenAuctionStatus (..)
-- * DealSyndicationProduct
, DealSyndicationProduct (..)
-- * GuaranteedFixedPriceTermsReservationType
, GuaranteedFixedPriceTermsReservationType (..)
-- * ListFilterSetsResponse
, ListFilterSetsResponse
, listFilterSetsResponse
, lfsrNextPageToken
, lfsrFilterSets
-- * Money
, Money
, money
, mCurrencyCode
, mNanos
, mUnits
-- * AddNoteRequest
, AddNoteRequest
, addNoteRequest
, anrNote
-- * PlacementTargeting
, PlacementTargeting
, placementTargeting
, ptURLTargeting
, ptMobileApplicationTargeting
-- * PauseProposalDealsRequest
, PauseProposalDealsRequest
, pauseProposalDealsRequest
, ppdrReason
, ppdrExternalDealIds
-- * FilterSetTimeSeriesGranularity
, FilterSetTimeSeriesGranularity (..)
-- * ListCreativeStatusBreakdownByDetailResponseDetailType
, ListCreativeStatusBreakdownByDetailResponseDetailType (..)
-- * AdSize
, AdSize
, adSize
, asHeight
, asWidth
, asSizeType
-- * StopWatchingCreativeRequest
, StopWatchingCreativeRequest
, stopWatchingCreativeRequest
-- * FilterSetPlatformsItem
, FilterSetPlatformsItem (..)
-- * CreativeSizeAllowedFormatsItem
, CreativeSizeAllowedFormatsItem (..)
-- * WatchCreativeRequest
, WatchCreativeRequest
, watchCreativeRequest
, wcrTopic
-- * DealServingMetadata
, DealServingMetadata
, dealServingMetadata
, dsmDealPauseStatus
-- * DeliveryControl
, DeliveryControl
, deliveryControl
, dcCreativeBlockingLevel
, dcFrequencyCaps
, dcDeliveryRateType
-- * ResumeProposalDealsRequest
, ResumeProposalDealsRequest
, resumeProposalDealsRequest
, rpdrExternalDealIds
-- * PricePerBuyer
, PricePerBuyer
, pricePerBuyer
, ppbPrice
, ppbAdvertiserIds
, ppbBuyer
-- * Creative
, Creative
, creative
, cAPIUpdateTime
, cDetectedLanguages
, cAdvertiserName
, cAdChoicesDestinationURL
, cAgencyId
, cCorrections
, cClickThroughURLs
, cRestrictedCategories
, cDetectedProductCategories
, cDealsStatus
, cCreativeId
, cVideo
, cAdTechnologyProviders
, cNATive
, cDetectedSensitiveCategories
, cImpressionTrackingURLs
, cAccountId
, cAttributes
, cVersion
, cVendorIds
, cDetectedAdvertiserIds
, cHTML
, cServingRestrictions
, cDetectedDomains
, cOpenAuctionStatus
, cDeclaredClickThroughURLs
-- * AppContext
, AppContext
, appContext
, acAppTypes
-- * MarketplaceTargeting
, MarketplaceTargeting
, marketplaceTargeting
, mtGeoTargeting
, mtTechnologyTargeting
, mtPlacementTargeting
, mtVideoTargeting
, mtInventorySizeTargeting
-- * BidResponseWithoutBidsStatusRowStatus
, BidResponseWithoutBidsStatusRowStatus (..)
-- * ListBidResponseErrorsResponse
, ListBidResponseErrorsResponse
, listBidResponseErrorsResponse
, lbrerNextPageToken
, lbrerCalloutStatusRows
-- * Correction
, Correction
, correction
, cContexts
, cDetails
, cType
-- * CreativeDealAssociation
, CreativeDealAssociation
, creativeDealAssociation
, cdaCreativeId
, cdaAccountId
, cdaDealsId
-- * Seller
, Seller
, seller
, sAccountId
, sSubAccountId
-- * PublisherProFileMobileApplicationAppStore
, PublisherProFileMobileApplicationAppStore (..)
-- * CreativeAttributesItem
, CreativeAttributesItem (..)
-- * ListCreativesResponse
, ListCreativesResponse
, listCreativesResponse
, lcrNextPageToken
, lcrCreatives
-- * AdTechnologyProviders
, AdTechnologyProviders
, adTechnologyProviders
, atpHasUnidentifiedProvider
, atpDetectedProviderIds
-- * RowDimensions
, RowDimensions
, rowDimensions
, rdPublisherIdentifier
, rdTimeInterval
-- * OperatingSystemTargeting
, OperatingSystemTargeting
, operatingSystemTargeting
, ostOperatingSystemVersionCriteria
, ostOperatingSystemCriteria
-- * ListCreativeStatusBreakdownByDetailResponse
, ListCreativeStatusBreakdownByDetailResponse
, listCreativeStatusBreakdownByDetailResponse
, lcsbbdrNextPageToken
, lcsbbdrDetailType
, lcsbbdrFilteredBidDetailRows
-- * DayPartTargetingTimeZoneType
, DayPartTargetingTimeZoneType (..)
-- * SecurityContext
, SecurityContext
, securityContext
, scSecurities
-- * PlatformContextPlatformsItem
, PlatformContextPlatformsItem (..)
-- * Date
, Date
, date
, dDay
, dYear
, dMonth
-- * ClientUserStatus
, ClientUserStatus (..)
-- * CancelNegotiationRequest
, CancelNegotiationRequest
, cancelNegotiationRequest
-- * ContactInformation
, ContactInformation
, contactInformation
, ciEmail
, ciName
-- * ServingContextAll
, ServingContextAll (..)
-- * AbsoluteDateRange
, AbsoluteDateRange
, absoluteDateRange
, adrEndDate
, adrStartDate
-- * CreativeRestrictionsCreativeFormat
, CreativeRestrictionsCreativeFormat (..)
-- * AuctionContext
, AuctionContext
, auctionContext
, acAuctionTypes
-- * Deal
, Deal
, deal
, dAvailableStartTime
, dExternalDealId
, dBuyerPrivateData
, dIsSetupComplete
, dWebPropertyCode
, dDeliveryControl
, dDealServingMetadata
, dProposalId
, dTargeting
, dDealId
, dCreativeRestrictions
, dSyndicationProduct
, dCreateProductRevision
, dUpdateTime
, dTargetingCriterion
, dSellerContacts
, dCreateProductId
, dDisplayName
, dProgrammaticCreativeSource
, dAvailableEndTime
, dCreativePreApprovalPolicy
, dDescription
, dCreateTime
, dCreativeSafeFrameCompatibility
, dDealTerms
-- * CreativeRestrictions
, CreativeRestrictions
, creativeRestrictions
, crCreativeFormat
, crSkippableAdType
, crCreativeSpecifications
-- * ProposalOriginatorRole
, ProposalOriginatorRole (..)
-- * Proposal
, Proposal
, proposal
, pBuyerPrivateData
, pIsSetupComplete
, pDeals
, pProposalRevision
, pBuyerContacts
, pOriginatorRole
, pBilledBuyer
, pPrivateAuctionId
, pIsRenegotiating
, pSeller
, pProposalId
, pUpdateTime
, pSellerContacts
, pDisplayName
, pNotes
, pProposalState
, pLastUpdaterOrCommentorRole
, pTermsAndConditions
, pBuyer
-- * RelativeDateRange
, RelativeDateRange
, relativeDateRange
, rdrOffSetDays
, rdrDurationDays
-- * CreativeSizeNATiveTemplate
, CreativeSizeNATiveTemplate (..)
-- * PauseProposalRequest
, PauseProposalRequest
, pauseProposalRequest
, pprReason
-- * FirstPartyMobileApplicationTargeting
, FirstPartyMobileApplicationTargeting
, firstPartyMobileApplicationTargeting
, fpmatTargetedAppIds
, fpmatExcludedAppIds
-- * MetricValue
, MetricValue
, metricValue
, mvValue
, mvVariance
-- * FilterSetEnvironment
, FilterSetEnvironment (..)
-- * CompleteSetupRequest
, CompleteSetupRequest
, completeSetupRequest
-- * FilteredBidCreativeRow
, FilteredBidCreativeRow
, filteredBidCreativeRow
, fbcrCreativeId
, fbcrRowDimensions
, fbcrBidCount
-- * LocationContext
, LocationContext
, locationContext
, lcGeoCriteriaIds
-- * DayPartDayOfWeek
, DayPartDayOfWeek (..)
-- * Xgafv
, Xgafv (..)
-- * TargetingCriteria
, TargetingCriteria
, targetingCriteria
, tcKey
, tcExclusions
, tcInclusions
-- * DealTermsBrandingType
, DealTermsBrandingType (..)
-- * CalloutStatusRow
, CalloutStatusRow
, calloutStatusRow
, cRowDimensions
, cCalloutStatusId
, cImpressionCount
-- * URLTargeting
, URLTargeting
, urlTargeting
, utTargetedURLs
, utExcludedURLs
-- * BidResponseWithoutBidsStatusRow
, BidResponseWithoutBidsStatusRow
, bidResponseWithoutBidsStatusRow
, brwbsrStatus
, brwbsrRowDimensions
, brwbsrImpressionCount
-- * FilterSet
, FilterSet
, filterSet
, fsPlatforms
, fsRealtimeTimeRange
, fsEnvironment
, fsFormats
, fsFormat
, fsCreativeId
, fsBreakdownDimensions
, fsSellerNetworkIds
, fsDealId
, fsAbsoluteDateRange
, fsName
, fsRelativeDateRange
, fsTimeSeriesGranularity
, fsPublisherIdentifiers
-- * TimeInterval
, TimeInterval
, timeInterval
, tiStartTime
, tiEndTime
-- * ProposalLastUpdaterOrCommentorRole
, ProposalLastUpdaterOrCommentorRole (..)
-- * RemoveDealAssociationRequest
, RemoveDealAssociationRequest
, removeDealAssociationRequest
, rdarAssociation
-- * ClientEntityType
, ClientEntityType (..)
-- * TargetingValue
, TargetingValue
, targetingValue
, tvCreativeSizeValue
, tvStringValue
, tvLongValue
, tvDayPartTargetingValue
-- * ListNonBillableWinningBidsResponse
, ListNonBillableWinningBidsResponse
, listNonBillableWinningBidsResponse
, lnbwbrNextPageToken
, lnbwbrNonBillableWinningBidStatusRows
-- * FrequencyCapTimeUnitType
, FrequencyCapTimeUnitType (..)
-- * CreativeRestrictedCategoriesItem
, CreativeRestrictedCategoriesItem (..)
-- * ListLosingBidsResponse
, ListLosingBidsResponse
, listLosingBidsResponse
, llbrNextPageToken
, llbrCreativeStatusRows
-- * PricePricingType
, PricePricingType (..)
-- * NonBillableWinningBidStatusRowStatus
, NonBillableWinningBidStatusRowStatus (..)
-- * VideoTargetingExcludedPositionTypesItem
, VideoTargetingExcludedPositionTypesItem (..)
-- * ClientStatus
, ClientStatus (..)
-- * Price
, Price
, price
, pAmount
, pPricingType
-- * ListImpressionMetricsResponse
, ListImpressionMetricsResponse
, listImpressionMetricsResponse
, limrNextPageToken
, limrImpressionMetricsRows
-- * AdSizeSizeType
, AdSizeSizeType (..)
-- * CreativeDealsStatus
, CreativeDealsStatus (..)
-- * CreativeSizeSkippableAdType
, CreativeSizeSkippableAdType (..)
-- * PlatformContext
, PlatformContext
, platformContext
, pcPlatforms
-- * AccountsFinalizedProposalsListFilterSyntax
, AccountsFinalizedProposalsListFilterSyntax (..)
-- * VideoContent
, VideoContent
, videoContent
, vcVideoVastXML
, vcVideoURL
-- * TimeOfDay'
, TimeOfDay'
, timeOfDay
, todNanos
, todHours
, todMinutes
, todSeconds
-- * NoteCreatorRole
, NoteCreatorRole (..)
-- * CreativeSizeCreativeSizeType
, CreativeSizeCreativeSizeType (..)
-- * GuaranteedFixedPriceTerms
, GuaranteedFixedPriceTerms
, guaranteedFixedPriceTerms
, gfptGuaranteedLooks
, gfptGuaranteedImpressions
, gfptPercentShareOfVoice
, gfptReservationType
, gfptFixedPrices
, gfptMinimumDailyLooks
, gfptImpressionCap
-- * NATiveContent
, NATiveContent
, nATiveContent
, natcStoreURL
, natcImage
, natcAdvertiserName
, natcAppIcon
, natcPriceDisplayText
, natcClickTrackingURL
, natcClickLinkURL
, natcBody
, natcHeadline
, natcCallToAction
, natcVideoURL
, natcStarRating
, natcLogo
-- * VideoTargeting
, VideoTargeting
, videoTargeting
, vtTargetedPositionTypes
, vtExcludedPositionTypes
-- * ClientUser
, ClientUser
, clientUser
, cuEmail
, cuStatus
, cuUserId
, cuClientAccountId
-- * Product
, Product
, product
, proAvailableStartTime
, proWebPropertyCode
, proTerms
, proProductRevision
, proHasCreatorSignedOff
, proSeller
, proSyndicationProduct
, proUpdateTime
, proCreatorContacts
, proTargetingCriterion
, proDisplayName
, proPublisherProFileId
, proAvailableEndTime
, proProductId
, proCreateTime
-- * ListClientUserInvitationsResponse
, ListClientUserInvitationsResponse
, listClientUserInvitationsResponse
, lcuirNextPageToken
, lcuirInvitations
-- * DayPart
, DayPart
, dayPart
, dpStartTime
, dpEndTime
, dpDayOfWeek
-- * MobileApplicationTargeting
, MobileApplicationTargeting
, mobileApplicationTargeting
, matFirstPartyTargeting
-- * ListClientsResponse
, ListClientsResponse
, listClientsResponse
, lNextPageToken
, lClients
-- * ListCreativeStatusBreakdownByCreativeResponse
, ListCreativeStatusBreakdownByCreativeResponse
, listCreativeStatusBreakdownByCreativeResponse
, lcsbbcrNextPageToken
, lcsbbcrFilteredBidCreativeRows
-- * FrequencyCap
, FrequencyCap
, frequencyCap
, fcMaxImpressions
, fcNumTimeUnits
, fcTimeUnitType
-- * ListBidResponsesWithoutBidsResponse
, ListBidResponsesWithoutBidsResponse
, listBidResponsesWithoutBidsResponse
, lbrwbrNextPageToken
, lbrwbrBidResponseWithoutBidsStatusRows
-- * ServingRestriction
, ServingRestriction
, servingRestriction
, srStatus
, srContexts
, srDisApprovalReasons
, srDisApproval
-- * CreativeSpecification
, CreativeSpecification
, creativeSpecification
, csCreativeCompanionSizes
, csCreativeSize
-- * ImpressionMetricsRow
, ImpressionMetricsRow
, impressionMetricsRow
, imrRowDimensions
, imrAvailableImpressions
, imrSuccessfulResponses
, imrInventoryMatches
, imrBidRequests
, imrResponsesWithBids
-- * AccountsProposalsListFilterSyntax
, AccountsProposalsListFilterSyntax (..)
-- * CreativeSize
, CreativeSize
, creativeSize
, csSize
, csCompanionSizes
, csSkippableAdType
, csCreativeSizeType
, csAllowedFormats
, csNATiveTemplate
-- * CorrectionType
, CorrectionType (..)
-- * ListProposalsResponse
, ListProposalsResponse
, listProposalsResponse
, lprProposals
, lprNextPageToken
-- * InventorySizeTargeting
, InventorySizeTargeting
, inventorySizeTargeting
, istTargetedInventorySizes
, istExcludedInventorySizes
-- * DealTerms
, DealTerms
, dealTerms
, dtEstimatedGrossSpend
, dtNonGuaranteedFixedPriceTerms
, dtNonGuaranteedAuctionTerms
, dtBrandingType
, dtEstimatedImpressionsPerDay
, dtSellerTimeZone
, dtGuaranteedFixedPriceTerms
, dtDescription
-- * AuctionContextAuctionTypesItem
, AuctionContextAuctionTypesItem (..)
-- * ClientUserInvitation
, ClientUserInvitation
, clientUserInvitation
, cuiEmail
, cuiInvitationId
, cuiClientAccountId
-- * NonBillableWinningBidStatusRow
, NonBillableWinningBidStatusRow
, nonBillableWinningBidStatusRow
, nbwbsrStatus
, nbwbsrRowDimensions
, nbwbsrBidCount
-- * ListProductsResponse
, ListProductsResponse
, listProductsResponse
, lisNextPageToken
, lisProducts
-- * FilterSetFormat
, FilterSetFormat (..)
-- * Buyer
, Buyer
, buyer
, bAccountId
-- * ListBidMetricsResponse
, ListBidMetricsResponse
, listBidMetricsResponse
, lbmrNextPageToken
, lbmrBidMetricsRows
-- * Client
, Client
, client
, cEntityName
, cStatus
, cEntityType
, cRole
, cVisibleToSeller
, cPartnerClientId
, cClientAccountId
, cClientName
, cEntityId
-- * DisApproval
, DisApproval
, disApproval
, daReason
, daDetails
-- * AppContextAppTypesItem
, AppContextAppTypesItem (..)
-- * FilterSetBreakdownDimensionsItem
, FilterSetBreakdownDimensionsItem (..)
) where
import Network.Google.AdExchangeBuyer2.Types.Product
import Network.Google.AdExchangeBuyer2.Types.Sum
import Network.Google.Prelude
-- | Default request referring to version 'v2beta1' of the Ad Exchange Buyer API II. This contains the host and root path used as a starting point for constructing service requests.
adExchangeBuyer2Service :: ServiceConfig
adExchangeBuyer2Service
= defaultService
(ServiceId "adexchangebuyer2:v2beta1")
"adexchangebuyer.googleapis.com"
-- | Manage your Ad Exchange buyer account configuration
adExchangeBuyerScope :: Proxy '["https://www.googleapis.com/auth/adexchange.buyer"]
adExchangeBuyerScope = Proxy
| brendanhay/gogol | gogol-adexchangebuyer2/gen/Network/Google/AdExchangeBuyer2/Types.hs | mpl-2.0 | 22,654 | 0 | 7 | 5,511 | 2,447 | 1,708 | 739 | 673 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Games.Players.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Get the collection of players for the currently authenticated user.
--
-- /See:/ <https://developers.google.com/games/services/ Google Play Game Services API Reference> for @games.players.list@.
module Network.Google.Resource.Games.Players.List
(
-- * REST Resource
PlayersListResource
-- * Creating a Request
, playersList
, PlayersList
-- * Request Lenses
, plConsistencyToken
, plCollection
, plLanguage
, plPageToken
, plMaxResults
) where
import Network.Google.Games.Types
import Network.Google.Prelude
-- | A resource alias for @games.players.list@ method which the
-- 'PlayersList' request conforms to.
type PlayersListResource =
"games" :>
"v1" :>
"players" :>
"me" :>
"players" :>
Capture "collection" PlayersListCollection :>
QueryParam "consistencyToken" (Textual Int64) :>
QueryParam "language" Text :>
QueryParam "pageToken" Text :>
QueryParam "maxResults" (Textual Int32) :>
QueryParam "alt" AltJSON :>
Get '[JSON] PlayerListResponse
-- | Get the collection of players for the currently authenticated user.
--
-- /See:/ 'playersList' smart constructor.
data PlayersList = PlayersList'
{ _plConsistencyToken :: !(Maybe (Textual Int64))
, _plCollection :: !PlayersListCollection
, _plLanguage :: !(Maybe Text)
, _plPageToken :: !(Maybe Text)
, _plMaxResults :: !(Maybe (Textual Int32))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PlayersList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plConsistencyToken'
--
-- * 'plCollection'
--
-- * 'plLanguage'
--
-- * 'plPageToken'
--
-- * 'plMaxResults'
playersList
:: PlayersListCollection -- ^ 'plCollection'
-> PlayersList
playersList pPlCollection_ =
PlayersList'
{ _plConsistencyToken = Nothing
, _plCollection = pPlCollection_
, _plLanguage = Nothing
, _plPageToken = Nothing
, _plMaxResults = Nothing
}
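-- Illustrative only: 'someCollection' below stands for whichever
-- 'PlayersListCollection' value is needed (the type comes from
-- "Network.Google.Games.Types"), and the usual lens operators are assumed
-- to be in scope:
--
-- > playersList someCollection & plMaxResults ?~ 25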
-- | The last-seen mutation timestamp.
plConsistencyToken :: Lens' PlayersList (Maybe Int64)
plConsistencyToken
= lens _plConsistencyToken
(\ s a -> s{_plConsistencyToken = a})
. mapping _Coerce
-- | Collection of players being retrieved
plCollection :: Lens' PlayersList PlayersListCollection
plCollection
= lens _plCollection (\ s a -> s{_plCollection = a})
-- | The preferred language to use for strings returned by this method.
plLanguage :: Lens' PlayersList (Maybe Text)
plLanguage
= lens _plLanguage (\ s a -> s{_plLanguage = a})
-- | The token returned by the previous request.
plPageToken :: Lens' PlayersList (Maybe Text)
plPageToken
= lens _plPageToken (\ s a -> s{_plPageToken = a})
-- | The maximum number of player resources to return in the response, used
-- for paging. For any response, the actual number of player resources
-- returned may be less than the specified maxResults.
plMaxResults :: Lens' PlayersList (Maybe Int32)
plMaxResults
= lens _plMaxResults (\ s a -> s{_plMaxResults = a})
. mapping _Coerce
instance GoogleRequest PlayersList where
type Rs PlayersList = PlayerListResponse
type Scopes PlayersList =
'["https://www.googleapis.com/auth/games",
"https://www.googleapis.com/auth/plus.login"]
requestClient PlayersList'{..}
= go _plCollection _plConsistencyToken _plLanguage
_plPageToken
_plMaxResults
(Just AltJSON)
gamesService
where go
= buildClient (Proxy :: Proxy PlayersListResource)
mempty
| rueshyna/gogol | gogol-games/gen/Network/Google/Resource/Games/Players/List.hs | mpl-2.0 | 4,636 | 0 | 18 | 1,149 | 673 | 390 | 283 | 96 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.GlobalAddresses.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a list of global addresses.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.globalAddresses.list@.
module Network.Google.Resource.Compute.GlobalAddresses.List
(
-- * REST Resource
GlobalAddressesListResource
-- * Creating a Request
, globalAddressesList
, GlobalAddressesList
-- * Request Lenses
, galReturnPartialSuccess
, galOrderBy
, galProject
, galFilter
, galPageToken
, galMaxResults
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.globalAddresses.list@ method which the
-- 'GlobalAddressesList' request conforms to.
type GlobalAddressesListResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"global" :>
"addresses" :>
QueryParam "returnPartialSuccess" Bool :>
QueryParam "orderBy" Text :>
QueryParam "filter" Text :>
QueryParam "pageToken" Text :>
QueryParam "maxResults" (Textual Word32) :>
QueryParam "alt" AltJSON :> Get '[JSON] AddressList
-- | Retrieves a list of global addresses.
--
-- /See:/ 'globalAddressesList' smart constructor.
data GlobalAddressesList =
GlobalAddressesList'
{ _galReturnPartialSuccess :: !(Maybe Bool)
, _galOrderBy :: !(Maybe Text)
, _galProject :: !Text
, _galFilter :: !(Maybe Text)
, _galPageToken :: !(Maybe Text)
, _galMaxResults :: !(Textual Word32)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'GlobalAddressesList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'galReturnPartialSuccess'
--
-- * 'galOrderBy'
--
-- * 'galProject'
--
-- * 'galFilter'
--
-- * 'galPageToken'
--
-- * 'galMaxResults'
globalAddressesList
:: Text -- ^ 'galProject'
-> GlobalAddressesList
globalAddressesList pGalProject_ =
GlobalAddressesList'
{ _galReturnPartialSuccess = Nothing
, _galOrderBy = Nothing
, _galProject = pGalProject_
, _galFilter = Nothing
, _galPageToken = Nothing
, _galMaxResults = 500
}
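-- Illustrative only, assuming the usual lens operators are in scope and a
-- made-up project id:
--
-- > globalAddressesList "my-project" & galMaxResults .~ 100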
-- | Opt-in for partial success behavior which provides partial results in
-- case of failure. The default value is false.
galReturnPartialSuccess :: Lens' GlobalAddressesList (Maybe Bool)
galReturnPartialSuccess
= lens _galReturnPartialSuccess
(\ s a -> s{_galReturnPartialSuccess = a})
-- | Sorts list results by a certain order. By default, results are returned
-- in alphanumerical order based on the resource name. You can also sort
-- results in descending order based on the creation timestamp using
-- \`orderBy=\"creationTimestamp desc\"\`. This sorts results based on the
-- \`creationTimestamp\` field in reverse chronological order (newest
-- result first). Use this to sort resources like operations so that the
-- newest operation is returned first. Currently, only sorting by \`name\`
-- or \`creationTimestamp desc\` is supported.
galOrderBy :: Lens' GlobalAddressesList (Maybe Text)
galOrderBy
= lens _galOrderBy (\ s a -> s{_galOrderBy = a})
-- | Project ID for this request.
galProject :: Lens' GlobalAddressesList Text
galProject
= lens _galProject (\ s a -> s{_galProject = a})
-- | A filter expression that filters resources listed in the response. The
-- expression must specify the field name, a comparison operator, and the
-- value that you want to use for filtering. The value must be a string, a
-- number, or a boolean. The comparison operator must be either \`=\`,
-- \`!=\`, \`>\`, or \`\<\`. For example, if you are filtering Compute
-- Engine instances, you can exclude instances named \`example-instance\`
-- by specifying \`name != example-instance\`. You can also filter nested
-- fields. For example, you could specify \`scheduling.automaticRestart =
-- false\` to include instances only if they are not scheduled for
-- automatic restarts. You can use filtering on nested fields to filter
-- based on resource labels. To filter on multiple expressions, provide
-- each separate expression within parentheses. For example: \`\`\`
-- (scheduling.automaticRestart = true) (cpuPlatform = \"Intel Skylake\")
-- \`\`\` By default, each expression is an \`AND\` expression. However,
-- you can include \`AND\` and \`OR\` expressions explicitly. For example:
-- \`\`\` (cpuPlatform = \"Intel Skylake\") OR (cpuPlatform = \"Intel
-- Broadwell\") AND (scheduling.automaticRestart = true) \`\`\`
galFilter :: Lens' GlobalAddressesList (Maybe Text)
galFilter
= lens _galFilter (\ s a -> s{_galFilter = a})
-- | Specifies a page token to use. Set \`pageToken\` to the
-- \`nextPageToken\` returned by a previous list request to get the next
-- page of results.
galPageToken :: Lens' GlobalAddressesList (Maybe Text)
galPageToken
= lens _galPageToken (\ s a -> s{_galPageToken = a})
-- | The maximum number of results per page that should be returned. If the
-- number of available results is larger than \`maxResults\`, Compute
-- Engine returns a \`nextPageToken\` that can be used to get the next page
-- of results in subsequent list requests. Acceptable values are \`0\` to
-- \`500\`, inclusive. (Default: \`500\`)
galMaxResults :: Lens' GlobalAddressesList Word32
galMaxResults
= lens _galMaxResults
(\ s a -> s{_galMaxResults = a})
. _Coerce
instance GoogleRequest GlobalAddressesList where
type Rs GlobalAddressesList = AddressList
type Scopes GlobalAddressesList =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/compute.readonly"]
requestClient GlobalAddressesList'{..}
= go _galProject _galReturnPartialSuccess _galOrderBy
_galFilter
_galPageToken
(Just _galMaxResults)
(Just AltJSON)
computeService
where go
= buildClient
(Proxy :: Proxy GlobalAddressesListResource)
mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/GlobalAddresses/List.hs | mpl-2.0 | 7,060 | 0 | 19 | 1,509 | 756 | 452 | 304 | 108 | 1 |
-- This file is part of purebred
-- Copyright (C) 2019 Fraser Tweedale
--
-- purebred is free software: you can redistribute it and/or modify
-- it under the terms of the GNU Affero General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU Affero General Public License for more details.
--
-- You should have received a copy of the GNU Affero General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
{-# LANGUAGE OverloadedStrings #-}
{- |
Information flow control types and functions.
-}
module Purebred.Types.IFC
(
Tainted
, taint
, untaint
-- * Sanitisation functions
, sanitiseText
) where
import Data.Char (chr, isControl, ord)
import qualified Data.Text as T
-- | A tainted value can only be unwrapped by applying 'untaint'
-- with a sanitisation function. This approach is used instead of
-- type classes because how you untaint a value might depend on how
-- that value will be used.
--
-- You /could/ just use 'untaint id' to get the value out.
-- But you probably shouldn't.
--
newtype Tainted a = Tainted a
-- | Taint a value
taint :: a -> Tainted a
taint = Tainted
-- | Untaint a value.
untaint :: (a -> b) -> Tainted a -> b
untaint f (Tainted a) = f a
-- | Convert or strip control characters from input.
--
-- * Tab (HT) is replaced with 8 spaces.
-- * Other C0 codes (except CR and LF) and DEL are replaced with
-- <https://en.wikipedia.org/wiki/Control_Pictures Control Pictures>
-- * C1 and all other control characters are replaced with
-- REPLACEMENT CHARACTER U+FFFD
--
sanitiseText :: T.Text -> T.Text
sanitiseText = T.map substControl . T.replace "\t" " "
where
substControl c
| c == '\n' || c == '\r' = c -- CR and LF are OK
| c <= '\x1f' = chr (0x2400 + ord c)
| c == '\DEL' = '\x2421'
| isControl c = '\xfffd' -- REPLACEMENT CHARACTER
| otherwise = c
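-- Illustrative use of the pieces above: sanitise untrusted text before
-- rendering it.
--
-- > displayable :: Tainted T.Text -> T.Text
-- > displayable = untaint sanitiseText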
| purebred-mua/purebred | src/Purebred/Types/IFC.hs | agpl-3.0 | 2,177 | 0 | 12 | 449 | 273 | 161 | 112 | 22 | 1 |
module StringUtils where
import Data.Text
import Text.Regex.Posix
trimws :: String -> IO String
trimws = return . unpack . Data.Text.strip . pack
pattern = "[\\ \\t]*\\/\\/[\\ \\t]*((TODO|NOTE)\\([a-zA-Z,\\ \\t\\[\\]]*\\)\\:| \\-)"
isNote :: String -> Bool
isNote str = str =~ pattern
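-- Illustrative examples (behaviour follows the POSIX regex above):
--
-- > isNote "  // TODO(alice): fix parsing"   -- True
-- > isNote "plain line of code"              -- False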
| SteamPoweredAnimal/TaskL | StringUtils.hs | lgpl-3.0 | 289 | 0 | 7 | 44 | 76 | 42 | 34 | 8 | 1 |
module Syntax where
import Data.Set (union)
import qualified Data.Set as S
-- data Ty = Nat | Arr Ty Ty
-- deriving Show
type Op = String
type SymName = String
data Expr = Num Int
| Var SymName
| App Expr Expr
| Lam SymName Expr
| BinOp Op Expr Expr
| Let SymName Expr Expr
| Ifz Expr Expr Expr
deriving Show
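-- | Collect the free variables of an expression.  'Lam' binders are removed
-- from the free variables of their body; a 'Let' binder scopes over the body
-- only (the binding is non-recursive), and 'Ifz' simply unions its branches.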
findFreeVars :: Expr -> [SymName]
findFreeVars expr = S.toList $ go expr
where go (Num _) = S.empty
go (Var var) = S.singleton var
go (App e1 e2) = go e1 `union` go e2
go (Lam var e) = S.delete var $ go e
go (BinOp _ e1 e2) = go e1 `union` go e2
go (Let var e1 e2) = go e1 `union` S.delete var (go e2)
go (Ifz e1 e2 e3) = go e1 `union` go e2 `union` go e3
| scturtle/turtlelang | Syntax.hs | unlicense | 784 | 0 | 10 | 265 | 325 | 173 | 152 | 22 | 7 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Openshift.V1beta1.ScaleSpec where
import GHC.Generics
import qualified Data.Aeson
-- | describes the attributes of a scale subresource
data ScaleSpec = ScaleSpec
{ replicas :: Maybe Integer -- ^ desired number of instances for the scaled object.
} deriving (Show, Eq, Generic)
instance Data.Aeson.FromJSON ScaleSpec
instance Data.Aeson.ToJSON ScaleSpec
| minhdoboi/deprecated-openshift-haskell-api | openshift/lib/Openshift/V1beta1/ScaleSpec.hs | apache-2.0 | 541 | 0 | 9 | 80 | 78 | 47 | 31 | 13 | 0 |
-- Copyright 2014 (c) Diego Souza <[email protected]>
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module Leela.Data.Timeout
( Handle ()
, TimeoutInUs
, TimeoutManager ()
, timeoutManager
, open
, creat
, touch
, purge
, withHandle
) where
import GHC.Event
import Data.IORef
import Control.Monad
import Control.Exception
type TimeoutInUs = Int
type Finalizer = IO ()
data Handle = Handle Int TimeoutKey (IORef (IO ()))
data TimeoutManager = TimeoutManager (IORef Int)
timeoutManager :: IO TimeoutManager
timeoutManager = liftM TimeoutManager (newIORef 0)
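-- | Register a timeout (in microseconds) that eventually runs the given
-- finalizer.  Returns the number of live handles after registration together
-- with the new handle; the manager's counter is decremented at most once,
-- either when the timeout fires or when the handle is purged.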
open :: TimeoutManager -> TimeoutInUs -> Finalizer -> IO (Int, Handle)
open (TimeoutManager ref) timeout fin = do
tm <- getSystemTimerManager
active <- atomicModifyIORef' ref (\x -> (x + 1, x + 1))
ioref <- newIORef (atomicModifyIORef' ref (\x -> (x - 1, ())))
cookie <- registerTimeout tm timeout (executeOnce ioref >> fin)
return (active, Handle timeout cookie ioref)
creat :: TimeoutManager -> TimeoutInUs -> Finalizer -> IO ()
creat _ timeout fin = do
tm <- getSystemTimerManager
void $ registerTimeout tm timeout fin
touch :: Handle -> IO ()
touch (Handle timeout cookie _) = do
tm <- getSystemTimerManager
updateTimeout tm cookie timeout
purge :: Handle -> IO ()
purge (Handle _ cookie ioref) = do
tm <- getSystemTimerManager
unregisterTimeout tm cookie
executeOnce ioref
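-- Run the action stored in the IORef at most once: atomically swap in a
-- no-op and execute whatever was there before.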
executeOnce :: IORef (IO ()) -> IO ()
executeOnce ioref = do
join $ atomicModifyIORef' ioref (\io -> (return (), io))
withHandle :: TimeoutManager -> Int -> Finalizer -> IO a -> IO a
withHandle tm timeout fin action = bracket (open tm timeout fin)
(purge . snd)
(const $ action)
| locaweb/leela | src/warpdrive/src/Leela/Data/Timeout.hs | apache-2.0 | 2,323 | 0 | 14 | 543 | 617 | 322 | 295 | 51 | 1 |
{-# LANGUAGE NamedFieldPuns, RecordWildCards, DeriveDataTypeable #-}
module HEP.Physics.MSSM.SLHA where
import qualified Data.ByteString as B
import Crypto.Classes
import Data.Digest.Pure.MD5
import Data.Typeable
import Data.Data
data SLHA = SLHA {
slhaContent :: B.ByteString
} deriving (Show,Typeable,Data)
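-- | MD5 digest of the raw SLHA file contents.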
slhaMD5 :: SLHA -> MD5Digest
slhaMD5 (SLHA bstr) = hash' bstr
{-
import Text.StringTemplate.Helpers
import HEP.Physics.MSSM.OutputPhys
render :: String -> [(String,String)] -> String
render = flip render1
pp = show
outputPhysToSLHA :: String -> OutputPhys -> String
outputPhysToSLHA tmpl OutputPhys {..} =
render tmpl [ ("mass_Mh" , pp mass_Mh )
, ("mass_MHH" , pp mass_MHH )
, ("mass_MH3" , pp mass_MH3 )
, ("mass_MHc" , pp mass_MHc )
, ("mass_MNE1", pp mass_MNE1)
, ("mass_MNE2", pp mass_MNE2)
, ("mass_MNE3", pp mass_MNE3)
, ("mass_MNE4", pp mass_MNE4)
, ("mass_MC1" , pp mass_MC1 )
, ("mass_MC2" , pp mass_MC2 )
, ("mass_MSG" , pp mass_MSG )
, ("mass_MSuL", pp mass_MSuL)
, ("mass_MSdL", pp mass_MSdL)
, ("mass_MSeL", pp mass_MSeL)
, ("mass_MSne", pp mass_MSne)
, ("mass_MSuR", pp mass_MSuR)
, ("mass_MSdR", pp mass_MSdR)
, ("mass_MSeR", pp mass_MSeR)
, ("mass_MScL", pp mass_MScL)
, ("mass_MSsL", pp mass_MSsL)
, ("mass_MSmL", pp mass_MSmL)
, ("mass_MSnm", pp mass_MSnm)
, ("mass_MScR", pp mass_MScR)
, ("mass_MSsR", pp mass_MSsR)
, ("mass_MSmR", pp mass_MSmR)
, ("mass_MSt1", pp mass_MSt1)
, ("mass_MSb1", pp mass_MSb1)
, ("mass_MSl1", pp mass_MSl1)
, ("mass_MSn1", pp mass_MSn1)
, ("mass_MSt2", pp mass_MSt2)
, ("mass_MSb2", pp mass_MSb2)
, ("mass_MSl2", pp mass_MSl2) ] -} | wavewave/MSSMType | src/HEP/Physics/MSSM/SLHA.hs | bsd-2-clause | 2,055 | 0 | 9 | 686 | 98 | 59 | 39 | 12 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Renewal.Types ( renewalApi )
import Servant.JS
import Servant.JS.JQuery
import System.Environment ( getArgs )
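-- Generates jQuery-flavoured JavaScript bindings for 'renewalApi' and writes
-- them to the file given as the first command-line argument, prefixing
-- request URLs with "/api".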
main :: IO ()
main = do
[out] <- getArgs
let def = defCommonGeneratorOptions
writeJSForAPI renewalApi (jqueryWith (def { urlPrefix = "/api" })) out
| kevin-li-195/books | gen-client/Main.hs | bsd-3-clause | 328 | 0 | 12 | 55 | 97 | 54 | 43 | 11 | 1 |
{-# LANGUAGE CPP, MagicHash, NondecreasingIndentation, TupleSections,
RecordWildCards #-}
{-# OPTIONS -fno-cse #-}
-- -fno-cse is needed for GLOBAL_VAR's to behave properly
-----------------------------------------------------------------------------
--
-- GHC Interactive User Interface
--
-- (c) The GHC Team 2005-2006
--
-----------------------------------------------------------------------------
module InteractiveUI (
interactiveUI,
GhciSettings(..),
defaultGhciSettings,
ghciCommands,
ghciWelcomeMsg
) where
#include "HsVersions.h"
-- GHCi
import qualified GhciMonad ( args, runStmt )
import GhciMonad hiding ( args, runStmt )
import GhciTags
import Debugger
-- The GHC interface
import DynFlags
import ErrUtils
import GhcMonad ( modifySession )
import qualified GHC
import GHC ( LoadHowMuch(..), Target(..), TargetId(..), InteractiveImport(..),
TyThing(..), Phase, BreakIndex, Resume, SingleStep, Ghc,
handleSourceError )
import HsImpExp
import HsSyn
import HscTypes ( tyThingParent_maybe, handleFlagWarnings, getSafeMode, hsc_IC,
setInteractivePrintName )
import Module
import Name
import Packages ( trusted, getPackageDetails, listVisibleModuleNames, pprFlag )
import PprTyThing
import PrelNames
import RdrName ( RdrName, getGRE_NameQualifier_maybes, getRdrName )
import SrcLoc
import qualified Lexer
import StringBuffer
import Outputable hiding ( printForUser, printForUserPartWay, bold )
-- Other random utilities
import BasicTypes hiding ( isTopLevel )
import Config
import Digraph
import Encoding
import FastString
import Linker
import Maybes ( orElse, expectJust )
import NameSet
import Panic hiding ( showException )
import Util
-- Haskell Libraries
import System.Console.Haskeline as Haskeline
import Control.Applicative hiding (empty)
import Control.DeepSeq (deepseq)
import Control.Monad as Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Data.Array
import qualified Data.ByteString.Char8 as BS
import Data.Char
import Data.Function
import Data.IORef ( IORef, modifyIORef, newIORef, readIORef, writeIORef )
import Data.List ( find, group, intercalate, intersperse, isPrefixOf, nub,
partition, sort, sortBy )
import Data.Maybe
import Exception hiding (catch)
import Foreign.C
#if __GLASGOW_HASKELL__ >= 709
import Foreign
#else
import Foreign.Safe
#endif
import System.Directory
import System.Environment
import System.Exit ( exitWith, ExitCode(..) )
import System.FilePath
import System.IO
import System.IO.Error
import System.IO.Unsafe ( unsafePerformIO )
import System.Process
import Text.Printf
import Text.Read ( readMaybe )
#ifndef mingw32_HOST_OS
import System.Posix hiding ( getEnv )
#else
import qualified System.Win32
#endif
import GHC.Exts ( unsafeCoerce# )
import GHC.IO.Exception ( IOErrorType(InvalidArgument) )
import GHC.IO.Handle ( hFlushAll )
import GHC.TopHandler ( topHandler )
-----------------------------------------------------------------------------
data GhciSettings = GhciSettings {
availableCommands :: [Command],
shortHelpText :: String,
fullHelpText :: String,
defPrompt :: String,
defPrompt2 :: String
}
defaultGhciSettings :: GhciSettings
defaultGhciSettings =
GhciSettings {
availableCommands = ghciCommands,
shortHelpText = defShortHelpText,
fullHelpText = defFullHelpText,
defPrompt = default_prompt,
defPrompt2 = default_prompt2
}
ghciWelcomeMsg :: String
ghciWelcomeMsg = "GHCi, version " ++ cProjectVersion ++
": http://www.haskell.org/ghc/ :? for help"
cmdName :: Command -> String
cmdName (n,_,_) = n
GLOBAL_VAR(macros_ref, [], [Command])
ghciCommands :: [Command]
ghciCommands = [
-- Hugs users are accustomed to :e, so make sure it doesn't overlap
("?", keepGoing help, noCompletion),
("add", keepGoingPaths addModule, completeFilename),
("abandon", keepGoing abandonCmd, noCompletion),
("break", keepGoing breakCmd, completeIdentifier),
("back", keepGoing backCmd, noCompletion),
("browse", keepGoing' (browseCmd False), completeModule),
("browse!", keepGoing' (browseCmd True), completeModule),
("cd", keepGoing' changeDirectory, completeFilename),
("check", keepGoing' checkModule, completeHomeModule),
("continue", keepGoing continueCmd, noCompletion),
("complete", keepGoing completeCmd, noCompletion),
("cmd", keepGoing cmdCmd, completeExpression),
("ctags", keepGoing createCTagsWithLineNumbersCmd, completeFilename),
("ctags!", keepGoing createCTagsWithRegExesCmd, completeFilename),
("def", keepGoing (defineMacro False), completeExpression),
("def!", keepGoing (defineMacro True), completeExpression),
("delete", keepGoing deleteCmd, noCompletion),
("edit", keepGoing' editFile, completeFilename),
("etags", keepGoing createETagsFileCmd, completeFilename),
("force", keepGoing forceCmd, completeExpression),
("forward", keepGoing forwardCmd, noCompletion),
("help", keepGoing help, noCompletion),
("history", keepGoing historyCmd, noCompletion),
("info", keepGoing' (info False), completeIdentifier),
("info!", keepGoing' (info True), completeIdentifier),
("issafe", keepGoing' isSafeCmd, completeModule),
("kind", keepGoing' (kindOfType False), completeIdentifier),
("kind!", keepGoing' (kindOfType True), completeIdentifier),
("load", keepGoingPaths (loadModule_ False), completeHomeModuleOrFile),
("load!", keepGoingPaths (loadModule_ True), completeHomeModuleOrFile),
("list", keepGoing' listCmd, noCompletion),
("module", keepGoing moduleCmd, completeSetModule),
("main", keepGoing runMain, completeFilename),
("print", keepGoing printCmd, completeExpression),
("quit", quit, noCompletion),
("reload", keepGoing' (reloadModule False), noCompletion),
("reload!", keepGoing' (reloadModule True), noCompletion),
("run", keepGoing runRun, completeFilename),
("script", keepGoing' scriptCmd, completeFilename),
("set", keepGoing setCmd, completeSetOptions),
("seti", keepGoing setiCmd, completeSeti),
("show", keepGoing showCmd, completeShowOptions),
("showi", keepGoing showiCmd, completeShowiOptions),
("sprint", keepGoing sprintCmd, completeExpression),
("step", keepGoing stepCmd, completeIdentifier),
("steplocal", keepGoing stepLocalCmd, completeIdentifier),
("stepmodule",keepGoing stepModuleCmd, completeIdentifier),
("type", keepGoing' typeOfExpr, completeExpression),
("trace", keepGoing traceCmd, completeExpression),
("undef", keepGoing undefineMacro, completeMacro),
("unset", keepGoing unsetOptions, completeSetOptions)
]
-- We initialize readline (in the interactiveUI function) to use
-- word_break_chars as the default set of completion word break characters.
-- This can be overridden for a particular command (for example, filename
-- expansion shouldn't consider '/' to be a word break) by setting the third
-- entry in the Command tuple above.
--
-- NOTE: in order for us to override the default correctly, any custom entry
-- must be a SUBSET of word_break_chars.
word_break_chars :: String
word_break_chars = let symbols = "!#$%&*+/<=>?@\\^|-~"
specials = "(),;[]`{}"
spaces = " \t\n"
in spaces ++ specials ++ symbols
flagWordBreakChars :: String
flagWordBreakChars = " \t\n"
keepGoing :: (String -> GHCi ()) -> (String -> InputT GHCi Bool)
keepGoing a str = keepGoing' (lift . a) str
keepGoing' :: Monad m => (String -> m ()) -> String -> m Bool
keepGoing' a str = a str >> return False
keepGoingPaths :: ([FilePath] -> InputT GHCi ()) -> (String -> InputT GHCi Bool)
keepGoingPaths a str
= do case toArgs str of
Left err -> liftIO $ hPutStrLn stderr err
Right args -> a args
return False
defShortHelpText :: String
defShortHelpText = "use :? for help.\n"
defFullHelpText :: String
defFullHelpText =
" Commands available from the prompt:\n" ++
"\n" ++
" <statement> evaluate/run <statement>\n" ++
" : repeat last command\n" ++
" :{\\n ..lines.. \\n:}\\n multiline command\n" ++
" :add [*]<module> ... add module(s) to the current target set\n" ++
" :browse[!] [[*]<mod>] display the names defined by module <mod>\n" ++
" (!: more details; *: all top-level names)\n" ++
" :cd <dir> change directory to <dir>\n" ++
" :cmd <expr> run the commands returned by <expr>::IO String\n" ++
" :complete <dom> [<rng>] <s> list completions for partial input string\n" ++
" :ctags[!] [<file>] create tags file for Vi (default: \"tags\")\n" ++
" (!: use regex instead of line number)\n" ++
" :def <cmd> <expr> define command :<cmd> (later defined command has\n" ++
" precedence, ::<cmd> is always a builtin command)\n" ++
" :edit <file> edit file\n" ++
" :edit edit last module\n" ++
" :etags [<file>] create tags file for Emacs (default: \"TAGS\")\n" ++
" :help, :? display this list of commands\n" ++
" :info[!] [<name> ...] display information about the given names\n" ++
" (!: do not filter instances)\n" ++
" :issafe [<mod>] display safe haskell information of module <mod>\n" ++
" :kind[!] <type> show the kind of <type>\n" ++
" (!: also print the normalised type)\n" ++
" :load[!] [*]<module> ... load module(s) and their dependents\n" ++
" (!: defer type errors)\n" ++
" :main [<arguments> ...] run the main function with the given arguments\n" ++
" :module [+/-] [*]<mod> ... set the context for expression evaluation\n" ++
" :quit exit GHCi\n" ++
" :reload[!] reload the current module set\n" ++
" (!: defer type errors)\n" ++
" :run function [<arguments> ...] run the function with the given arguments\n" ++
" :script <filename> run the script <filename>\n" ++
" :type <expr> show the type of <expr>\n" ++
" :undef <cmd> undefine user-defined command :<cmd>\n" ++
" :!<command> run the shell command <command>\n" ++
"\n" ++
" -- Commands for debugging:\n" ++
"\n" ++
" :abandon at a breakpoint, abandon current computation\n" ++
" :back [<n>] go back in the history N steps (after :trace)\n" ++
" :break [<mod>] <l> [<col>] set a breakpoint at the specified location\n" ++
" :break <name> set a breakpoint on the specified function\n" ++
" :continue resume after a breakpoint\n" ++
" :delete <number> delete the specified breakpoint\n" ++
" :delete * delete all breakpoints\n" ++
" :force <expr> print <expr>, forcing unevaluated parts\n" ++
" :forward [<n>] go forward in the history N step s(after :back)\n" ++
" :history [<n>] after :trace, show the execution history\n" ++
" :list show the source code around current breakpoint\n" ++
" :list <identifier> show the source code for <identifier>\n" ++
" :list [<module>] <line> show the source code around line number <line>\n" ++
" :print [<name> ...] show a value without forcing its computation\n" ++
" :sprint [<name> ...] simplified version of :print\n" ++
" :step single-step after stopping at a breakpoint\n"++
" :step <expr> single-step into <expr>\n"++
" :steplocal single-step within the current top-level binding\n"++
" :stepmodule single-step restricted to the current module\n"++
" :trace trace after stopping at a breakpoint\n"++
" :trace <expr> evaluate <expr> with tracing on (see :history)\n"++
"\n" ++
" -- Commands for changing settings:\n" ++
"\n" ++
" :set <option> ... set options\n" ++
" :seti <option> ... set options for interactive evaluation only\n" ++
" :set args <arg> ... set the arguments returned by System.getArgs\n" ++
" :set prog <progname> set the value returned by System.getProgName\n" ++
" :set prompt <prompt> set the prompt used in GHCi\n" ++
" :set prompt2 <prompt> set the continuation prompt used in GHCi\n" ++
" :set editor <cmd> set the command used for :edit\n" ++
" :set stop [<n>] <cmd> set the command to run when a breakpoint is hit\n" ++
" :unset <option> ... unset options\n" ++
"\n" ++
" Options for ':set' and ':unset':\n" ++
"\n" ++
" +m allow multiline commands\n" ++
" +r revert top-level expressions after each evaluation\n" ++
" +s print timing/memory stats after each evaluation\n" ++
" +t print type after evaluation\n" ++
" -<flags> most GHC command line flags can also be set here\n" ++
" (eg. -v2, -XFlexibleInstances, etc.)\n" ++
" for GHCi-specific flags, see User's Guide,\n"++
" Flag reference, Interactive-mode options\n" ++
"\n" ++
" -- Commands for displaying information:\n" ++
"\n" ++
" :show bindings show the current bindings made at the prompt\n" ++
" :show breaks show the active breakpoints\n" ++
" :show context show the breakpoint context\n" ++
" :show imports show the current imports\n" ++
" :show linker show current linker state\n" ++
" :show modules show the currently loaded modules\n" ++
" :show packages show the currently active package flags\n" ++
" :show paths show the currently active search paths\n" ++
" :show language show the currently active language flags\n" ++
" :show <setting> show value of <setting>, which is one of\n" ++
" [args, prog, prompt, editor, stop]\n" ++
" :showi language show language flags for interactive evaluation\n" ++
"\n"
findEditor :: IO String
findEditor = do
getEnv "EDITOR"
`catchIO` \_ -> do
#if mingw32_HOST_OS
win <- System.Win32.getWindowsDirectory
return (win </> "notepad.exe")
#else
return ""
#endif
foreign import ccall unsafe "rts_isProfiled" isProfiled :: IO CInt
default_progname, default_prompt, default_prompt2, default_stop :: String
default_progname = "<interactive>"
default_prompt = "%s> "
default_prompt2 = "%s| "
default_stop = ""
default_args :: [String]
default_args = []
interactiveUI :: GhciSettings -> [(FilePath, Maybe Phase)] -> Maybe [String]
-> Ghc ()
interactiveUI config srcs maybe_exprs = do
-- although GHCi compiles with -prof, it is not usable: the byte-code
-- compiler and interpreter don't work with profiling. So we check for
-- this up front and emit a helpful error message (#2197)
i <- liftIO $ isProfiled
when (i /= 0) $
throwGhcException (InstallationError "GHCi cannot be used when compiled with -prof")
-- HACK! If we happen to get into an infinite loop (eg the user
-- types 'let x=x in x' at the prompt), then the thread will block
-- on a blackhole, and become unreachable during GC. The GC will
-- detect that it is unreachable and send it the NonTermination
-- exception. However, since the thread is unreachable, everything
-- it refers to might be finalized, including the standard Handles.
-- This sounds like a bug, but we don't have a good solution right
-- now.
_ <- liftIO $ newStablePtr stdin
_ <- liftIO $ newStablePtr stdout
_ <- liftIO $ newStablePtr stderr
-- Initialise buffering for the *interpreted* I/O system
initInterpBuffering
-- The initial set of DynFlags used for interactive evaluation is the same
-- as the global DynFlags, plus -XExtendedDefaultRules and
-- -XNoMonomorphismRestriction.
dflags <- getDynFlags
let dflags' = (`xopt_set` Opt_ExtendedDefaultRules)
. (`xopt_unset` Opt_MonomorphismRestriction)
$ dflags
GHC.setInteractiveDynFlags dflags'
lastErrLocationsRef <- liftIO $ newIORef []
progDynFlags <- GHC.getProgramDynFlags
_ <- GHC.setProgramDynFlags $
progDynFlags { log_action = ghciLogAction lastErrLocationsRef }
liftIO $ when (isNothing maybe_exprs) $ do
-- Only for GHCi (not runghc and ghc -e):
-- Turn buffering off for the compiled program's stdout/stderr
turnOffBuffering
-- Turn buffering off for GHCi's stdout
hFlush stdout
hSetBuffering stdout NoBuffering
-- We don't want the cmd line to buffer any input that might be
-- intended for the program, so unbuffer stdin.
hSetBuffering stdin NoBuffering
hSetBuffering stderr NoBuffering
#if defined(mingw32_HOST_OS)
-- On Unix, stdin will use the locale encoding. The IO library
-- doesn't do this on Windows (yet), so for now we use UTF-8,
-- for consistency with GHC 6.10 and to make the tests work.
hSetEncoding stdin utf8
#endif
default_editor <- liftIO $ findEditor
startGHCi (runGHCi srcs maybe_exprs)
GHCiState{ progname = default_progname,
GhciMonad.args = default_args,
prompt = defPrompt config,
prompt2 = defPrompt2 config,
stop = default_stop,
editor = default_editor,
options = [],
                   -- We initialize the line number to 0, not 1, because we
                   -- use the current line number when reporting errors, and
                   -- it is incremented after a line is read.
line_number = 0,
break_ctr = 0,
breaks = [],
tickarrays = emptyModuleEnv,
ghci_commands = availableCommands config,
last_command = Nothing,
cmdqueue = [],
remembered_ctx = [],
transient_ctx = [],
ghc_e = isJust maybe_exprs,
short_help = shortHelpText config,
long_help = fullHelpText config,
lastErrorLocations = lastErrLocationsRef
}
return ()
resetLastErrorLocations :: GHCi ()
resetLastErrorLocations = do
st <- getGHCiState
liftIO $ writeIORef (lastErrorLocations st) []
ghciLogAction :: IORef [(FastString, Int)] -> LogAction
ghciLogAction lastErrLocations dflags severity srcSpan style msg = do
defaultLogAction dflags severity srcSpan style msg
case severity of
SevError -> case srcSpan of
RealSrcSpan rsp -> modifyIORef lastErrLocations
(++ [(srcLocFile (realSrcSpanStart rsp), srcLocLine (realSrcSpanStart rsp))])
_ -> return ()
_ -> return ()
withGhcAppData :: (FilePath -> IO a) -> IO a -> IO a
withGhcAppData right left = do
either_dir <- tryIO (getAppUserDataDirectory "ghc")
case either_dir of
Right dir ->
do createDirectoryIfMissing False dir `catchIO` \_ -> return ()
right dir
_ -> left
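-- | Source the user's configuration files and then run the interactive
-- loop (or just evaluate the given expressions, for @ghc -e@).
-- Configuration is read, in order, from @./.ghci@, the per-user
-- @ghci.conf@ in the GHC application-data directory, and @$HOME/.ghci@
-- (all three skipped with @-ignore-dot-ghci@), and finally from any
-- scripts passed with @-ghci-script@.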
runGHCi :: [(FilePath, Maybe Phase)] -> Maybe [String] -> GHCi ()
runGHCi paths maybe_exprs = do
dflags <- getDynFlags
let
ignore_dot_ghci = gopt Opt_IgnoreDotGhci dflags
current_dir = return (Just ".ghci")
app_user_dir = liftIO $ withGhcAppData
(\dir -> return (Just (dir </> "ghci.conf")))
(return Nothing)
home_dir = do
either_dir <- liftIO $ tryIO (getEnv "HOME")
case either_dir of
Right home -> return (Just (home </> ".ghci"))
_ -> return Nothing
canonicalizePath' :: FilePath -> IO (Maybe FilePath)
canonicalizePath' fp = liftM Just (canonicalizePath fp)
`catchIO` \_ -> return Nothing
sourceConfigFile :: FilePath -> GHCi ()
sourceConfigFile file = do
exists <- liftIO $ doesFileExist file
when exists $ do
either_hdl <- liftIO $ tryIO (openFile file ReadMode)
case either_hdl of
Left _e -> return ()
-- NOTE: this assumes that runInputT won't affect the terminal;
-- can we assume this will always be the case?
-- This would be a good place for runFileInputT.
Right hdl ->
do runInputTWithPrefs defaultPrefs defaultSettings $
runCommands $ fileLoop hdl
liftIO (hClose hdl `catchIO` \_ -> return ())
--
setGHCContextFromGHCiState
dot_cfgs <- if ignore_dot_ghci then return [] else do
dot_files <- catMaybes <$> sequence [ current_dir, app_user_dir, home_dir ]
liftIO $ filterM checkFileAndDirPerms dot_files
let arg_cfgs = reverse $ ghciScripts dflags
-- -ghci-script are collected in reverse order
mcfgs <- liftIO $ mapM canonicalizePath' $ dot_cfgs ++ arg_cfgs
-- We don't require that a script explicitly added by -ghci-script
-- is owned by the current user. (#6017)
mapM_ sourceConfigFile $ nub $ catMaybes mcfgs
-- nub, because we don't want to read .ghci twice if the CWD is $HOME.
-- Perform a :load for files given on the GHCi command line
-- When in -e mode, if the load fails then we want to stop
-- immediately rather than going on to evaluate the expression.
when (not (null paths)) $ do
ok <- ghciHandle (\e -> do showException e; return Failed) $
-- TODO: this is a hack.
runInputTWithPrefs defaultPrefs defaultSettings $
loadModule paths
when (isJust maybe_exprs && failed ok) $
liftIO (exitWith (ExitFailure 1))
installInteractivePrint (interactivePrint dflags) (isJust maybe_exprs)
-- if verbosity is greater than 0, or we are connected to a
-- terminal, display the prompt in the interactive loop.
is_tty <- liftIO (hIsTerminalDevice stdin)
let show_prompt = verbosity dflags > 0 || is_tty
-- reset line number
modifyGHCiState $ \st -> st{line_number=0}
case maybe_exprs of
Nothing ->
do
-- enter the interactive loop
runGHCiInput $ runCommands $ nextInputLine show_prompt is_tty
Just exprs -> do
-- just evaluate the expression we were given
enqueueCommands exprs
let hdle e = do st <- getGHCiState
-- flush the interpreter's stdout/stderr on exit (#3890)
flushInterpBuffers
-- Jump through some hoops to get the
-- current progname in the exception text:
-- <progname>: <exception>
liftIO $ withProgName (progname st)
$ topHandler e
-- this used to be topHandlerFastExit, see #2228
runInputTWithPrefs defaultPrefs defaultSettings $ do
-- make `ghc -e` exit nonzero on invalid input, see Trac #7962
_ <- runCommands' hdle
(Just $ hdle (toException $ ExitFailure 1) >> return ())
(return Nothing)
return ()
-- and finally, exit
liftIO $ when (verbosity dflags > 0) $ putStrLn "Leaving GHCi."
runGHCiInput :: InputT GHCi a -> GHCi a
runGHCiInput f = do
dflags <- getDynFlags
histFile <- if gopt Opt_GhciHistory dflags
then liftIO $ withGhcAppData (\dir -> return (Just (dir </> "ghci_history")))
(return Nothing)
else return Nothing
runInputT
(setComplete ghciCompleteWord $ defaultSettings {historyFile = histFile})
f
-- | How to get the next input line from the user
nextInputLine :: Bool -> Bool -> InputT GHCi (Maybe String)
nextInputLine show_prompt is_tty
| is_tty = do
prmpt <- if show_prompt then lift mkPrompt else return ""
r <- getInputLine prmpt
incrementLineNo
return r
| otherwise = do
when show_prompt $ lift mkPrompt >>= liftIO . putStr
fileLoop stdin
-- NOTE: We only read .ghci files if they are owned by the current user,
-- and aren't world writable (files owned by root are ok, see #9324).
-- Otherwise, we could be accidentally running code planted by
-- a malicious third party.
-- Furthermore, we only read ./.ghci if . is owned by the current user
-- and isn't writable by anyone else. I think this is sufficient: we
-- don't need to check .. and ../.. etc. because "." always refers to
-- the same directory while a process is running.
checkFileAndDirPerms :: FilePath -> IO Bool
checkFileAndDirPerms file = do
file_ok <- checkPerms file
-- Do not check dir perms when .ghci doesn't exist, otherwise GHCi will
-- print some confusing and useless warnings in some cases (e.g. in
-- travis). Note that we can't add a test for this, as all ghci tests should
-- run with -ignore-dot-ghci, which means we never get here.
if file_ok then checkPerms (getDirectory file) else return False
where
getDirectory f = case takeDirectory f of
"" -> "."
d -> d
checkPerms :: FilePath -> IO Bool
#ifdef mingw32_HOST_OS
checkPerms _ = return True
#else
checkPerms file =
handleIO (\_ -> return False) $ do
st <- getFileStatus file
me <- getRealUserID
let mode = System.Posix.fileMode st
ok = (fileOwner st == me || fileOwner st == 0) &&
groupWriteMode /= mode `intersectFileModes` groupWriteMode &&
otherWriteMode /= mode `intersectFileModes` otherWriteMode
unless ok $
-- #8248: Improving warning to include a possible fix.
putStrLn $ "*** WARNING: " ++ file ++
" is writable by someone else, IGNORING!" ++
"\nSuggested fix: execute 'chmod go-w " ++ file ++ "'"
return ok
#endif
incrementLineNo :: InputT GHCi ()
incrementLineNo = do
st <- lift $ getGHCiState
let ln = 1+(line_number st)
lift $ setGHCiState st{line_number=ln}
fileLoop :: Handle -> InputT GHCi (Maybe String)
fileLoop hdl = do
l <- liftIO $ tryIO $ hGetLine hdl
case l of
Left e | isEOFError e -> return Nothing
| -- as we share stdin with the program, the program
-- might have already closed it, so we might get a
-- handle-closed exception. We therefore catch that
-- too.
isIllegalOperation e -> return Nothing
| InvalidArgument <- etype -> return Nothing
| otherwise -> liftIO $ ioError e
where etype = ioeGetErrorType e
-- treat InvalidArgument in the same way as EOF:
-- this can happen if the user closed stdin, or
-- perhaps did getContents which closes stdin at
-- EOF.
Right l' -> do
incrementLineNo
return (Just l')
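-- | Build the prompt string from the user-configurable template (see
-- ":set prompt"). The escapes handled by 'f' below are: "%s" for the
-- current module (and breakpoint) context, "%l" for the current line
-- number, and "%%" for a literal '%'. For example, a template of
-- "%s[%l]> " might render as "Prelude[4]> ".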
mkPrompt :: GHCi String
mkPrompt = do
st <- getGHCiState
imports <- GHC.getContext
resumes <- GHC.getResumeContext
context_bit <-
case resumes of
[] -> return empty
r:_ -> do
let ix = GHC.resumeHistoryIx r
if ix == 0
then return (brackets (ppr (GHC.resumeSpan r)) <> space)
else do
let hist = GHC.resumeHistory r !! (ix-1)
pan <- GHC.getHistorySpan hist
return (brackets (ppr (negate ix) <> char ':'
<+> ppr pan) <> space)
let
dots | _:rs <- resumes, not (null rs) = text "... "
| otherwise = empty
rev_imports = reverse imports -- rightmost are the most recent
modules_bit =
hsep [ char '*' <> ppr m | IIModule m <- rev_imports ] <+>
hsep (map ppr [ myIdeclName d | IIDecl d <- rev_imports ])
-- use the 'as' name if there is one
myIdeclName d | Just m <- ideclAs d = m
| otherwise = unLoc (ideclName d)
deflt_prompt = dots <> context_bit <> modules_bit
f ('%':'l':xs) = ppr (1 + line_number st) <> f xs
f ('%':'s':xs) = deflt_prompt <> f xs
f ('%':'%':xs) = char '%' <> f xs
f (x:xs) = char x <> f xs
f [] = empty
dflags <- getDynFlags
return (showSDoc dflags (f (prompt st)))
queryQueue :: GHCi (Maybe String)
queryQueue = do
st <- getGHCiState
case cmdqueue st of
[] -> return Nothing
c:cs -> do setGHCiState st{ cmdqueue = cs }
return (Just c)
-- Reconfigurable pretty-printing Ticket #5461
installInteractivePrint :: Maybe String -> Bool -> GHCi ()
installInteractivePrint Nothing _ = return ()
installInteractivePrint (Just ipFun) exprmode = do
ok <- trySuccess $ do
(name:_) <- GHC.parseName ipFun
modifySession (\he -> let new_ic = setInteractivePrintName (hsc_IC he) name
in he{hsc_IC = new_ic})
return Succeeded
when (failed ok && exprmode) $ liftIO (exitWith (ExitFailure 1))
-- | The main read-eval-print loop
runCommands :: InputT GHCi (Maybe String) -> InputT GHCi ()
runCommands gCmd = runCommands' handler Nothing gCmd >> return ()
runCommands' :: (SomeException -> GHCi Bool) -- ^ Exception handler
-> Maybe (GHCi ()) -- ^ Source error handler
-> InputT GHCi (Maybe String)
-> InputT GHCi (Maybe Bool)
-- We want to return () here, but have to return (Maybe Bool)
-- because gmask is not polymorphic enough: we want to use
-- unmask at two different types.
runCommands' eh sourceErrorHandler gCmd = gmask $ \unmask -> do
b <- ghandle (\e -> case fromException e of
Just UserInterrupt -> return $ Just False
_ -> case fromException e of
Just ghce ->
do liftIO (print (ghce :: GhcException))
return Nothing
_other ->
liftIO (Exception.throwIO e))
(unmask $ runOneCommand eh gCmd)
case b of
Nothing -> return Nothing
Just success -> do
unless success $ maybe (return ()) lift sourceErrorHandler
unmask $ runCommands' eh sourceErrorHandler gCmd
-- | Evaluate a single line of user input (either :<command> or Haskell code).
-- A result of Nothing means there was no more input to process.
-- Otherwise the result is Just b where b is True if the command succeeded;
-- this is relevant only to ghc -e, which will exit with status 1
-- if the command was unsuccessful. GHCi will continue in either case.
runOneCommand :: (SomeException -> GHCi Bool) -> InputT GHCi (Maybe String)
-> InputT GHCi (Maybe Bool)
runOneCommand eh gCmd = do
-- run a previously queued command if there is one, otherwise get new
-- input from user
mb_cmd0 <- noSpace (lift queryQueue)
mb_cmd1 <- maybe (noSpace gCmd) (return . Just) mb_cmd0
case mb_cmd1 of
Nothing -> return Nothing
Just c -> ghciHandle (\e -> lift $ eh e >>= return . Just) $
handleSourceError printErrorAndFail
(doCommand c)
-- source error's are handled by runStmt
-- is the handler necessary here?
where
printErrorAndFail err = do
GHC.printException err
return $ Just False -- Exit ghc -e, but not GHCi
noSpace q = q >>= maybe (return Nothing)
(\c -> case removeSpaces c of
"" -> noSpace q
":{" -> multiLineCmd q
_ -> return (Just c) )
multiLineCmd q = do
st <- lift getGHCiState
let p = prompt st
lift $ setGHCiState st{ prompt = prompt2 st }
mb_cmd <- collectCommand q "" `GHC.gfinally` lift (getGHCiState >>= \st' -> setGHCiState st' { prompt = p })
return mb_cmd
-- we can't use removeSpaces for the sublines here, so
-- multiline commands are somewhat more brittle against
    -- file-format errors (such as \r in DOS input on Unix),
-- we get rid of any extra spaces for the ":}" test;
-- we also avoid silent failure if ":}" is not found;
-- and since there is no (?) valid occurrence of \r (as
-- opposed to its String representation, "\r") inside a
-- ghci command, we replace any such with ' ' (argh:-(
collectCommand q c = q >>=
maybe (liftIO (ioError collectError))
(\l->if removeSpaces l == ":}"
then return (Just c)
else collectCommand q (c ++ "\n" ++ map normSpace l))
where normSpace '\r' = ' '
normSpace x = x
-- SDM (2007-11-07): is userError the one to use here?
collectError = userError "unterminated multiline command :{ .. :}"
-- | Handle a line of input
doCommand :: String -> InputT GHCi (Maybe Bool)
-- command
doCommand stmt | (':' : cmd) <- removeSpaces stmt = do
result <- specialCommand cmd
case result of
True -> return Nothing
_ -> return $ Just True
-- haskell
doCommand stmt = do
-- if 'stmt' was entered via ':{' it will contain '\n's
let stmt_nl_cnt = length [ () | '\n' <- stmt ]
ml <- lift $ isOptionSet Multiline
if ml && stmt_nl_cnt == 0 -- don't trigger automatic multi-line mode for ':{'-multiline input
then do
fst_line_num <- lift (line_number <$> getGHCiState)
mb_stmt <- checkInputForLayout stmt gCmd
case mb_stmt of
Nothing -> return $ Just True
Just ml_stmt -> do
-- temporarily compensate line-number for multi-line input
result <- timeIt runAllocs $ lift $
runStmtWithLineNum fst_line_num ml_stmt GHC.RunToCompletion
return $ Just (runSuccess result)
else do -- single line input and :{ - multiline input
last_line_num <- lift (line_number <$> getGHCiState)
-- reconstruct first line num from last line num and stmt
let fst_line_num | stmt_nl_cnt > 0 = last_line_num - (stmt_nl_cnt2 + 1)
| otherwise = last_line_num -- single line input
stmt_nl_cnt2 = length [ () | '\n' <- stmt' ]
stmt' = dropLeadingWhiteLines stmt -- runStmt doesn't like leading empty lines
-- temporarily compensate line-number for multi-line input
result <- timeIt runAllocs $ lift $
runStmtWithLineNum fst_line_num stmt' GHC.RunToCompletion
return $ Just (runSuccess result)
-- runStmt wrapper for temporarily overridden line-number
runStmtWithLineNum :: Int -> String -> SingleStep
-> GHCi (Maybe GHC.ExecResult)
runStmtWithLineNum lnum stmt step = do
st0 <- getGHCiState
setGHCiState st0 { line_number = lnum }
result <- runStmt stmt step
-- restore original line_number
getGHCiState >>= \st -> setGHCiState st { line_number = line_number st0 }
return result
-- note: this is subtly different from 'unlines . dropWhile (all isSpace) . lines'
dropLeadingWhiteLines s | (l0,'\n':r) <- break (=='\n') s
, all isSpace l0 = dropLeadingWhiteLines r
| otherwise = s
-- #4316
-- lex the input. If there is an unclosed layout context, request input
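-- (For example, with ":set +m", entering "let x = 1" leaves the layout
-- context of the "let" open, so GHCi keeps reading continuation lines
-- until the context is closed, e.g. by an empty line.)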
checkInputForLayout :: String -> InputT GHCi (Maybe String)
-> InputT GHCi (Maybe String)
checkInputForLayout stmt getStmt = do
dflags' <- lift $ getDynFlags
let dflags = xopt_set dflags' Opt_AlternativeLayoutRule
st0 <- lift $ getGHCiState
let buf' = stringToStringBuffer stmt
loc = mkRealSrcLoc (fsLit (progname st0)) (line_number st0) 1
pstate = Lexer.mkPState dflags buf' loc
case Lexer.unP goToEnd pstate of
(Lexer.POk _ False) -> return $ Just stmt
_other -> do
st1 <- lift getGHCiState
let p = prompt st1
lift $ setGHCiState st1{ prompt = prompt2 st1 }
mb_stmt <- ghciHandle (\ex -> case fromException ex of
Just UserInterrupt -> return Nothing
_ -> case fromException ex of
Just ghce ->
do liftIO (print (ghce :: GhcException))
return Nothing
_other -> liftIO (Exception.throwIO ex))
getStmt
lift $ getGHCiState >>= \st' -> setGHCiState st'{ prompt = p }
-- the recursive call does not recycle parser state
-- as we use a new string buffer
case mb_stmt of
Nothing -> return Nothing
Just str -> if str == ""
then return $ Just stmt
else do
checkInputForLayout (stmt++"\n"++str) getStmt
where goToEnd = do
eof <- Lexer.nextIsEOF
if eof
then Lexer.activeContext
else Lexer.lexer False return >> goToEnd
enqueueCommands :: [String] -> GHCi ()
enqueueCommands cmds = do
-- make sure we force any exceptions in the commands while we're
-- still inside the exception handler, otherwise bad things will
-- happen (see #10501)
cmds `deepseq` return ()
modifyGHCiState $ \st -> st{ cmdqueue = cmds ++ cmdqueue st }
-- | If one of these strings prefixes a command, then we treat it as a decl
-- rather than a stmt. NB that the appropriate decl prefixes depend on the
-- flag settings (Trac #9915)
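-- For example, input beginning with "data " or "instance " is treated as
-- a declaration, while "let x = 1" or "print x" is run as a statement.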
declPrefixes :: DynFlags -> [String]
declPrefixes dflags = keywords ++ concat opt_keywords
where
keywords = [ "class ", "instance "
, "data ", "newtype ", "type "
, "default ", "default("
]
opt_keywords = [ ["foreign " | xopt Opt_ForeignFunctionInterface dflags]
, ["deriving " | xopt Opt_StandaloneDeriving dflags]
, ["pattern " | xopt Opt_PatternSynonyms dflags]
]
-- | Entry point to execute some Haskell code from the user.
-- Callers judge success of the result via 'runSuccess' (cf. `runOneCommand`).
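-- Input that looks like an import (e.g. "import Data.List") is added to
-- the interactive context rather than being evaluated as a statement.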
runStmt :: String -> SingleStep -> GHCi (Maybe GHC.ExecResult)
runStmt stmt step
  -- empty; this should be impossible anyway, since we filtered out
-- whitespace-only input in runOneCommand's noSpace
| null (filter (not.isSpace) stmt)
= return Nothing
-- import
| stmt `looks_like` "import "
= do addImportToContext stmt; return (Just (GHC.ExecComplete (Right []) 0))
| otherwise
= do dflags <- getDynFlags
if any (stmt `looks_like`) (declPrefixes dflags)
then run_decl
else run_stmt
where
run_decl =
do _ <- liftIO $ tryIO $ hFlushAll stdin
m_result <- GhciMonad.runDecls stmt
case m_result of
Nothing -> return Nothing
Just result ->
Just <$> afterRunStmt (const True)
(GHC.ExecComplete (Right result) 0)
run_stmt =
do -- In the new IO library, read handles buffer data even if the Handle
-- is set to NoBuffering. This causes problems for GHCi where there
         -- are really two stdin Handles. So we flush any buffered data in
-- GHCi's stdin Handle here (only relevant if stdin is attached to
-- a file, otherwise the read buffer can't be flushed).
_ <- liftIO $ tryIO $ hFlushAll stdin
m_result <- GhciMonad.runStmt stmt step
case m_result of
Nothing -> return Nothing
Just result -> Just <$> afterRunStmt (const True) result
s `looks_like` prefix = prefix `isPrefixOf` dropWhile isSpace s
-- Ignore leading spaces (see Trac #9914), so that
-- ghci> data T = T
-- (note leading spaces) works properly
-- | Clean up the GHCi environment after a statement has run
afterRunStmt :: (SrcSpan -> Bool) -> GHC.ExecResult -> GHCi GHC.ExecResult
afterRunStmt step_here run_result = do
resumes <- GHC.getResumeContext
case run_result of
GHC.ExecComplete{..} ->
case execResult of
Left ex -> liftIO $ Exception.throwIO ex
Right names -> do
show_types <- isOptionSet ShowType
when show_types $ printTypeOfNames names
GHC.ExecBreak _ names mb_info
| isNothing mb_info ||
step_here (GHC.resumeSpan $ head resumes) -> do
mb_id_loc <- toBreakIdAndLocation mb_info
let bCmd = maybe "" ( \(_,l) -> onBreakCmd l ) mb_id_loc
if (null bCmd)
then printStoppedAtBreakInfo (head resumes) names
else enqueueCommands [bCmd]
-- run the command set with ":set stop <cmd>"
st <- getGHCiState
enqueueCommands [stop st]
return ()
| otherwise -> resume step_here GHC.SingleStep >>=
afterRunStmt step_here >> return ()
flushInterpBuffers
liftIO installSignalHandlers
b <- isOptionSet RevertCAFs
when b revertCAFs
return run_result
runSuccess :: Maybe GHC.ExecResult -> Bool
runSuccess run_result
| Just (GHC.ExecComplete { execResult = Right _ }) <- run_result = True
| otherwise = False
runAllocs :: Maybe GHC.ExecResult -> Maybe Integer
runAllocs m = do
res <- m
case res of
GHC.ExecComplete{..} -> Just (fromIntegral execAllocation)
_ -> Nothing
toBreakIdAndLocation ::
Maybe GHC.BreakInfo -> GHCi (Maybe (Int, BreakLocation))
toBreakIdAndLocation Nothing = return Nothing
toBreakIdAndLocation (Just inf) = do
let md = GHC.breakInfo_module inf
nm = GHC.breakInfo_number inf
st <- getGHCiState
return $ listToMaybe [ id_loc | id_loc@(_,loc) <- breaks st,
breakModule loc == md,
breakTick loc == nm ]
printStoppedAtBreakInfo :: Resume -> [Name] -> GHCi ()
printStoppedAtBreakInfo res names = do
printForUser $ ptext (sLit "Stopped at") <+>
ppr (GHC.resumeSpan res)
-- printTypeOfNames session names
let namesSorted = sortBy compareNames names
tythings <- catMaybes `liftM` mapM GHC.lookupName namesSorted
docs <- mapM pprTypeAndContents [i | AnId i <- tythings]
printForUserPartWay $ vcat docs
printTypeOfNames :: [Name] -> GHCi ()
printTypeOfNames names
= mapM_ (printTypeOfName ) $ sortBy compareNames names
compareNames :: Name -> Name -> Ordering
n1 `compareNames` n2 = compareWith n1 `compare` compareWith n2
where compareWith n = (getOccString n, getSrcSpan n)
printTypeOfName :: Name -> GHCi ()
printTypeOfName n
= do maybe_tything <- GHC.lookupName n
case maybe_tything of
Nothing -> return ()
Just thing -> printTyThing thing
data MaybeCommand = GotCommand Command | BadCommand | NoLastCommand
-- | Entry point for execution a ':<command>' input from user
specialCommand :: String -> InputT GHCi Bool
specialCommand ('!':str) = lift $ shellEscape (dropWhile isSpace str)
specialCommand str = do
let (cmd,rest) = break isSpace str
maybe_cmd <- lift $ lookupCommand cmd
htxt <- lift $ short_help `fmap` getGHCiState
case maybe_cmd of
GotCommand (_,f,_) -> f (dropWhile isSpace rest)
BadCommand ->
do liftIO $ hPutStr stdout ("unknown command ':" ++ cmd ++ "'\n"
++ htxt)
return False
NoLastCommand ->
do liftIO $ hPutStr stdout ("there is no last command to perform\n"
++ htxt)
return False
shellEscape :: String -> GHCi Bool
shellEscape str = liftIO (system str >> return False)
lookupCommand :: String -> GHCi (MaybeCommand)
lookupCommand "" = do
st <- getGHCiState
case last_command st of
Just c -> return $ GotCommand c
Nothing -> return NoLastCommand
lookupCommand str = do
mc <- lookupCommand' str
st <- getGHCiState
setGHCiState st{ last_command = mc }
return $ case mc of
Just c -> GotCommand c
Nothing -> BadCommand
lookupCommand' :: String -> GHCi (Maybe Command)
lookupCommand' ":" = return Nothing
lookupCommand' str' = do
macros <- liftIO $ readIORef macros_ref
ghci_cmds <- ghci_commands `fmap` getGHCiState
let (str, xcmds) = case str' of
':' : rest -> (rest, []) -- "::" selects a builtin command
_ -> (str', macros) -- otherwise include macros in lookup
lookupExact s = find $ (s ==) . cmdName
lookupPrefix s = find $ (s `isPrefixOf`) . cmdName
builtinPfxMatch = lookupPrefix str ghci_cmds
-- first, look for exact match (while preferring macros); then, look
-- for first prefix match (preferring builtins), *unless* a macro
-- overrides the builtin; see #8305 for motivation
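  -- For example: with a user macro ":t" defined, ":t" runs the macro,
  -- while "::t" always runs the builtin ":type".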
return $ lookupExact str xcmds <|>
lookupExact str ghci_cmds <|>
(builtinPfxMatch >>= \c -> lookupExact (cmdName c) xcmds) <|>
builtinPfxMatch <|>
lookupPrefix str xcmds
getCurrentBreakSpan :: GHCi (Maybe SrcSpan)
getCurrentBreakSpan = do
resumes <- GHC.getResumeContext
case resumes of
[] -> return Nothing
(r:_) -> do
let ix = GHC.resumeHistoryIx r
if ix == 0
then return (Just (GHC.resumeSpan r))
else do
let hist = GHC.resumeHistory r !! (ix-1)
pan <- GHC.getHistorySpan hist
return (Just pan)
getCurrentBreakModule :: GHCi (Maybe Module)
getCurrentBreakModule = do
resumes <- GHC.getResumeContext
case resumes of
[] -> return Nothing
(r:_) -> do
let ix = GHC.resumeHistoryIx r
if ix == 0
then return (GHC.breakInfo_module `liftM` GHC.resumeBreakInfo r)
else do
let hist = GHC.resumeHistory r !! (ix-1)
return $ Just $ GHC.getHistoryModule hist
-----------------------------------------------------------------------------
--
-- Commands
--
-----------------------------------------------------------------------------
noArgs :: GHCi () -> String -> GHCi ()
noArgs m "" = m
noArgs _ _ = liftIO $ putStrLn "This command takes no arguments"
withSandboxOnly :: String -> GHCi () -> GHCi ()
withSandboxOnly cmd this = do
dflags <- getDynFlags
if not (gopt Opt_GhciSandbox dflags)
then printForUser (text cmd <+>
ptext (sLit "is not supported with -fno-ghci-sandbox"))
else this
-----------------------------------------------------------------------------
-- :help
help :: String -> GHCi ()
help _ = do
txt <- long_help `fmap` getGHCiState
liftIO $ putStr txt
-----------------------------------------------------------------------------
-- :info
info :: Bool -> String -> InputT GHCi ()
info _ "" = throwGhcException (CmdLineError "syntax: ':i <thing-you-want-info-about>'")
info allInfo s = handleSourceError GHC.printException $ do
unqual <- GHC.getPrintUnqual
dflags <- getDynFlags
sdocs <- mapM (infoThing allInfo) (words s)
mapM_ (liftIO . putStrLn . showSDocForUser dflags unqual) sdocs
infoThing :: GHC.GhcMonad m => Bool -> String -> m SDoc
infoThing allInfo str = do
names <- GHC.parseName str
mb_stuffs <- mapM (GHC.getInfo allInfo) names
let filtered = filterOutChildren (\(t,_f,_ci,_fi) -> t) (catMaybes mb_stuffs)
return $ vcat (intersperse (text "") $ map pprInfo filtered)
-- Filter out names whose parent is also there. A good
-- example is '[]', which is both a type and a data
-- constructor in the same type.
filterOutChildren :: (a -> TyThing) -> [a] -> [a]
filterOutChildren get_thing xs
= filterOut has_parent xs
where
all_names = mkNameSet (map (getName . get_thing) xs)
has_parent x = case tyThingParent_maybe (get_thing x) of
Just p -> getName p `elemNameSet` all_names
Nothing -> False
pprInfo :: (TyThing, Fixity, [GHC.ClsInst], [GHC.FamInst]) -> SDoc
pprInfo (thing, fixity, cls_insts, fam_insts)
= pprTyThingInContextLoc thing
$$ show_fixity
$$ vcat (map GHC.pprInstance cls_insts)
$$ vcat (map GHC.pprFamInst fam_insts)
where
show_fixity
| fixity == GHC.defaultFixity = empty
| otherwise = ppr fixity <+> pprInfixName (GHC.getName thing)
-----------------------------------------------------------------------------
-- :main
runMain :: String -> GHCi ()
runMain s = case toArgs s of
Left err -> liftIO (hPutStrLn stderr err)
Right args ->
do dflags <- getDynFlags
let main = fromMaybe "main" (mainFunIs dflags)
-- Wrap the main function in 'void' to discard its value instead
-- of printing it (#9086). See Haskell 2010 report Chapter 5.
doWithArgs args $ "Control.Monad.void (" ++ main ++ ")"
-----------------------------------------------------------------------------
-- :run
runRun :: String -> GHCi ()
runRun s = case toCmdArgs s of
Left err -> liftIO (hPutStrLn stderr err)
Right (cmd, args) -> doWithArgs args cmd
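-- For example, ":main foo bar" enqueues (roughly)
--   System.Environment.withArgs ["foo","bar"] (Control.Monad.void (main))
-- so the program sees the arguments via 'System.getArgs'.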
doWithArgs :: [String] -> String -> GHCi ()
doWithArgs args cmd = enqueueCommands ["System.Environment.withArgs " ++
show args ++ " (" ++ cmd ++ ")"]
-----------------------------------------------------------------------------
-- :cd
changeDirectory :: String -> InputT GHCi ()
changeDirectory "" = do
-- :cd on its own changes to the user's home directory
either_dir <- liftIO $ tryIO getHomeDirectory
case either_dir of
Left _e -> return ()
Right dir -> changeDirectory dir
changeDirectory dir = do
graph <- GHC.getModuleGraph
when (not (null graph)) $
liftIO $ putStrLn "Warning: changing directory causes all loaded modules to be unloaded,\nbecause the search path has changed."
GHC.setTargets []
_ <- GHC.load LoadAllTargets
lift $ setContextAfterLoad False []
GHC.workingDirectoryChanged
dir' <- expandPath dir
liftIO $ setCurrentDirectory dir'
trySuccess :: GHC.GhcMonad m => m SuccessFlag -> m SuccessFlag
trySuccess act =
handleSourceError (\e -> do GHC.printException e
return Failed) $ do
act
-----------------------------------------------------------------------------
-- :edit
editFile :: String -> InputT GHCi ()
editFile str =
do file <- if null str then lift chooseEditFile else expandPath str
st <- lift getGHCiState
errs <- liftIO $ readIORef $ lastErrorLocations st
let cmd = editor st
when (null cmd)
$ throwGhcException (CmdLineError "editor not set, use :set editor")
lineOpt <- liftIO $ do
let sameFile p1 p2 = liftA2 (==) (canonicalizePath p1) (canonicalizePath p2)
`catchIO` (\_ -> return False)
curFileErrs <- filterM (\(f, _) -> unpackFS f `sameFile` file) errs
return $ case curFileErrs of
(_, line):_ -> " +" ++ show line
_ -> ""
let cmdArgs = ' ':(file ++ lineOpt)
code <- liftIO $ system (cmd ++ cmdArgs)
when (code == ExitSuccess)
$ reloadModule False ""
-- The user didn't specify a file so we pick one for them.
-- Our strategy is to pick the first module that failed to load,
-- or otherwise the first target.
--
-- XXX: Can we figure out what happened if the dependency analysis fails
--      (e.g., because the programmer mistyped the name of a module)?
-- XXX: Can we figure out the location of an error to pass to the editor?
-- XXX: if we could figure out the list of errors that occurred during the
-- last load/reload, then we could start the editor focused on the first
-- of those.
chooseEditFile :: GHCi String
chooseEditFile =
do let hasFailed x = fmap not $ GHC.isLoaded $ GHC.ms_mod_name x
graph <- GHC.getModuleGraph
failed_graph <- filterM hasFailed graph
let order g = flattenSCCs $ GHC.topSortModuleGraph True g Nothing
pick xs = case xs of
x : _ -> GHC.ml_hs_file (GHC.ms_location x)
_ -> Nothing
case pick (order failed_graph) of
Just file -> return file
Nothing ->
do targets <- GHC.getTargets
case msum (map fromTarget targets) of
Just file -> return file
Nothing -> throwGhcException (CmdLineError "No files to edit.")
where fromTarget (GHC.Target (GHC.TargetFile f _) _ _) = Just f
fromTarget _ = Nothing -- when would we get a module target?
-----------------------------------------------------------------------------
-- :def
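-- A macro body is an expression of type @String -> IO String@ (with the
-- default GHCi monad); whatever string it returns is spliced back into
-- the command queue. For example,
--   :def hello \_ -> return "putStrLn \"hello\""
-- defines a @:hello@ command that prints "hello".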
defineMacro :: Bool{-overwrite-} -> String -> GHCi ()
defineMacro _ (':':_) =
liftIO $ putStrLn "macro name cannot start with a colon"
defineMacro overwrite s = do
let (macro_name, definition) = break isSpace s
macros <- liftIO (readIORef macros_ref)
let defined = map cmdName macros
if (null macro_name)
then if null defined
then liftIO $ putStrLn "no macros defined"
else liftIO $ putStr ("the following macros are defined:\n" ++
unlines defined)
else do
if (not overwrite && macro_name `elem` defined)
then throwGhcException (CmdLineError
("macro '" ++ macro_name ++ "' is already defined"))
else do
let filtered = [ cmd | cmd <- macros, cmdName cmd /= macro_name ]
-- compile the expression
handleSourceError GHC.printException $ do
step <- getGhciStepIO
expr <- GHC.parseExpr definition
-- > ghciStepIO . definition :: String -> IO String
let stringTy = nlHsTyVar stringTy_RDR
ioM = nlHsTyVar (getRdrName ioTyConName) `nlHsAppTy` stringTy
body = nlHsVar compose_RDR `mkHsApp` step `mkHsApp` expr
tySig = stringTy `nlHsFunTy` ioM
new_expr = L (getLoc expr) $ ExprWithTySig body tySig PlaceHolder
hv <- GHC.compileParsedExpr new_expr
liftIO (writeIORef macros_ref -- later defined macros have precedence
((macro_name, lift . runMacro hv, noCompletion) : filtered))
runMacro :: GHC.HValue{-String -> IO String-} -> String -> GHCi Bool
runMacro fun s = do
str <- liftIO ((unsafeCoerce# fun :: String -> IO String) s)
enqueueCommands (lines str)
return False
-----------------------------------------------------------------------------
-- :undef
undefineMacro :: String -> GHCi ()
undefineMacro str = mapM_ undef (words str)
where undef macro_name = do
cmds <- liftIO (readIORef macros_ref)
if (macro_name `notElem` map cmdName cmds)
then throwGhcException (CmdLineError
("macro '" ++ macro_name ++ "' is not defined"))
else do
liftIO (writeIORef macros_ref (filter ((/= macro_name) . cmdName) cmds))
-----------------------------------------------------------------------------
-- :cmd
cmdCmd :: String -> GHCi ()
cmdCmd str = handleSourceError GHC.printException $ do
step <- getGhciStepIO
expr <- GHC.parseExpr str
-- > ghciStepIO str :: IO String
let new_expr = step `mkHsApp` expr
hv <- GHC.compileParsedExpr new_expr
cmds <- liftIO $ (unsafeCoerce# hv :: IO String)
enqueueCommands (lines cmds)
-- | Generate a typed ghciStepIO expression
-- @ghciStepIO :: Ty String -> IO String@.
getGhciStepIO :: GHCi (LHsExpr RdrName)
getGhciStepIO = do
ghciTyConName <- GHC.getGHCiMonad
let stringTy = nlHsTyVar stringTy_RDR
ghciM = nlHsTyVar (getRdrName ghciTyConName) `nlHsAppTy` stringTy
ioM = nlHsTyVar (getRdrName ioTyConName) `nlHsAppTy` stringTy
body = nlHsVar (getRdrName ghciStepIoMName)
tySig = ghciM `nlHsFunTy` ioM
return $ noLoc $ ExprWithTySig body tySig PlaceHolder
-----------------------------------------------------------------------------
-- :check
checkModule :: String -> InputT GHCi ()
checkModule m = do
let modl = GHC.mkModuleName m
ok <- handleSourceError (\e -> GHC.printException e >> return False) $ do
r <- GHC.typecheckModule =<< GHC.parseModule =<< GHC.getModSummary modl
dflags <- getDynFlags
liftIO $ putStrLn $ showSDoc dflags $
case GHC.moduleInfo r of
cm | Just scope <- GHC.modInfoTopLevelScope cm ->
let
(loc, glob) = ASSERT( all isExternalName scope )
partition ((== modl) . GHC.moduleName . GHC.nameModule) scope
in
(text "global names: " <+> ppr glob) $$
(text "local names: " <+> ppr loc)
_ -> empty
return True
afterLoad (successIf ok) False
-----------------------------------------------------------------------------
-- :load, :add, :reload
-- | Sets '-fdefer-type-errors' if 'defer' is true, executes 'load', and
-- unsets '-fdefer-type-errors' again if it was not set before.
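-- (This is how ":load!" and ":reload!" work; for example, ":load! M"
-- loads M with -fdefer-type-errors temporarily enabled.)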
deferredLoad :: Bool -> InputT GHCi SuccessFlag -> InputT GHCi ()
deferredLoad defer load = do
flags <- getDynFlags
deferredBefore <- return (gopt Opt_DeferTypeErrors flags)
when (defer) $ Monad.void $
GHC.setProgramDynFlags $ gopt_set flags Opt_DeferTypeErrors
Monad.void $ load
flags <- getDynFlags
when (not deferredBefore) $ Monad.void $
GHC.setProgramDynFlags $ gopt_unset flags Opt_DeferTypeErrors
loadModule :: [(FilePath, Maybe Phase)] -> InputT GHCi SuccessFlag
loadModule fs = timeIt (const Nothing) (loadModule' fs)
loadModule_ :: Bool -> [FilePath] -> InputT GHCi ()
loadModule_ defer fs = deferredLoad defer (loadModule (zip fs (repeat Nothing)))
loadModule' :: [(FilePath, Maybe Phase)] -> InputT GHCi SuccessFlag
loadModule' files = do
let (filenames, phases) = unzip files
exp_filenames <- mapM expandPath filenames
let files' = zip exp_filenames phases
targets <- mapM (uncurry GHC.guessTarget) files'
  -- NOTE: we used to do the dependency analysis first, so that if it
  -- failed we didn't throw away the current set of modules. This would
-- require some re-working of the GHC interface, so we'll leave it
-- as a ToDo for now.
-- unload first
_ <- GHC.abandonAll
lift discardActiveBreakPoints
GHC.setTargets []
_ <- GHC.load LoadAllTargets
GHC.setTargets targets
doLoad False LoadAllTargets
-- :add
addModule :: [FilePath] -> InputT GHCi ()
addModule files = do
lift revertCAFs -- always revert CAFs on load/add.
files' <- mapM expandPath files
targets <- mapM (\m -> GHC.guessTarget m Nothing) files'
-- remove old targets with the same id; e.g. for :add *M
mapM_ GHC.removeTarget [ tid | Target tid _ _ <- targets ]
mapM_ GHC.addTarget targets
_ <- doLoad False LoadAllTargets
return ()
-- :reload
reloadModule :: Bool -> String -> InputT GHCi ()
reloadModule defer m = deferredLoad defer load
where load = doLoad True $
if null m then LoadAllTargets else LoadUpTo (GHC.mkModuleName m)
doLoad :: Bool -> LoadHowMuch -> InputT GHCi SuccessFlag
doLoad retain_context howmuch = do
-- turn off breakpoints before we load: we can't turn them off later, because
-- the ModBreaks will have gone away.
lift discardActiveBreakPoints
lift resetLastErrorLocations
-- Enable buffering stdout and stderr as we're compiling. Keeping these
-- handles unbuffered will just slow the compilation down, especially when
-- compiling in parallel.
gbracket (liftIO $ do hSetBuffering stdout LineBuffering
hSetBuffering stderr LineBuffering)
(\_ ->
liftIO $ do hSetBuffering stdout NoBuffering
hSetBuffering stderr NoBuffering) $ \_ -> do
ok <- trySuccess $ GHC.load howmuch
afterLoad ok retain_context
return ok
afterLoad :: SuccessFlag
-> Bool -- keep the remembered_ctx, as far as possible (:reload)
-> InputT GHCi ()
afterLoad ok retain_context = do
lift revertCAFs -- always revert CAFs on load.
lift discardTickArrays
loaded_mod_summaries <- getLoadedModules
let loaded_mods = map GHC.ms_mod loaded_mod_summaries
modulesLoadedMsg ok loaded_mods
lift $ setContextAfterLoad retain_context loaded_mod_summaries
setContextAfterLoad :: Bool -> [GHC.ModSummary] -> GHCi ()
setContextAfterLoad keep_ctxt [] = do
setContextKeepingPackageModules keep_ctxt []
setContextAfterLoad keep_ctxt ms = do
-- load a target if one is available, otherwise load the topmost module.
targets <- GHC.getTargets
case [ m | Just m <- map (findTarget ms) targets ] of
[] ->
let graph' = flattenSCCs (GHC.topSortModuleGraph True ms Nothing) in
load_this (last graph')
(m:_) ->
load_this m
where
findTarget mds t
= case filter (`matches` t) mds of
[] -> Nothing
(m:_) -> Just m
summary `matches` Target (TargetModule m) _ _
= GHC.ms_mod_name summary == m
summary `matches` Target (TargetFile f _) _ _
| Just f' <- GHC.ml_hs_file (GHC.ms_location summary) = f == f'
_ `matches` _
= False
load_this summary | m <- GHC.ms_mod summary = do
is_interp <- GHC.moduleIsInterpreted m
dflags <- getDynFlags
let star_ok = is_interp && not (safeLanguageOn dflags)
-- We import the module with a * iff
-- - it is interpreted, and
-- - -XSafe is off (it doesn't allow *-imports)
let new_ctx | star_ok = [mkIIModule (GHC.moduleName m)]
| otherwise = [mkIIDecl (GHC.moduleName m)]
setContextKeepingPackageModules keep_ctxt new_ctx
-- | Keep any package modules (except Prelude) when changing the context.
setContextKeepingPackageModules
:: Bool -- True <=> keep all of remembered_ctx
-- False <=> just keep package imports
-> [InteractiveImport] -- new context
-> GHCi ()
setContextKeepingPackageModules keep_ctx trans_ctx = do
st <- getGHCiState
let rem_ctx = remembered_ctx st
new_rem_ctx <- if keep_ctx then return rem_ctx
else keepPackageImports rem_ctx
setGHCiState st{ remembered_ctx = new_rem_ctx,
transient_ctx = filterSubsumed new_rem_ctx trans_ctx }
setGHCContextFromGHCiState
-- | Filters a list of 'InteractiveImport', clearing out any home package
-- imports so only imports from external packages are preserved. ('IIModule'
-- counts as a home package import, because we can only bring a module's
-- full top level into scope when its source is available.)
keepPackageImports :: [InteractiveImport] -> GHCi [InteractiveImport]
keepPackageImports = filterM is_pkg_import
where
is_pkg_import :: InteractiveImport -> GHCi Bool
is_pkg_import (IIModule _) = return False
is_pkg_import (IIDecl d)
= do e <- gtry $ GHC.findModule mod_name (fmap sl_fs $ ideclPkgQual d)
case e :: Either SomeException Module of
Left _ -> return False
Right m -> return (not (isHomeModule m))
where
mod_name = unLoc (ideclName d)
modulesLoadedMsg :: SuccessFlag -> [Module] -> InputT GHCi ()
modulesLoadedMsg ok mods = do
dflags <- getDynFlags
unqual <- GHC.getPrintUnqual
let mod_commas
| null mods = text "none."
| otherwise = hsep (
punctuate comma (map ppr mods)) <> text "."
status = case ok of
Failed -> text "Failed"
Succeeded -> text "Ok"
msg = status <> text ", modules loaded:" <+> mod_commas
when (verbosity dflags > 0) $
liftIO $ putStrLn $ showSDocForUser dflags unqual msg
-----------------------------------------------------------------------------
-- :type
typeOfExpr :: String -> InputT GHCi ()
typeOfExpr str
= handleSourceError GHC.printException
$ do
ty <- GHC.exprType str
printForUser $ sep [text str, nest 2 (dcolon <+> pprTypeForUser ty)]
-----------------------------------------------------------------------------
-- :kind
kindOfType :: Bool -> String -> InputT GHCi ()
kindOfType norm str
= handleSourceError GHC.printException
$ do
(ty, kind) <- GHC.typeKind norm str
printForUser $ vcat [ text str <+> dcolon <+> pprTypeForUser kind
, ppWhen norm $ equals <+> pprTypeForUser ty ]
-----------------------------------------------------------------------------
-- :quit
quit :: String -> InputT GHCi Bool
quit _ = return True
-----------------------------------------------------------------------------
-- :script
-- running a script file #1363
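-- For example, ":script setup.ghci" runs each line of setup.ghci as if
-- it had been typed at the prompt.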
scriptCmd :: String -> InputT GHCi ()
scriptCmd ws = do
case words ws of
[s] -> runScript s
_ -> throwGhcException (CmdLineError "syntax: :script <filename>")
runScript :: String -- ^ filename
-> InputT GHCi ()
runScript filename = do
filename' <- expandPath filename
either_script <- liftIO $ tryIO (openFile filename' ReadMode)
case either_script of
Left _err -> throwGhcException (CmdLineError $ "IO error: \""++filename++"\" "
++(ioeGetErrorString _err))
Right script -> do
st <- lift $ getGHCiState
let prog = progname st
line = line_number st
lift $ setGHCiState st{progname=filename',line_number=0}
scriptLoop script
liftIO $ hClose script
new_st <- lift $ getGHCiState
lift $ setGHCiState new_st{progname=prog,line_number=line}
where scriptLoop script = do
res <- runOneCommand handler $ fileLoop script
case res of
Nothing -> return ()
Just s -> if s
then scriptLoop script
else return ()
-----------------------------------------------------------------------------
-- :issafe
-- Displaying Safe Haskell properties of a module
isSafeCmd :: String -> InputT GHCi ()
isSafeCmd m =
case words m of
[s] | looksLikeModuleName s -> do
md <- lift $ lookupModule s
isSafeModule md
[] -> do md <- guessCurrentModule "issafe"
isSafeModule md
_ -> throwGhcException (CmdLineError "syntax: :issafe <module>")
isSafeModule :: Module -> InputT GHCi ()
isSafeModule m = do
mb_mod_info <- GHC.getModuleInfo m
when (isNothing mb_mod_info)
(throwGhcException $ CmdLineError $ "unknown module: " ++ mname)
dflags <- getDynFlags
let iface = GHC.modInfoIface $ fromJust mb_mod_info
when (isNothing iface)
(throwGhcException $ CmdLineError $ "can't load interface file for module: " ++
(GHC.moduleNameString $ GHC.moduleName m))
(msafe, pkgs) <- GHC.moduleTrustReqs m
let trust = showPpr dflags $ getSafeMode $ GHC.mi_trust $ fromJust iface
pkg = if packageTrusted dflags m then "trusted" else "untrusted"
(good, bad) = tallyPkgs dflags pkgs
-- print info to user...
liftIO $ putStrLn $ "Trust type is (Module: " ++ trust ++ ", Package: " ++ pkg ++ ")"
liftIO $ putStrLn $ "Package Trust: " ++ (if packageTrustOn dflags then "On" else "Off")
when (not $ null good)
(liftIO $ putStrLn $ "Trusted package dependencies (trusted): " ++
(intercalate ", " $ map (showPpr dflags) good))
case msafe && null bad of
True -> liftIO $ putStrLn $ mname ++ " is trusted!"
False -> do
when (not $ null bad)
(liftIO $ putStrLn $ "Trusted package dependencies (untrusted): "
++ (intercalate ", " $ map (showPpr dflags) bad))
liftIO $ putStrLn $ mname ++ " is NOT trusted!"
where
mname = GHC.moduleNameString $ GHC.moduleName m
packageTrusted dflags md
| thisPackage dflags == moduleUnitId md = True
| otherwise = trusted $ getPackageDetails dflags (moduleUnitId md)
tallyPkgs dflags deps | not (packageTrustOn dflags) = ([], [])
| otherwise = partition part deps
where part pkg = trusted $ getPackageDetails dflags pkg
-----------------------------------------------------------------------------
-- :browse
-- Browsing a module's contents
browseCmd :: Bool -> String -> InputT GHCi ()
browseCmd bang m =
case words m of
['*':s] | looksLikeModuleName s -> do
md <- lift $ wantInterpretedModule s
browseModule bang md False
[s] | looksLikeModuleName s -> do
md <- lift $ lookupModule s
browseModule bang md True
[] -> do md <- guessCurrentModule ("browse" ++ if bang then "!" else "")
browseModule bang md True
_ -> throwGhcException (CmdLineError "syntax: :browse <module>")
guessCurrentModule :: String -> InputT GHCi Module
-- Guess which module the user wants to browse. Pick
-- modules that are interpreted first. The most
-- recently-added module occurs last, it seems.
guessCurrentModule cmd
= do imports <- GHC.getContext
when (null imports) $ throwGhcException $
CmdLineError (':' : cmd ++ ": no current module")
case (head imports) of
IIModule m -> GHC.findModule m Nothing
IIDecl d -> GHC.findModule (unLoc (ideclName d))
(fmap sl_fs $ ideclPkgQual d)
-- without bang, show items in context of their parents and omit children
-- with bang, show class methods and data constructors separately, and
-- indicate import modules, to aid qualifying unqualified names
-- with sorted, sort items alphabetically
browseModule :: Bool -> Module -> Bool -> InputT GHCi ()
browseModule bang modl exports_only = do
-- :browse reports qualifiers wrt current context
unqual <- GHC.getPrintUnqual
mb_mod_info <- GHC.getModuleInfo modl
case mb_mod_info of
Nothing -> throwGhcException (CmdLineError ("unknown module: " ++
GHC.moduleNameString (GHC.moduleName modl)))
Just mod_info -> do
dflags <- getDynFlags
let names
| exports_only = GHC.modInfoExports mod_info
| otherwise = GHC.modInfoTopLevelScope mod_info
`orElse` []
                -- sort alphabetically by name, but put locally-defined
                -- identifiers first. We would like to improve this; see #1799.
sorted_names = loc_sort local ++ occ_sort external
where
(local,external) = ASSERT( all isExternalName names )
partition ((==modl) . nameModule) names
occ_sort = sortBy (compare `on` nameOccName)
-- try to sort by src location. If the first name in our list
-- has a good source location, then they all should.
loc_sort ns
| n:_ <- ns, isGoodSrcSpan (nameSrcSpan n)
= sortBy (compare `on` nameSrcSpan) ns
| otherwise
= occ_sort ns
mb_things <- mapM GHC.lookupName sorted_names
let filtered_things = filterOutChildren (\t -> t) (catMaybes mb_things)
rdr_env <- GHC.getGRE
let things | bang = catMaybes mb_things
| otherwise = filtered_things
pretty | bang = pprTyThing
| otherwise = pprTyThingInContext
labels [] = text "-- not currently imported"
labels l = text $ intercalate "\n" $ map qualifier l
qualifier :: Maybe [ModuleName] -> String
qualifier = maybe "-- defined locally"
(("-- imported via "++) . intercalate ", "
. map GHC.moduleNameString)
importInfo = RdrName.getGRE_NameQualifier_maybes rdr_env
modNames :: [[Maybe [ModuleName]]]
modNames = map (importInfo . GHC.getName) things
-- annotate groups of imports with their import modules
-- the default ordering is somewhat arbitrary, so we group
-- by header and sort groups; the names themselves should
-- really come in order of source appearance.. (trac #1799)
annotate mts = concatMap (\(m,ts)->labels m:ts)
$ sortBy cmpQualifiers $ grp mts
where cmpQualifiers =
compare `on` (map (fmap (map moduleNameFS)) . fst)
grp [] = []
grp mts@((m,_):_) = (m,map snd g) : grp ng
where (g,ng) = partition ((==m).fst) mts
let prettyThings, prettyThings' :: [SDoc]
prettyThings = map pretty things
prettyThings' | bang = annotate $ zip modNames prettyThings
| otherwise = prettyThings
liftIO $ putStrLn $ showSDocForUser dflags unqual (vcat prettyThings')
-- ToDo: modInfoInstances currently throws an exception for
-- package modules. When it works, we can do this:
-- $$ vcat (map GHC.pprInstance (GHC.modInfoInstances mod_info))
-----------------------------------------------------------------------------
-- :module
-- Setting the module context. For details on context handling see
-- "remembered_ctx" and "transient_ctx" in GhciMonad.
moduleCmd :: String -> GHCi ()
moduleCmd str
| all sensible strs = cmd
| otherwise = throwGhcException (CmdLineError "syntax: :module [+/-] [*]M1 ... [*]Mn")
where
(cmd, strs) =
case str of
'+':stuff -> rest addModulesToContext stuff
'-':stuff -> rest remModulesFromContext stuff
stuff -> rest setContext stuff
rest op stuff = (op as bs, stuffs)
where (as,bs) = partitionWith starred stuffs
stuffs = words stuff
sensible ('*':m) = looksLikeModuleName m
sensible m = looksLikeModuleName m
starred ('*':m) = Left (GHC.mkModuleName m)
starred m = Right (GHC.mkModuleName m)
-- -----------------------------------------------------------------------------
-- Four ways to manipulate the context:
-- (a) :module +<stuff>: addModulesToContext
-- (b) :module -<stuff>: remModulesFromContext
-- (c) :module <stuff>: setContext
-- (d) import <module>...: addImportToContext
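-- For example (hypothetical session; module names are illustrative):
--
--   ghci> :module +*Main Data.List    -- add Main's full top-level scope and Data.List's exports
--   ghci> :module -Data.List          -- remove Data.List again
--   ghci> :module Prelude             -- replace the whole context with just Prelude
--   ghci> import Data.Map (Map)       -- add a single import declaration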
addModulesToContext :: [ModuleName] -> [ModuleName] -> GHCi ()
addModulesToContext starred unstarred = restoreContextOnFailure $ do
addModulesToContext_ starred unstarred
addModulesToContext_ :: [ModuleName] -> [ModuleName] -> GHCi ()
addModulesToContext_ starred unstarred = do
mapM_ addII (map mkIIModule starred ++ map mkIIDecl unstarred)
setGHCContextFromGHCiState
remModulesFromContext :: [ModuleName] -> [ModuleName] -> GHCi ()
remModulesFromContext starred unstarred = do
-- we do *not* call restoreContextOnFailure here. If the user
-- is trying to fix up a context that contains errors by removing
-- modules, we don't want GHC to silently put them back in again.
mapM_ rm (starred ++ unstarred)
setGHCContextFromGHCiState
where
rm :: ModuleName -> GHCi ()
rm str = do
m <- moduleName <$> lookupModuleName str
let filt = filter ((/=) m . iiModuleName)
modifyGHCiState $ \st ->
st { remembered_ctx = filt (remembered_ctx st)
, transient_ctx = filt (transient_ctx st) }
setContext :: [ModuleName] -> [ModuleName] -> GHCi ()
setContext starred unstarred = restoreContextOnFailure $ do
modifyGHCiState $ \st -> st { remembered_ctx = [], transient_ctx = [] }
                            -- clear both the remembered and transient contexts
addModulesToContext_ starred unstarred
addImportToContext :: String -> GHCi ()
addImportToContext str = restoreContextOnFailure $ do
idecl <- GHC.parseImportDecl str
addII (IIDecl idecl) -- #5836
setGHCContextFromGHCiState
-- Util used by addImportToContext and addModulesToContext
addII :: InteractiveImport -> GHCi ()
addII iidecl = do
checkAdd iidecl
modifyGHCiState $ \st ->
st { remembered_ctx = addNotSubsumed iidecl (remembered_ctx st)
, transient_ctx = filter (not . (iidecl `iiSubsumes`))
(transient_ctx st)
}
-- Sometimes we can't tell whether an import is valid or not until
-- we finally call 'GHC.setContext'. e.g.
--
-- import System.IO (foo)
--
-- will fail because System.IO does not export foo. In this case we
-- don't want to store the import in the context permanently, so we
-- catch the failure from 'setGHCContextFromGHCiState' and set the
-- context back to what it was.
--
-- See #6007
--
restoreContextOnFailure :: GHCi a -> GHCi a
restoreContextOnFailure do_this = do
st <- getGHCiState
let rc = remembered_ctx st; tc = transient_ctx st
do_this `gonException` (modifyGHCiState $ \st' ->
st' { remembered_ctx = rc, transient_ctx = tc })
-- -----------------------------------------------------------------------------
-- Validate a module that we want to add to the context
checkAdd :: InteractiveImport -> GHCi ()
checkAdd ii = do
dflags <- getDynFlags
let safe = safeLanguageOn dflags
case ii of
IIModule modname
| safe -> throwGhcException $ CmdLineError "can't use * imports with Safe Haskell"
| otherwise -> wantInterpretedModuleName modname >> return ()
IIDecl d -> do
let modname = unLoc (ideclName d)
pkgqual = ideclPkgQual d
m <- GHC.lookupModule modname (fmap sl_fs pkgqual)
when safe $ do
t <- GHC.isModuleTrusted m
when (not t) $ throwGhcException $ ProgramError $ ""
-- -----------------------------------------------------------------------------
-- Update the GHC API's view of the context
-- | Sets the GHC context from the GHCi state. The GHC context is
-- always set this way, we never modify it incrementally.
--
-- We ignore any imports for which the ModuleName does not currently
-- exist. This is so that the remembered_ctx can contain imports for
-- modules that are not currently loaded, perhaps because we just did
-- a :reload and encountered errors.
--
-- Prelude is added if not already present in the list. Therefore to
-- override the implicit Prelude import you can say 'import Prelude ()'
-- at the prompt, just as in Haskell source.
--
setGHCContextFromGHCiState :: GHCi ()
setGHCContextFromGHCiState = do
st <- getGHCiState
-- re-use checkAdd to check whether the module is valid. If the
-- module does not exist, we do *not* want to print an error
-- here, we just want to silently keep the module in the context
-- until such time as the module reappears again. So we ignore
-- the actual exception thrown by checkAdd, using tryBool to
-- turn it into a Bool.
iidecls <- filterM (tryBool.checkAdd) (transient_ctx st ++ remembered_ctx st)
dflags <- GHC.getSessionDynFlags
GHC.setContext $
if xopt Opt_ImplicitPrelude dflags && not (any isPreludeImport iidecls)
then iidecls ++ [implicitPreludeImport]
else iidecls
-- XXX put prel at the end, so that guessCurrentModule doesn't pick it up.
-- -----------------------------------------------------------------------------
-- Utils on InteractiveImport
mkIIModule :: ModuleName -> InteractiveImport
mkIIModule = IIModule
mkIIDecl :: ModuleName -> InteractiveImport
mkIIDecl = IIDecl . simpleImportDecl
iiModules :: [InteractiveImport] -> [ModuleName]
iiModules is = [m | IIModule m <- is]
iiModuleName :: InteractiveImport -> ModuleName
iiModuleName (IIModule m) = m
iiModuleName (IIDecl d) = unLoc (ideclName d)
preludeModuleName :: ModuleName
preludeModuleName = GHC.mkModuleName "Prelude"
implicitPreludeImport :: InteractiveImport
implicitPreludeImport = IIDecl (simpleImportDecl preludeModuleName)
isPreludeImport :: InteractiveImport -> Bool
isPreludeImport (IIModule {}) = True
isPreludeImport (IIDecl d) = unLoc (ideclName d) == preludeModuleName
addNotSubsumed :: InteractiveImport
-> [InteractiveImport] -> [InteractiveImport]
addNotSubsumed i is
| any (`iiSubsumes` i) is = is
| otherwise = i : filter (not . (i `iiSubsumes`)) is
-- | @filterSubsumed is js@ returns the elements of @js@ not subsumed
-- by any of @is@.
filterSubsumed :: [InteractiveImport] -> [InteractiveImport]
-> [InteractiveImport]
filterSubsumed is js = filter (\j -> not (any (`iiSubsumes` j) is)) js
-- | Returns True if the left import subsumes the right one. Doesn't
-- need to be 100% accurate, conservatively returning False is fine.
-- (EXCEPT: (IIModule m) *must* subsume itself, otherwise a panic in
-- plusProv will ensue (#5904))
--
-- Note that an IIModule does not necessarily subsume an IIDecl,
-- because e.g. a module might export a name that is only available
-- qualified within the module itself.
--
-- Note that 'import M' does not necessarily subsume 'import M(foo)',
-- because M might not export foo and we want an error to be produced
-- in that case.
--
iiSubsumes :: InteractiveImport -> InteractiveImport -> Bool
iiSubsumes (IIModule m1) (IIModule m2) = m1==m2
iiSubsumes (IIDecl d1) (IIDecl d2) -- A bit crude
= unLoc (ideclName d1) == unLoc (ideclName d2)
&& ideclAs d1 == ideclAs d2
&& (not (ideclQualified d1) || ideclQualified d2)
&& (ideclHiding d1 `hidingSubsumes` ideclHiding d2)
where
_ `hidingSubsumes` Just (False,L _ []) = True
Just (False, L _ xs) `hidingSubsumes` Just (False,L _ ys)
= all (`elem` xs) ys
h1 `hidingSubsumes` h2 = h1 == h2
iiSubsumes _ _ = False
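-- Some concrete cases, derived from the clauses above (import declarations
-- written informally):
--
--   IIModule m            `iiSubsumes` IIModule m           -- True (must hold, see #5904)
--   "import M"            `iiSubsumes` "import M ()"        -- True  (empty explicit list)
--   "import M (foo,bar)"  `iiSubsumes` "import M (foo)"     -- True  (superset of names)
--   "import M"            `iiSubsumes` "import M (foo)"     -- False (M might not export foo)
--   IIModule m            `iiSubsumes` "import M"           -- False (see note above)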
----------------------------------------------------------------------------
-- :set
-- set options in the interpreter. Syntax is exactly the same as the
-- ghc command line, except that certain options aren't available (-C,
-- -E etc.)
--
-- This is pretty fragile: most options won't work as expected. ToDo:
-- figure out which ones & disallow them.
setCmd :: String -> GHCi ()
setCmd "" = showOptions False
setCmd "-a" = showOptions True
setCmd str
= case getCmd str of
Right ("args", rest) ->
case toArgs rest of
Left err -> liftIO (hPutStrLn stderr err)
Right args -> setArgs args
Right ("prog", rest) ->
case toArgs rest of
Right [prog] -> setProg prog
_ -> liftIO (hPutStrLn stderr "syntax: :set prog <progname>")
Right ("prompt", rest) -> setPrompt $ dropWhile isSpace rest
Right ("prompt2", rest) -> setPrompt2 $ dropWhile isSpace rest
Right ("editor", rest) -> setEditor $ dropWhile isSpace rest
Right ("stop", rest) -> setStop $ dropWhile isSpace rest
_ -> case toArgs str of
Left err -> liftIO (hPutStrLn stderr err)
Right wds -> setOptions wds
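-- For example (hypothetical session):
--
--   ghci> :set args foo "bar baz"        -- arguments later seen by getArgs
--   ghci> :set editor vim                -- editor used by :edit
--   ghci> :set +s -XOverloadedStrings    -- GHCi options (+...) and dynamic flags can be mixed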
setiCmd :: String -> GHCi ()
setiCmd "" = GHC.getInteractiveDynFlags >>= liftIO . showDynFlags False
setiCmd "-a" = GHC.getInteractiveDynFlags >>= liftIO . showDynFlags True
setiCmd str =
case toArgs str of
Left err -> liftIO (hPutStrLn stderr err)
Right wds -> newDynFlags True wds
showOptions :: Bool -> GHCi ()
showOptions show_all
= do st <- getGHCiState
dflags <- getDynFlags
let opts = options st
liftIO $ putStrLn (showSDoc dflags (
text "options currently set: " <>
if null opts
then text "none."
else hsep (map (\o -> char '+' <> text (optToStr o)) opts)
))
getDynFlags >>= liftIO . showDynFlags show_all
showDynFlags :: Bool -> DynFlags -> IO ()
showDynFlags show_all dflags = do
showLanguages' show_all dflags
putStrLn $ showSDoc dflags $
text "GHCi-specific dynamic flag settings:" $$
nest 2 (vcat (map (setting gopt) ghciFlags))
putStrLn $ showSDoc dflags $
text "other dynamic, non-language, flag settings:" $$
nest 2 (vcat (map (setting gopt) others))
putStrLn $ showSDoc dflags $
text "warning settings:" $$
nest 2 (vcat (map (setting wopt) DynFlags.fWarningFlags))
where
setting test flag
| quiet = empty
| is_on = fstr name
| otherwise = fnostr name
where name = flagSpecName flag
f = flagSpecFlag flag
is_on = test f dflags
quiet = not show_all && test f default_dflags == is_on
default_dflags = defaultDynFlags (settings dflags)
fstr str = text "-f" <> text str
fnostr str = text "-fno-" <> text str
(ghciFlags,others) = partition (\f -> flagSpecFlag f `elem` flgs)
DynFlags.fFlags
flgs = [ Opt_PrintExplicitForalls
, Opt_PrintExplicitKinds
, Opt_PrintUnicodeSyntax
, Opt_PrintBindResult
, Opt_BreakOnException
, Opt_BreakOnError
, Opt_PrintEvldWithShow
]
setArgs, setOptions :: [String] -> GHCi ()
setProg, setEditor, setStop :: String -> GHCi ()
setArgs args = do
st <- getGHCiState
setGHCiState st{ GhciMonad.args = args }
setProg prog = do
st <- getGHCiState
setGHCiState st{ progname = prog }
setEditor cmd = do
st <- getGHCiState
setGHCiState st{ editor = cmd }
setStop str@(c:_) | isDigit c
= do let (nm_str,rest) = break (not.isDigit) str
nm = read nm_str
st <- getGHCiState
let old_breaks = breaks st
if all ((/= nm) . fst) old_breaks
then printForUser (text "Breakpoint" <+> ppr nm <+>
text "does not exist")
else do
let new_breaks = map fn old_breaks
fn (i,loc) | i == nm = (i,loc { onBreakCmd = dropWhile isSpace rest })
| otherwise = (i,loc)
setGHCiState st{ breaks = new_breaks }
setStop cmd = do
st <- getGHCiState
setGHCiState st{ stop = cmd }
setPrompt :: String -> GHCi ()
setPrompt = setPrompt_ f err
where
f v st = st { prompt = v }
err st = "syntax: :set prompt <prompt>, currently \"" ++ prompt st ++ "\""
setPrompt2 :: String -> GHCi ()
setPrompt2 = setPrompt_ f err
where
f v st = st { prompt2 = v }
err st = "syntax: :set prompt2 <prompt>, currently \"" ++ prompt2 st ++ "\""
setPrompt_ :: (String -> GHCiState -> GHCiState) -> (GHCiState -> String) -> String -> GHCi ()
setPrompt_ f err value = do
st <- getGHCiState
if null value
then liftIO $ hPutStrLn stderr $ err st
else case value of
'\"' : _ -> case reads value of
[(value', xs)] | all isSpace xs ->
setGHCiState $ f value' st
_ ->
liftIO $ hPutStrLn stderr "Can't parse prompt string. Use Haskell syntax."
_ -> setGHCiState $ f value st
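-- For example:
--
--   :set prompt ghci>                   -- unquoted: taken literally
--   :set prompt "\ESC[1mghci>\ESC[0m "  -- a leading '"' switches to Haskell 'reads'
--                                       -- syntax, so escapes are interpreted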
setOptions wds =
do -- first, deal with the GHCi opts (+s, +t, etc.)
let (plus_opts, minus_opts) = partitionWith isPlus wds
mapM_ setOpt plus_opts
-- then, dynamic flags
newDynFlags False minus_opts
newDynFlags :: Bool -> [String] -> GHCi ()
newDynFlags interactive_only minus_opts = do
let lopts = map noLoc minus_opts
idflags0 <- GHC.getInteractiveDynFlags
(idflags1, leftovers, warns) <- GHC.parseDynamicFlags idflags0 lopts
liftIO $ handleFlagWarnings idflags1 warns
when (not $ null leftovers)
(throwGhcException . CmdLineError
$ "Some flags have not been recognized: "
++ (concat . intersperse ", " $ map unLoc leftovers))
when (interactive_only &&
packageFlags idflags1 /= packageFlags idflags0) $ do
liftIO $ hPutStrLn stderr "cannot set package flags with :seti; use :set"
GHC.setInteractiveDynFlags idflags1
installInteractivePrint (interactivePrint idflags1) False
dflags0 <- getDynFlags
when (not interactive_only) $ do
(dflags1, _, _) <- liftIO $ GHC.parseDynamicFlags dflags0 lopts
new_pkgs <- GHC.setProgramDynFlags dflags1
-- if the package flags changed, reset the context and link
-- the new packages.
dflags2 <- getDynFlags
when (packageFlags dflags2 /= packageFlags dflags0) $ do
when (verbosity dflags2 > 0) $
liftIO . putStrLn $
"package flags have changed, resetting and loading new packages..."
GHC.setTargets []
_ <- GHC.load LoadAllTargets
liftIO $ linkPackages dflags2 new_pkgs
-- package flags changed, we can't re-use any of the old context
setContextAfterLoad False []
-- and copy the package state to the interactive DynFlags
idflags <- GHC.getInteractiveDynFlags
GHC.setInteractiveDynFlags
idflags{ pkgState = pkgState dflags2
, pkgDatabase = pkgDatabase dflags2
, packageFlags = packageFlags dflags2 }
let ld0length = length $ ldInputs dflags0
fmrk0length = length $ cmdlineFrameworks dflags0
newLdInputs = drop ld0length (ldInputs dflags2)
newCLFrameworks = drop fmrk0length (cmdlineFrameworks dflags2)
when (not (null newLdInputs && null newCLFrameworks)) $
liftIO $ linkCmdLineLibs $
dflags2 { ldInputs = newLdInputs
, cmdlineFrameworks = newCLFrameworks }
return ()
unsetOptions :: String -> GHCi ()
unsetOptions str
= -- first, deal with the GHCi opts (+s, +t, etc.)
let opts = words str
(minus_opts, rest1) = partition isMinus opts
(plus_opts, rest2) = partitionWith isPlus rest1
(other_opts, rest3) = partition (`elem` map fst defaulters) rest2
defaulters =
[ ("args" , setArgs default_args)
, ("prog" , setProg default_progname)
, ("prompt" , setPrompt default_prompt)
, ("prompt2", setPrompt2 default_prompt2)
, ("editor" , liftIO findEditor >>= setEditor)
, ("stop" , setStop default_stop)
]
no_flag ('-':'f':rest) = return ("-fno-" ++ rest)
no_flag ('-':'X':rest) = return ("-XNo" ++ rest)
no_flag f = throwGhcException (ProgramError ("don't know how to reverse " ++ f))
in if (not (null rest3))
then liftIO (putStrLn ("unknown option: '" ++ head rest3 ++ "'"))
else do
mapM_ (fromJust.flip lookup defaulters) other_opts
mapM_ unsetOpt plus_opts
no_flags <- mapM no_flag minus_opts
newDynFlags False no_flags
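-- For example:
--
--   :unset -XOverloadedStrings   ==>  newDynFlags False ["-XNoOverloadedStrings"]
--   :unset -fdefer-type-errors   ==>  newDynFlags False ["-fno-defer-type-errors"]
--   :unset +s                    ==>  unsetOpt "s"   (turn ShowTiming off)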
isMinus :: String -> Bool
isMinus ('-':_) = True
isMinus _ = False
isPlus :: String -> Either String String
isPlus ('+':opt) = Left opt
isPlus other = Right other
setOpt, unsetOpt :: String -> GHCi ()
setOpt str
= case strToGHCiOpt str of
Nothing -> liftIO (putStrLn ("unknown option: '" ++ str ++ "'"))
Just o -> setOption o
unsetOpt str
= case strToGHCiOpt str of
Nothing -> liftIO (putStrLn ("unknown option: '" ++ str ++ "'"))
Just o -> unsetOption o
strToGHCiOpt :: String -> (Maybe GHCiOption)
strToGHCiOpt "m" = Just Multiline
strToGHCiOpt "s" = Just ShowTiming
strToGHCiOpt "t" = Just ShowType
strToGHCiOpt "r" = Just RevertCAFs
strToGHCiOpt _ = Nothing
optToStr :: GHCiOption -> String
optToStr Multiline = "m"
optToStr ShowTiming = "s"
optToStr ShowType = "t"
optToStr RevertCAFs = "r"
-- ---------------------------------------------------------------------------
-- :show
showCmd :: String -> GHCi ()
showCmd "" = showOptions False
showCmd "-a" = showOptions True
showCmd str = do
st <- getGHCiState
case words str of
["args"] -> liftIO $ putStrLn (show (GhciMonad.args st))
["prog"] -> liftIO $ putStrLn (show (progname st))
["prompt"] -> liftIO $ putStrLn (show (prompt st))
["prompt2"] -> liftIO $ putStrLn (show (prompt2 st))
["editor"] -> liftIO $ putStrLn (show (editor st))
["stop"] -> liftIO $ putStrLn (show (stop st))
["imports"] -> showImports
["modules" ] -> showModules
["bindings"] -> showBindings
["linker"] ->
do dflags <- getDynFlags
liftIO $ showLinkerState dflags
["breaks"] -> showBkptTable
["context"] -> showContext
["packages"] -> showPackages
["paths"] -> showPaths
["languages"] -> showLanguages -- backwards compat
["language"] -> showLanguages
["lang"] -> showLanguages -- useful abbreviation
_ -> throwGhcException (CmdLineError ("syntax: :show [ args | prog | prompt | prompt2 | editor | stop | modules\n" ++
" | bindings | breaks | context | packages | language ]"))
showiCmd :: String -> GHCi ()
showiCmd str = do
case words str of
["languages"] -> showiLanguages -- backwards compat
["language"] -> showiLanguages
["lang"] -> showiLanguages -- useful abbreviation
_ -> throwGhcException (CmdLineError ("syntax: :showi language"))
showImports :: GHCi ()
showImports = do
st <- getGHCiState
dflags <- getDynFlags
let rem_ctx = reverse (remembered_ctx st)
trans_ctx = transient_ctx st
show_one (IIModule star_m)
= ":module +*" ++ moduleNameString star_m
show_one (IIDecl imp) = showPpr dflags imp
prel_imp
| any isPreludeImport (rem_ctx ++ trans_ctx) = []
| not (xopt Opt_ImplicitPrelude dflags) = []
| otherwise = ["import Prelude -- implicit"]
trans_comment s = s ++ " -- added automatically"
--
liftIO $ mapM_ putStrLn (prel_imp ++ map show_one rem_ctx
++ map (trans_comment . show_one) trans_ctx)
showModules :: GHCi ()
showModules = do
loaded_mods <- getLoadedModules
-- we want *loaded* modules only, see #1734
let show_one ms = do m <- GHC.showModule ms; liftIO (putStrLn m)
mapM_ show_one loaded_mods
getLoadedModules :: GHC.GhcMonad m => m [GHC.ModSummary]
getLoadedModules = do
graph <- GHC.getModuleGraph
filterM (GHC.isLoaded . GHC.ms_mod_name) graph
showBindings :: GHCi ()
showBindings = do
bindings <- GHC.getBindings
(insts, finsts) <- GHC.getInsts
docs <- mapM makeDoc (reverse bindings)
-- reverse so the new ones come last
let idocs = map GHC.pprInstanceHdr insts
fidocs = map GHC.pprFamInst finsts
mapM_ printForUserPartWay (docs ++ idocs ++ fidocs)
where
makeDoc (AnId i) = pprTypeAndContents i
makeDoc tt = do
mb_stuff <- GHC.getInfo False (getName tt)
return $ maybe (text "") pprTT mb_stuff
pprTT :: (TyThing, Fixity, [GHC.ClsInst], [GHC.FamInst]) -> SDoc
pprTT (thing, fixity, _cls_insts, _fam_insts)
= pprTyThing thing
$$ show_fixity
where
show_fixity
| fixity == GHC.defaultFixity = empty
| otherwise = ppr fixity <+> ppr (GHC.getName thing)
printTyThing :: TyThing -> GHCi ()
printTyThing tyth = printForUser (pprTyThing tyth)
showBkptTable :: GHCi ()
showBkptTable = do
st <- getGHCiState
printForUser $ prettyLocations (breaks st)
showContext :: GHCi ()
showContext = do
resumes <- GHC.getResumeContext
printForUser $ vcat (map pp_resume (reverse resumes))
where
pp_resume res =
ptext (sLit "--> ") <> text (GHC.resumeStmt res)
$$ nest 2 (ptext (sLit "Stopped at") <+> ppr (GHC.resumeSpan res))
showPackages :: GHCi ()
showPackages = do
dflags <- getDynFlags
let pkg_flags = packageFlags dflags
liftIO $ putStrLn $ showSDoc dflags $
text ("active package flags:"++if null pkg_flags then " none" else "") $$
nest 2 (vcat (map pprFlag pkg_flags))
showPaths :: GHCi ()
showPaths = do
dflags <- getDynFlags
liftIO $ do
cwd <- getCurrentDirectory
putStrLn $ showSDoc dflags $
text "current working directory: " $$
nest 2 (text cwd)
let ipaths = importPaths dflags
putStrLn $ showSDoc dflags $
text ("module import search paths:"++if null ipaths then " none" else "") $$
nest 2 (vcat (map text ipaths))
showLanguages :: GHCi ()
showLanguages = getDynFlags >>= liftIO . showLanguages' False
showiLanguages :: GHCi ()
showiLanguages = GHC.getInteractiveDynFlags >>= liftIO . showLanguages' False
showLanguages' :: Bool -> DynFlags -> IO ()
showLanguages' show_all dflags =
putStrLn $ showSDoc dflags $ vcat
[ text "base language is: " <>
case language dflags of
Nothing -> text "Haskell2010"
Just Haskell98 -> text "Haskell98"
Just Haskell2010 -> text "Haskell2010"
, (if show_all then text "all active language options:"
else text "with the following modifiers:") $$
nest 2 (vcat (map (setting xopt) DynFlags.xFlags))
]
where
setting test flag
| quiet = empty
| is_on = text "-X" <> text name
| otherwise = text "-XNo" <> text name
where name = flagSpecName flag
f = flagSpecFlag flag
is_on = test f dflags
quiet = not show_all && test f default_dflags == is_on
default_dflags =
defaultDynFlags (settings dflags) `lang_set`
case language dflags of
Nothing -> Just Haskell2010
other -> other
-- -----------------------------------------------------------------------------
-- Completion
completeCmd :: String -> GHCi ()
completeCmd argLine0 = case parseLine argLine0 of
Just ("repl", resultRange, left) -> do
(unusedLine,compls) <- ghciCompleteWord (reverse left,"")
let compls' = takeRange resultRange compls
liftIO . putStrLn $ unwords [ show (length compls'), show (length compls), show (reverse unusedLine) ]
forM_ (takeRange resultRange compls) $ \(Completion r _ _) -> do
liftIO $ print r
_ -> throwGhcException (CmdLineError "Syntax: :complete repl [<range>] <quoted-string-to-complete>")
where
parseLine argLine
| null argLine = Nothing
| null rest1 = Nothing
| otherwise = (,,) dom <$> resRange <*> s
where
(dom, rest1) = breakSpace argLine
(rng, rest2) = breakSpace rest1
resRange | head rest1 == '"' = parseRange ""
| otherwise = parseRange rng
s | head rest1 == '"' = readMaybe rest1 :: Maybe String
| otherwise = readMaybe rest2
breakSpace = fmap (dropWhile isSpace) . break isSpace
takeRange (lb,ub) = maybe id (drop . pred) lb . maybe id take ub
-- syntax: [n-][m] with semantics "drop (n-1) . take m"
parseRange :: String -> Maybe (Maybe Int,Maybe Int)
parseRange s = case span isDigit s of
(_, "") ->
-- upper limit only
Just (Nothing, bndRead s)
(s1, '-' : s2)
| all isDigit s2 ->
Just (bndRead s1, bndRead s2)
_ ->
Nothing
where
bndRead x = if null x then Nothing else Just (read x)
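-- For example, if the available completions were c1..c9 (illustrative):
--
--   :complete repl 5 ""      -- take 5          : c1..c5
--   :complete repl 2-4 ""    -- drop 1 . take 4 : c2..c4
--   :complete repl 3- ""     -- drop 2          : c3..c9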
completeGhciCommand, completeMacro, completeIdentifier, completeModule,
completeSetModule, completeSeti, completeShowiOptions,
completeHomeModule, completeSetOptions, completeShowOptions,
completeHomeModuleOrFile, completeExpression
:: CompletionFunc GHCi
ghciCompleteWord :: CompletionFunc GHCi
ghciCompleteWord line@(left,_) = case firstWord of
':':cmd | null rest -> completeGhciCommand line
| otherwise -> do
completion <- lookupCompletion cmd
completion line
"import" -> completeModule line
_ -> completeExpression line
where
(firstWord,rest) = break isSpace $ dropWhile isSpace $ reverse left
lookupCompletion ('!':_) = return completeFilename
lookupCompletion c = do
maybe_cmd <- lookupCommand' c
case maybe_cmd of
Just (_,_,f) -> return f
Nothing -> return completeFilename
completeGhciCommand = wrapCompleter " " $ \w -> do
macros <- liftIO $ readIORef macros_ref
cmds <- ghci_commands `fmap` getGHCiState
let macro_names = map (':':) . map cmdName $ macros
let command_names = map (':':) . map cmdName $ cmds
let{ candidates = case w of
':' : ':' : _ -> map (':':) command_names
_ -> nub $ macro_names ++ command_names }
return $ filter (w `isPrefixOf`) candidates
completeMacro = wrapIdentCompleter $ \w -> do
cmds <- liftIO $ readIORef macros_ref
return (filter (w `isPrefixOf`) (map cmdName cmds))
completeIdentifier = wrapIdentCompleter $ \w -> do
rdrs <- GHC.getRdrNamesInScope
dflags <- GHC.getSessionDynFlags
return (filter (w `isPrefixOf`) (map (showPpr dflags) rdrs))
completeModule = wrapIdentCompleter $ \w -> do
dflags <- GHC.getSessionDynFlags
let pkg_mods = allVisibleModules dflags
loaded_mods <- liftM (map GHC.ms_mod_name) getLoadedModules
return $ filter (w `isPrefixOf`)
$ map (showPpr dflags) $ loaded_mods ++ pkg_mods
completeSetModule = wrapIdentCompleterWithModifier "+-" $ \m w -> do
dflags <- GHC.getSessionDynFlags
modules <- case m of
Just '-' -> do
imports <- GHC.getContext
return $ map iiModuleName imports
_ -> do
let pkg_mods = allVisibleModules dflags
loaded_mods <- liftM (map GHC.ms_mod_name) getLoadedModules
return $ loaded_mods ++ pkg_mods
return $ filter (w `isPrefixOf`) $ map (showPpr dflags) modules
completeHomeModule = wrapIdentCompleter listHomeModules
listHomeModules :: String -> GHCi [String]
listHomeModules w = do
g <- GHC.getModuleGraph
let home_mods = map GHC.ms_mod_name g
dflags <- getDynFlags
return $ sort $ filter (w `isPrefixOf`)
$ map (showPpr dflags) home_mods
completeSetOptions = wrapCompleter flagWordBreakChars $ \w -> do
return (filter (w `isPrefixOf`) opts)
where opts = "args":"prog":"prompt":"prompt2":"editor":"stop":flagList
flagList = map head $ group $ sort allFlags
completeSeti = wrapCompleter flagWordBreakChars $ \w -> do
return (filter (w `isPrefixOf`) flagList)
where flagList = map head $ group $ sort allFlags
completeShowOptions = wrapCompleter flagWordBreakChars $ \w -> do
return (filter (w `isPrefixOf`) opts)
where opts = ["args", "prog", "prompt", "prompt2", "editor", "stop",
"modules", "bindings", "linker", "breaks",
"context", "packages", "paths", "language", "imports"]
completeShowiOptions = wrapCompleter flagWordBreakChars $ \w -> do
return (filter (w `isPrefixOf`) ["language"])
completeHomeModuleOrFile = completeWord Nothing filenameWordBreakChars
$ unionComplete (fmap (map simpleCompletion) . listHomeModules)
listFiles
unionComplete :: Monad m => (a -> m [b]) -> (a -> m [b]) -> a -> m [b]
unionComplete f1 f2 line = do
cs1 <- f1 line
cs2 <- f2 line
return (cs1 ++ cs2)
wrapCompleter :: String -> (String -> GHCi [String]) -> CompletionFunc GHCi
wrapCompleter breakChars fun = completeWord Nothing breakChars
$ fmap (map simpleCompletion . nubSort) . fun
wrapIdentCompleter :: (String -> GHCi [String]) -> CompletionFunc GHCi
wrapIdentCompleter = wrapCompleter word_break_chars
wrapIdentCompleterWithModifier :: String -> (Maybe Char -> String -> GHCi [String]) -> CompletionFunc GHCi
wrapIdentCompleterWithModifier modifChars fun = completeWordWithPrev Nothing word_break_chars
$ \rest -> fmap (map simpleCompletion . nubSort) . fun (getModifier rest)
where
getModifier = find (`elem` modifChars)
-- | Return a list of visible module names for autocompletion.
-- (NB: exposed != visible)
allVisibleModules :: DynFlags -> [ModuleName]
allVisibleModules dflags = listVisibleModuleNames dflags
completeExpression = completeQuotedWord (Just '\\') "\"" listFiles
completeIdentifier
-- -----------------------------------------------------------------------------
-- commands for debugger
sprintCmd, printCmd, forceCmd :: String -> GHCi ()
sprintCmd = pprintCommand False False
printCmd = pprintCommand True False
forceCmd = pprintCommand False True
pprintCommand :: Bool -> Bool -> String -> GHCi ()
pprintCommand bind force str = do
pprintClosureCommand bind force str
stepCmd :: String -> GHCi ()
stepCmd arg = withSandboxOnly ":step" $ step arg
where
step [] = doContinue (const True) GHC.SingleStep
step expression = runStmt expression GHC.SingleStep >> return ()
stepLocalCmd :: String -> GHCi ()
stepLocalCmd arg = withSandboxOnly ":steplocal" $ step arg
where
step expr
| not (null expr) = stepCmd expr
| otherwise = do
mb_span <- getCurrentBreakSpan
case mb_span of
Nothing -> stepCmd []
Just loc -> do
Just md <- getCurrentBreakModule
current_toplevel_decl <- enclosingTickSpan md loc
doContinue (`isSubspanOf` current_toplevel_decl) GHC.SingleStep
stepModuleCmd :: String -> GHCi ()
stepModuleCmd arg = withSandboxOnly ":stepmodule" $ step arg
where
step expr
| not (null expr) = stepCmd expr
| otherwise = do
mb_span <- getCurrentBreakSpan
case mb_span of
Nothing -> stepCmd []
Just pan -> do
let f some_span = srcSpanFileName_maybe pan == srcSpanFileName_maybe some_span
doContinue f GHC.SingleStep
-- | Returns the span of the largest tick containing the srcspan given
enclosingTickSpan :: Module -> SrcSpan -> GHCi SrcSpan
enclosingTickSpan _ (UnhelpfulSpan _) = panic "enclosingTickSpan UnhelpfulSpan"
enclosingTickSpan md (RealSrcSpan src) = do
ticks <- getTickArray md
let line = srcSpanStartLine src
ASSERT(inRange (bounds ticks) line) do
let toRealSrcSpan (UnhelpfulSpan _) = panic "enclosingTickSpan UnhelpfulSpan"
toRealSrcSpan (RealSrcSpan s) = s
enclosing_spans = [ pan | (_,pan) <- ticks ! line
, realSrcSpanEnd (toRealSrcSpan pan) >= realSrcSpanEnd src]
return . head . sortBy leftmost_largest $ enclosing_spans
traceCmd :: String -> GHCi ()
traceCmd arg
= withSandboxOnly ":trace" $ tr arg
where
tr [] = doContinue (const True) GHC.RunAndLogSteps
tr expression = runStmt expression GHC.RunAndLogSteps >> return ()
continueCmd :: String -> GHCi ()
continueCmd = noArgs $ withSandboxOnly ":continue" $ doContinue (const True) GHC.RunToCompletion
-- doContinue :: SingleStep -> GHCi ()
doContinue :: (SrcSpan -> Bool) -> SingleStep -> GHCi ()
doContinue pre step = do
runResult <- resume pre step
_ <- afterRunStmt pre runResult
return ()
abandonCmd :: String -> GHCi ()
abandonCmd = noArgs $ withSandboxOnly ":abandon" $ do
b <- GHC.abandon -- the prompt will change to indicate the new context
when (not b) $ liftIO $ putStrLn "There is no computation running."
deleteCmd :: String -> GHCi ()
deleteCmd argLine = withSandboxOnly ":delete" $ do
deleteSwitch $ words argLine
where
deleteSwitch :: [String] -> GHCi ()
deleteSwitch [] =
liftIO $ putStrLn "The delete command requires at least one argument."
   -- delete all breakpoints
deleteSwitch ("*":_rest) = discardActiveBreakPoints
deleteSwitch idents = do
mapM_ deleteOneBreak idents
where
deleteOneBreak :: String -> GHCi ()
deleteOneBreak str
| all isDigit str = deleteBreak (read str)
| otherwise = return ()
historyCmd :: String -> GHCi ()
historyCmd arg
| null arg = history 20
| all isDigit arg = history (read arg)
| otherwise = liftIO $ putStrLn "Syntax: :history [num]"
where
history num = do
resumes <- GHC.getResumeContext
case resumes of
[] -> liftIO $ putStrLn "Not stopped at a breakpoint"
(r:_) -> do
let hist = GHC.resumeHistory r
(took,rest) = splitAt num hist
case hist of
[] -> liftIO $ putStrLn $
"Empty history. Perhaps you forgot to use :trace?"
_ -> do
pans <- mapM GHC.getHistorySpan took
let nums = map (printf "-%-3d:") [(1::Int)..]
names = map GHC.historyEnclosingDecls took
printForUser (vcat(zipWith3
(\x y z -> x <+> y <+> z)
(map text nums)
(map (bold . hcat . punctuate colon . map text) names)
(map (parens . ppr) pans)))
liftIO $ putStrLn $ if null rest then "<end of history>" else "..."
bold :: SDoc -> SDoc
bold c | do_bold = text start_bold <> c <> text end_bold
| otherwise = c
backCmd :: String -> GHCi ()
backCmd arg
| null arg = back 1
| all isDigit arg = back (read arg)
| otherwise = liftIO $ putStrLn "Syntax: :back [num]"
where
back num = withSandboxOnly ":back" $ do
(names, _, pan) <- GHC.back num
printForUser $ ptext (sLit "Logged breakpoint at") <+> ppr pan
printTypeOfNames names
-- run the command set with ":set stop <cmd>"
st <- getGHCiState
enqueueCommands [stop st]
forwardCmd :: String -> GHCi ()
forwardCmd arg
| null arg = forward 1
| all isDigit arg = forward (read arg)
  | otherwise        = liftIO $ putStrLn "Syntax: :forward [num]"
where
forward num = withSandboxOnly ":forward" $ do
(names, ix, pan) <- GHC.forward num
printForUser $ (if (ix == 0)
then ptext (sLit "Stopped at")
else ptext (sLit "Logged breakpoint at")) <+> ppr pan
printTypeOfNames names
-- run the command set with ":set stop <cmd>"
st <- getGHCiState
enqueueCommands [stop st]
-- handle the "break" command
breakCmd :: String -> GHCi ()
breakCmd argLine = withSandboxOnly ":break" $ breakSwitch $ words argLine
breakSwitch :: [String] -> GHCi ()
breakSwitch [] = do
liftIO $ putStrLn "The break command requires at least one argument."
breakSwitch (arg1:rest)
| looksLikeModuleName arg1 && not (null rest) = do
md <- wantInterpretedModule arg1
breakByModule md rest
| all isDigit arg1 = do
imports <- GHC.getContext
case iiModules imports of
(mn : _) -> do
md <- lookupModuleName mn
breakByModuleLine md (read arg1) rest
[] -> do
liftIO $ putStrLn "No modules are loaded with debugging support."
| otherwise = do -- try parsing it as an identifier
wantNameFromInterpretedModule noCanDo arg1 $ \name -> do
let loc = GHC.srcSpanStart (GHC.nameSrcSpan name)
case loc of
RealSrcLoc l ->
ASSERT( isExternalName name )
findBreakAndSet (GHC.nameModule name) $
findBreakByCoord (Just (GHC.srcLocFile l))
(GHC.srcLocLine l,
GHC.srcLocCol l)
UnhelpfulLoc _ ->
noCanDo name $ text "can't find its location: " <> ppr loc
where
noCanDo n why = printForUser $
text "cannot set breakpoint on " <> ppr n <> text ": " <> why
breakByModule :: Module -> [String] -> GHCi ()
breakByModule md (arg1:rest)
| all isDigit arg1 = do -- looks like a line number
breakByModuleLine md (read arg1) rest
breakByModule _ _
= breakSyntax
breakByModuleLine :: Module -> Int -> [String] -> GHCi ()
breakByModuleLine md line args
| [] <- args = findBreakAndSet md $ findBreakByLine line
| [col] <- args, all isDigit col =
findBreakAndSet md $ findBreakByCoord Nothing (line, read col)
| otherwise = breakSyntax
breakSyntax :: a
breakSyntax = throwGhcException (CmdLineError "Syntax: :break [<mod>] <line> [<column>]")
findBreakAndSet :: Module -> (TickArray -> Maybe (Int, SrcSpan)) -> GHCi ()
findBreakAndSet md lookupTickTree = do
dflags <- getDynFlags
tickArray <- getTickArray md
(breakArray, _) <- getModBreak md
case lookupTickTree tickArray of
Nothing -> liftIO $ putStrLn $ "No breakpoints found at that location."
Just (tick, pan) -> do
success <- liftIO $ setBreakFlag dflags True breakArray tick
if success
then do
(alreadySet, nm) <-
recordBreak $ BreakLocation
{ breakModule = md
, breakLoc = pan
, breakTick = tick
, onBreakCmd = ""
}
printForUser $
text "Breakpoint " <> ppr nm <>
if alreadySet
then text " was already set at " <> ppr pan
else text " activated at " <> ppr pan
else do
printForUser $ text "Breakpoint could not be activated at"
<+> ppr pan
-- When a line number is specified, the current policy for choosing
-- the best breakpoint is this:
-- - the leftmost complete subexpression on the specified line, or
-- - the leftmost subexpression starting on the specified line, or
-- - the rightmost subexpression enclosing the specified line
--
findBreakByLine :: Int -> TickArray -> Maybe (BreakIndex,SrcSpan)
findBreakByLine line arr
| not (inRange (bounds arr) line) = Nothing
| otherwise =
listToMaybe (sortBy (leftmost_largest `on` snd) comp) `mplus`
listToMaybe (sortBy (leftmost_smallest `on` snd) incomp) `mplus`
listToMaybe (sortBy (rightmost `on` snd) ticks)
where
ticks = arr ! line
starts_here = [ tick | tick@(_,pan) <- ticks,
GHC.srcSpanStartLine (toRealSpan pan) == line ]
(comp, incomp) = partition ends_here starts_here
where ends_here (_,pan) = GHC.srcSpanEndLine (toRealSpan pan) == line
toRealSpan (RealSrcSpan pan) = pan
toRealSpan (UnhelpfulSpan _) = panic "findBreakByLine UnhelpfulSpan"
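-- Roughly, for ":break 10" on a one-line equation such as
--
--   line 10:   f x = g (h x) + k x
--
-- the whole right-hand side both starts and ends on line 10 and is the
-- leftmost/largest complete tick, so the breakpoint lands on it.  The second
-- and third fallbacks only apply when nothing both starts and ends on the
-- requested line (e.g. a multi-line expression).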
findBreakByCoord :: Maybe FastString -> (Int,Int) -> TickArray
-> Maybe (BreakIndex,SrcSpan)
findBreakByCoord mb_file (line, col) arr
| not (inRange (bounds arr) line) = Nothing
| otherwise =
listToMaybe (sortBy (rightmost `on` snd) contains ++
sortBy (leftmost_smallest `on` snd) after_here)
where
ticks = arr ! line
-- the ticks that span this coordinate
contains = [ tick | tick@(_,pan) <- ticks, pan `spans` (line,col),
is_correct_file pan ]
is_correct_file pan
| Just f <- mb_file = GHC.srcSpanFile (toRealSpan pan) == f
| otherwise = True
after_here = [ tick | tick@(_,pan) <- ticks,
let pan' = toRealSpan pan,
GHC.srcSpanStartLine pan' == line,
GHC.srcSpanStartCol pan' >= col ]
toRealSpan (RealSrcSpan pan) = pan
toRealSpan (UnhelpfulSpan _) = panic "findBreakByCoord UnhelpfulSpan"
-- For now, use ANSI bold on terminals that we know support it.
-- Otherwise, we add a line of carets under the active expression instead.
-- In particular, on Windows and when running the testsuite (which sets
-- TERM to vt100 for other reasons) we get carets.
-- We really ought to use a proper termcap/terminfo library.
do_bold :: Bool
do_bold = (`isPrefixOf` unsafePerformIO mTerm) `any` ["xterm", "linux"]
where mTerm = System.Environment.getEnv "TERM"
`catchIO` \_ -> return "TERM not set"
start_bold :: String
start_bold = "\ESC[1m"
end_bold :: String
end_bold = "\ESC[0m"
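-- With do_bold the highlighted fragment is wrapped as
--
--   "\ESC[1m" ++ fragment ++ "\ESC[0m"
--
-- which xterm and the Linux console render as bold text.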
-----------------------------------------------------------------------------
-- :list
listCmd :: String -> InputT GHCi ()
listCmd c = listCmd' c
listCmd' :: String -> InputT GHCi ()
listCmd' "" = do
mb_span <- lift getCurrentBreakSpan
case mb_span of
Nothing ->
printForUser $ text "Not stopped at a breakpoint; nothing to list"
Just (RealSrcSpan pan) ->
listAround pan True
Just pan@(UnhelpfulSpan _) ->
do resumes <- GHC.getResumeContext
case resumes of
[] -> panic "No resumes"
(r:_) ->
do let traceIt = case GHC.resumeHistory r of
[] -> text "rerunning with :trace,"
_ -> empty
doWhat = traceIt <+> text ":back then :list"
printForUser (text "Unable to list source for" <+>
ppr pan
$$ text "Try" <+> doWhat)
listCmd' str = list2 (words str)
list2 :: [String] -> InputT GHCi ()
list2 [arg] | all isDigit arg = do
imports <- GHC.getContext
case iiModules imports of
[] -> liftIO $ putStrLn "No module to list"
(mn : _) -> do
md <- lift $ lookupModuleName mn
listModuleLine md (read arg)
list2 [arg1,arg2] | looksLikeModuleName arg1, all isDigit arg2 = do
md <- wantInterpretedModule arg1
listModuleLine md (read arg2)
list2 [arg] = do
wantNameFromInterpretedModule noCanDo arg $ \name -> do
let loc = GHC.srcSpanStart (GHC.nameSrcSpan name)
case loc of
RealSrcLoc l ->
do tickArray <- ASSERT( isExternalName name )
lift $ getTickArray (GHC.nameModule name)
let mb_span = findBreakByCoord (Just (GHC.srcLocFile l))
(GHC.srcLocLine l, GHC.srcLocCol l)
tickArray
case mb_span of
Nothing -> listAround (realSrcLocSpan l) False
Just (_, UnhelpfulSpan _) -> panic "list2 UnhelpfulSpan"
Just (_, RealSrcSpan pan) -> listAround pan False
UnhelpfulLoc _ ->
noCanDo name $ text "can't find its location: " <>
ppr loc
where
noCanDo n why = printForUser $
text "cannot list source code for " <> ppr n <> text ": " <> why
list2 _other =
liftIO $ putStrLn "syntax: :list [<line> | <module> <line> | <identifier>]"
listModuleLine :: Module -> Int -> InputT GHCi ()
listModuleLine modl line = do
graph <- GHC.getModuleGraph
let this = filter ((== modl) . GHC.ms_mod) graph
case this of
[] -> panic "listModuleLine"
summ:_ -> do
let filename = expectJust "listModuleLine" (ml_hs_file (GHC.ms_location summ))
loc = mkRealSrcLoc (mkFastString (filename)) line 0
listAround (realSrcLocSpan loc) False
-- | list a section of a source file around a particular SrcSpan.
-- If the highlight flag is True, also highlight the span using
-- start_bold\/end_bold.
-- GHC files are UTF-8, so we can implement this by:
-- 1) read the file in as a BS and syntax highlight it as before
-- 2) convert the BS to String via UTF-8 decoding, and write it out.
-- It would be better if we could convert directly between UTF-8 and the
-- console encoding, of course.
listAround :: MonadIO m => RealSrcSpan -> Bool -> InputT m ()
listAround pan do_highlight = do
contents <- liftIO $ BS.readFile (unpackFS file)
-- Drop carriage returns to avoid duplicates, see #9367.
let ls = BS.split '\n' $ BS.filter (/= '\r') contents
ls' = take (line2 - line1 + 1 + pad_before + pad_after) $
drop (line1 - 1 - pad_before) $ ls
fst_line = max 1 (line1 - pad_before)
line_nos = [ fst_line .. ]
highlighted | do_highlight = zipWith highlight line_nos ls'
| otherwise = [\p -> BS.concat[p,l] | l <- ls']
bs_line_nos = [ BS.pack (show l ++ " ") | l <- line_nos ]
prefixed = zipWith ($) highlighted bs_line_nos
output = BS.intercalate (BS.pack "\n") prefixed
utf8Decoded <- liftIO $ BS.useAsCStringLen output
$ \(p,n) -> utf8DecodeString (castPtr p) n
liftIO $ putStrLn utf8Decoded
where
file = GHC.srcSpanFile pan
line1 = GHC.srcSpanStartLine pan
col1 = GHC.srcSpanStartCol pan - 1
line2 = GHC.srcSpanEndLine pan
col2 = GHC.srcSpanEndCol pan - 1
pad_before | line1 == 1 = 0
| otherwise = 1
pad_after = 1
highlight | do_bold = highlight_bold
| otherwise = highlight_carets
highlight_bold no line prefix
| no == line1 && no == line2
= let (a,r) = BS.splitAt col1 line
(b,c) = BS.splitAt (col2-col1) r
in
BS.concat [prefix, a,BS.pack start_bold,b,BS.pack end_bold,c]
| no == line1
= let (a,b) = BS.splitAt col1 line in
BS.concat [prefix, a, BS.pack start_bold, b]
| no == line2
= let (a,b) = BS.splitAt col2 line in
BS.concat [prefix, a, BS.pack end_bold, b]
| otherwise = BS.concat [prefix, line]
highlight_carets no line prefix
| no == line1 && no == line2
= BS.concat [prefix, line, nl, indent, BS.replicate col1 ' ',
BS.replicate (col2-col1) '^']
| no == line1
= BS.concat [indent, BS.replicate (col1 - 2) ' ', BS.pack "vv", nl,
prefix, line]
| no == line2
= BS.concat [prefix, line, nl, indent, BS.replicate col2 ' ',
BS.pack "^^"]
| otherwise = BS.concat [prefix, line]
where
indent = BS.pack (" " ++ replicate (length (show no)) ' ')
nl = BS.singleton '\n'
-- --------------------------------------------------------------------------
-- Tick arrays
getTickArray :: Module -> GHCi TickArray
getTickArray modl = do
st <- getGHCiState
let arrmap = tickarrays st
case lookupModuleEnv arrmap modl of
Just arr -> return arr
Nothing -> do
(_breakArray, ticks) <- getModBreak modl
let arr = mkTickArray (assocs ticks)
setGHCiState st{tickarrays = extendModuleEnv arrmap modl arr}
return arr
discardTickArrays :: GHCi ()
discardTickArrays = do
st <- getGHCiState
setGHCiState st{tickarrays = emptyModuleEnv}
mkTickArray :: [(BreakIndex,SrcSpan)] -> TickArray
mkTickArray ticks
= accumArray (flip (:)) [] (1, max_line)
[ (line, (nm,pan)) | (nm,pan) <- ticks,
let pan' = toRealSpan pan,
line <- srcSpanLines pan' ]
where
max_line = foldr max 0 (map (GHC.srcSpanEndLine . toRealSpan . snd) ticks)
srcSpanLines pan = [ GHC.srcSpanStartLine pan .. GHC.srcSpanEndLine pan ]
toRealSpan (RealSrcSpan pan) = pan
toRealSpan (UnhelpfulSpan _) = panic "mkTickArray UnhelpfulSpan"
-- don't reset the counter back to zero?
discardActiveBreakPoints :: GHCi ()
discardActiveBreakPoints = do
st <- getGHCiState
mapM_ (turnOffBreak.snd) (breaks st)
setGHCiState $ st { breaks = [] }
deleteBreak :: Int -> GHCi ()
deleteBreak identity = do
st <- getGHCiState
let oldLocations = breaks st
(this,rest) = partition (\loc -> fst loc == identity) oldLocations
if null this
then printForUser (text "Breakpoint" <+> ppr identity <+>
text "does not exist")
else do
mapM_ (turnOffBreak.snd) this
setGHCiState $ st { breaks = rest }
turnOffBreak :: BreakLocation -> GHCi Bool
turnOffBreak loc = do
dflags <- getDynFlags
(arr, _) <- getModBreak (breakModule loc)
liftIO $ setBreakFlag dflags False arr (breakTick loc)
getModBreak :: Module -> GHCi (GHC.BreakArray, Array Int SrcSpan)
getModBreak m = do
Just mod_info <- GHC.getModuleInfo m
let modBreaks = GHC.modInfoModBreaks mod_info
let arr = GHC.modBreaks_flags modBreaks
let ticks = GHC.modBreaks_locs modBreaks
return (arr, ticks)
setBreakFlag :: DynFlags -> Bool -> GHC.BreakArray -> Int -> IO Bool
setBreakFlag dflags toggle arr i
| toggle = GHC.setBreakOn dflags arr i
| otherwise = GHC.setBreakOff dflags arr i
-- ---------------------------------------------------------------------------
-- User code exception handling
-- This is the exception handler for exceptions generated by the
-- user's code and exceptions coming from child sessions;
-- it normally just prints out the exception. The
-- handler must be recursive, in case showing the exception causes
-- more exceptions to be raised.
--
-- Bugfix: if the user closed stdout or stderr, the flushing will fail,
-- raising another exception. We therefore don't put the recursive
-- handler around the flushing operation, so if stderr is closed
-- GHCi will just die gracefully rather than going into an infinite loop.
handler :: SomeException -> GHCi Bool
handler exception = do
flushInterpBuffers
liftIO installSignalHandlers
ghciHandle handler (showException exception >> return False)
showException :: SomeException -> GHCi ()
showException se =
liftIO $ case fromException se of
-- omit the location for CmdLineError:
Just (CmdLineError s) -> putException s
-- ditto:
Just other_ghc_ex -> putException (show other_ghc_ex)
Nothing ->
case fromException se of
Just UserInterrupt -> putException "Interrupted."
_ -> putException ("*** Exception: " ++ show se)
where
putException = hPutStrLn stderr
-----------------------------------------------------------------------------
-- recursive exception handlers
-- Don't forget to unblock async exceptions in the handler, or if we're
-- in an exception loop (eg. let a = error a in a) the ^C exception
-- may never be delivered. Thanks to Marcin for pointing out the bug.
ghciHandle :: (HasDynFlags m, ExceptionMonad m) => (SomeException -> m a) -> m a -> m a
ghciHandle h m = gmask $ \restore -> do
dflags <- getDynFlags
gcatch (restore (GHC.prettyPrintGhcErrors dflags m)) $ \e -> restore (h e)
ghciTry :: GHCi a -> GHCi (Either SomeException a)
ghciTry (GHCi m) = GHCi $ \s -> gtry (m s)
tryBool :: GHCi a -> GHCi Bool
tryBool m = do
r <- ghciTry m
case r of
Left _ -> return False
Right _ -> return True
-- ----------------------------------------------------------------------------
-- Utils
lookupModule :: GHC.GhcMonad m => String -> m Module
lookupModule mName = lookupModuleName (GHC.mkModuleName mName)
lookupModuleName :: GHC.GhcMonad m => ModuleName -> m Module
lookupModuleName mName = GHC.lookupModule mName Nothing
isHomeModule :: Module -> Bool
isHomeModule m = GHC.moduleUnitId m == mainUnitId
-- TODO: won't work if home dir is encoded.
-- (changeDirectory may not work either in that case.)
expandPath :: MonadIO m => String -> InputT m String
expandPath = liftIO . expandPathIO
expandPathIO :: String -> IO String
expandPathIO p =
case dropWhile isSpace p of
('~':d) -> do
tilde <- getHomeDirectory -- will fail if HOME not defined
return (tilde ++ '/':d)
other ->
return other
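-- For example (assuming HOME=/home/alice):
--
--   expandPathIO "~/Main.hs"   ==>  "/home/alice//Main.hs"   (the extra '/' is harmless)
--   expandPathIO "./Main.hs"   ==>  "./Main.hs"              (returned unchanged)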
wantInterpretedModule :: GHC.GhcMonad m => String -> m Module
wantInterpretedModule str = wantInterpretedModuleName (GHC.mkModuleName str)
wantInterpretedModuleName :: GHC.GhcMonad m => ModuleName -> m Module
wantInterpretedModuleName modname = do
modl <- lookupModuleName modname
let str = moduleNameString modname
dflags <- getDynFlags
when (GHC.moduleUnitId modl /= thisPackage dflags) $
throwGhcException (CmdLineError ("module '" ++ str ++ "' is from another package;\nthis command requires an interpreted module"))
is_interpreted <- GHC.moduleIsInterpreted modl
when (not is_interpreted) $
throwGhcException (CmdLineError ("module '" ++ str ++ "' is not interpreted; try \':add *" ++ str ++ "' first"))
return modl
wantNameFromInterpretedModule :: GHC.GhcMonad m
=> (Name -> SDoc -> m ())
-> String
-> (Name -> m ())
-> m ()
wantNameFromInterpretedModule noCanDo str and_then =
handleSourceError GHC.printException $ do
names <- GHC.parseName str
case names of
[] -> return ()
(n:_) -> do
let modl = ASSERT( isExternalName n ) GHC.nameModule n
if not (GHC.isExternalName n)
then noCanDo n $ ppr n <>
text " is not defined in an interpreted module"
else do
is_interpreted <- GHC.moduleIsInterpreted modl
if not is_interpreted
then noCanDo n $ text "module " <> ppr modl <>
text " is not interpreted"
else and_then n
| AlexanderPankiv/ghc | ghc/InteractiveUI.hs | bsd-3-clause | 131,151 | 12 | 103 | 37,850 | 32,192 | 15,998 | 16,194 | -1 | -1 |
--------------------------------------------------------------------------------
-- Copyright © 2011 National Institute of Aerospace / Galois, Inc.
--------------------------------------------------------------------------------
-- | if-then-else.
{-# LANGUAGE Trustworthy #-}
module Copilot.Language.Operators.Mux
( mux
, ifThenElse
) where
import Copilot.Core (Typed, typeOf)
import qualified Copilot.Core as Core
import Copilot.Language.Prelude
import Copilot.Language.Stream
import Prelude ()
--------------------------------------------------------------------------------
mux :: Typed a => Stream Bool -> Stream a -> Stream a -> Stream a
mux (Const True) t _ = t
mux (Const False) _ f = f
mux b t f = Op3 (Core.Mux typeOf) b t f
--------------------------------------------------------------------------------
ifThenElse :: Typed a => Stream Bool -> Stream a -> Stream a -> Stream a
ifThenElse = mux
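-- A small usage sketch (purely illustrative; assumes externs such as
-- @temperature :: Stream Float@ and @throttle :: Stream Word8@ are
-- declared elsewhere):
--
--   danger :: Stream Bool
--   danger = temperature > 90
--
--   setpoint :: Stream Word8
--   setpoint = mux danger 0 throttle
--
-- Because 'ifThenElse' is exported, the same stream can be written with
-- ordinary syntax under GHC's RebindableSyntax extension:
-- @if danger then 0 else throttle@.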
--------------------------------------------------------------------------------
| leepike/copilot-language | src/Copilot/Language/Operators/Mux.hs | bsd-3-clause | 1,016 | 0 | 9 | 135 | 205 | 112 | 93 | 15 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Database.Bloodhound.Internal.Query
( module X
, module Database.Bloodhound.Internal.Query
) where
import Bloodhound.Import
import Control.Monad.Fail (MonadFail)
import Data.Char (isNumber)
import qualified Data.HashMap.Strict as HM
import Data.List (nub)
import qualified Data.Text as T
import Database.Bloodhound.Common.Script as X
import Database.Bloodhound.Internal.Newtypes
data Query =
TermQuery Term (Maybe Boost)
| TermsQuery Text (NonEmpty Text)
| QueryMatchQuery MatchQuery
| QueryMultiMatchQuery MultiMatchQuery
| QueryBoolQuery BoolQuery
| QueryBoostingQuery BoostingQuery
| QueryCommonTermsQuery CommonTermsQuery
| ConstantScoreQuery Query Boost
| QueryFunctionScoreQuery FunctionScoreQuery
| QueryDisMaxQuery DisMaxQuery
| QueryFuzzyLikeThisQuery FuzzyLikeThisQuery
| QueryFuzzyLikeFieldQuery FuzzyLikeFieldQuery
| QueryFuzzyQuery FuzzyQuery
| QueryHasChildQuery HasChildQuery
| QueryHasParentQuery HasParentQuery
| IdsQuery [DocId]
| QueryIndicesQuery IndicesQuery
| MatchAllQuery (Maybe Boost)
| QueryMoreLikeThisQuery MoreLikeThisQuery
| QueryMoreLikeThisFieldQuery MoreLikeThisFieldQuery
| QueryNestedQuery NestedQuery
| QueryPrefixQuery PrefixQuery
| QueryQueryStringQuery QueryStringQuery
| QuerySimpleQueryStringQuery SimpleQueryStringQuery
| QueryRangeQuery RangeQuery
| QueryRegexpQuery RegexpQuery
| QueryExistsQuery FieldName
| QueryMatchNoneQuery
| QueryWildcardQuery WildcardQuery
deriving (Eq, Show)
instance ToJSON Query where
toJSON (TermQuery (Term termQueryField termQueryValue) boost) =
object [ "term" .=
object [termQueryField .= object merged]]
where
base = [ "value" .= termQueryValue ]
boosted = maybe [] (return . ("boost" .=)) boost
merged = mappend base boosted
toJSON (TermsQuery fieldName terms) =
object [ "terms" .= object conjoined ]
where conjoined = [fieldName .= terms]
toJSON (IdsQuery docIds) =
object [ "ids" .= object conjoined ]
where conjoined = [ "values" .= fmap toJSON docIds ]
toJSON (QueryQueryStringQuery qQueryStringQuery) =
object [ "query_string" .= qQueryStringQuery ]
toJSON (QueryMatchQuery matchQuery) =
object [ "match" .= matchQuery ]
toJSON (QueryMultiMatchQuery multiMatchQuery) =
toJSON multiMatchQuery
toJSON (QueryBoolQuery boolQuery) =
object [ "bool" .= boolQuery ]
toJSON (QueryBoostingQuery boostingQuery) =
object [ "boosting" .= boostingQuery ]
toJSON (QueryCommonTermsQuery commonTermsQuery) =
object [ "common" .= commonTermsQuery ]
toJSON (ConstantScoreQuery query boost) =
object ["constant_score" .= object ["filter" .= query
, "boost" .= boost]]
toJSON (QueryFunctionScoreQuery functionScoreQuery) =
object [ "function_score" .= functionScoreQuery ]
toJSON (QueryDisMaxQuery disMaxQuery) =
object [ "dis_max" .= disMaxQuery ]
toJSON (QueryFuzzyLikeThisQuery fuzzyQuery) =
object [ "fuzzy_like_this" .= fuzzyQuery ]
toJSON (QueryFuzzyLikeFieldQuery fuzzyFieldQuery) =
object [ "fuzzy_like_this_field" .= fuzzyFieldQuery ]
toJSON (QueryFuzzyQuery fuzzyQuery) =
object [ "fuzzy" .= fuzzyQuery ]
toJSON (QueryHasChildQuery childQuery) =
object [ "has_child" .= childQuery ]
toJSON (QueryHasParentQuery parentQuery) =
object [ "has_parent" .= parentQuery ]
toJSON (QueryIndicesQuery qIndicesQuery) =
object [ "indices" .= qIndicesQuery ]
toJSON (MatchAllQuery boost) =
object [ "match_all" .= omitNulls [ "boost" .= boost ] ]
toJSON (QueryMoreLikeThisQuery query) =
object [ "more_like_this" .= query ]
toJSON (QueryMoreLikeThisFieldQuery query) =
object [ "more_like_this_field" .= query ]
toJSON (QueryNestedQuery query) =
object [ "nested" .= query ]
toJSON (QueryPrefixQuery query) =
object [ "prefix" .= query ]
toJSON (QueryRangeQuery query) =
object [ "range" .= query ]
toJSON (QueryRegexpQuery query) =
object [ "regexp" .= query ]
toJSON (QuerySimpleQueryStringQuery query) =
object [ "simple_query_string" .= query ]
toJSON (QueryExistsQuery (FieldName fieldName)) =
object ["exists" .= object
["field" .= fieldName]
]
toJSON QueryMatchNoneQuery =
object ["match_none" .= object []]
toJSON (QueryWildcardQuery query) =
object [ "wildcard" .= query ]
instance FromJSON Query where
parseJSON v = withObject "Query" parse v
where parse o = termQuery `taggedWith` "term"
<|> termsQuery `taggedWith` "terms"
<|> idsQuery `taggedWith` "ids"
<|> queryQueryStringQuery `taggedWith` "query_string"
<|> queryMatchQuery `taggedWith` "match"
<|> queryMultiMatchQuery
<|> queryBoolQuery `taggedWith` "bool"
<|> queryBoostingQuery `taggedWith` "boosting"
<|> queryCommonTermsQuery `taggedWith` "common"
<|> constantScoreQuery `taggedWith` "constant_score"
<|> queryFunctionScoreQuery `taggedWith` "function_score"
<|> queryDisMaxQuery `taggedWith` "dis_max"
<|> queryFuzzyLikeThisQuery `taggedWith` "fuzzy_like_this"
<|> queryFuzzyLikeFieldQuery `taggedWith` "fuzzy_like_this_field"
<|> queryFuzzyQuery `taggedWith` "fuzzy"
<|> queryHasChildQuery `taggedWith` "has_child"
<|> queryHasParentQuery `taggedWith` "has_parent"
<|> queryIndicesQuery `taggedWith` "indices"
<|> matchAllQuery `taggedWith` "match_all"
<|> queryMoreLikeThisQuery `taggedWith` "more_like_this"
<|> queryMoreLikeThisFieldQuery `taggedWith` "more_like_this_field"
<|> queryNestedQuery `taggedWith` "nested"
<|> queryPrefixQuery `taggedWith` "prefix"
<|> queryRangeQuery `taggedWith` "range"
<|> queryRegexpQuery `taggedWith` "regexp"
<|> querySimpleQueryStringQuery `taggedWith` "simple_query_string"
<|> queryWildcardQuery `taggedWith` "wildcard"
where taggedWith parser k = parser =<< o .: k
termQuery = fieldTagged $ \(FieldName fn) o ->
TermQuery <$> (Term fn <$> o .: "value") <*> o .:? "boost"
termsQuery o = case HM.toList o of
[(fn, vs)] -> do vals <- parseJSON vs
case vals of
x:xs -> return (TermsQuery fn (x :| xs))
_ -> fail "Expected non empty list of values"
_ -> fail "Expected object with 1 field-named key"
idsQuery o = IdsQuery <$> o .: "values"
queryQueryStringQuery = pure . QueryQueryStringQuery
queryMatchQuery = pure . QueryMatchQuery
queryMultiMatchQuery = QueryMultiMatchQuery <$> parseJSON v
queryBoolQuery = pure . QueryBoolQuery
queryBoostingQuery = pure . QueryBoostingQuery
queryCommonTermsQuery = pure . QueryCommonTermsQuery
constantScoreQuery o = case HM.lookup "filter" o of
Just x -> ConstantScoreQuery <$> parseJSON x
<*> o .: "boost"
_ -> fail "Does not appear to be a ConstantScoreQuery"
queryFunctionScoreQuery = pure . QueryFunctionScoreQuery
queryDisMaxQuery = pure . QueryDisMaxQuery
queryFuzzyLikeThisQuery = pure . QueryFuzzyLikeThisQuery
queryFuzzyLikeFieldQuery = pure . QueryFuzzyLikeFieldQuery
queryFuzzyQuery = pure . QueryFuzzyQuery
queryHasChildQuery = pure . QueryHasChildQuery
queryHasParentQuery = pure . QueryHasParentQuery
queryIndicesQuery = pure . QueryIndicesQuery
matchAllQuery o = MatchAllQuery <$> o .:? "boost"
queryMoreLikeThisQuery = pure . QueryMoreLikeThisQuery
queryMoreLikeThisFieldQuery = pure . QueryMoreLikeThisFieldQuery
queryNestedQuery = pure . QueryNestedQuery
queryPrefixQuery = pure . QueryPrefixQuery
queryRangeQuery = pure . QueryRangeQuery
queryRegexpQuery = pure . QueryRegexpQuery
querySimpleQueryStringQuery = pure . QuerySimpleQueryStringQuery
-- queryExistsQuery o = QueryExistsQuery <$> o .: "field"
queryWildcardQuery = pure . QueryWildcardQuery
-- | As of Elastic 2.0, 'Filters' are just 'Queries' housed in a
-- Bool Query, and flagged in a different context.
newtype Filter = Filter { unFilter :: Query }
deriving (Eq, Show)
instance ToJSON Filter where
toJSON = toJSON . unFilter
instance FromJSON Filter where
parseJSON v = Filter <$> parseJSON v
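-- A filter is therefore built by wrapping an ordinary query, e.g.
--
--   Filter (TermQuery (Term "state" "active") Nothing)
--
-- and it serialises to exactly the same JSON as the wrapped query; the
-- query/filter distinction only matters for where it is placed inside a
-- bool query (filter context vs. query context).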
data RegexpQuery =
RegexpQuery { regexpQueryField :: FieldName
, regexpQuery :: Regexp
, regexpQueryFlags :: RegexpFlags
, regexpQueryBoost :: Maybe Boost
} deriving (Eq, Show)
instance ToJSON RegexpQuery where
toJSON (RegexpQuery (FieldName rqQueryField)
(Regexp regexpQueryQuery) rqQueryFlags
rqQueryBoost) =
object [ rqQueryField .= omitNulls base ]
where base = [ "value" .= regexpQueryQuery
, "flags" .= rqQueryFlags
, "boost" .= rqQueryBoost ]
instance FromJSON RegexpQuery where
parseJSON = withObject "RegexpQuery" parse
where parse = fieldTagged $ \fn o ->
RegexpQuery fn
<$> o .: "value"
<*> o .: "flags"
<*> o .:? "boost"
data WildcardQuery =
WildcardQuery { wildcardQueryField :: FieldName
, wildcardQuery :: Text
, wildcardQueryBoost :: Maybe Boost
} deriving (Eq, Show)
instance ToJSON WildcardQuery where
toJSON (WildcardQuery (FieldName wcQueryField)
(wcQueryQuery) wcQueryBoost) =
object [ wcQueryField .= omitNulls base ]
where base = [ "value" .= wcQueryQuery
, "boost" .= wcQueryBoost ]
instance FromJSON WildcardQuery where
parseJSON = withObject "WildcardQuery" parse
where parse = fieldTagged $ \fn o ->
WildcardQuery fn
<$> o .: "value"
<*> o .:? "boost"
data RangeQuery =
RangeQuery { rangeQueryField :: FieldName
, rangeQueryRange :: RangeValue
, rangeQueryBoost :: Boost } deriving (Eq, Show)
instance ToJSON RangeQuery where
toJSON (RangeQuery (FieldName fieldName) range boost) =
object [ fieldName .= object conjoined ]
where
conjoined = ("boost" .= boost) : rangeValueToPair range
instance FromJSON RangeQuery where
parseJSON = withObject "RangeQuery" parse
where parse = fieldTagged $ \fn o ->
RangeQuery fn
<$> parseJSON (Object o)
<*> o .: "boost"
mkRangeQuery :: FieldName -> RangeValue -> RangeQuery
mkRangeQuery f r = RangeQuery f r (Boost 1.0)
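-- Usage sketch (hypothetical field): match documents whose @age@ is at least
-- 10, using the default 'Boost' of 1.0 supplied by 'mkRangeQuery'.
--
-- > ageAtLeastTen :: RangeQuery
-- > ageAtLeastTen = mkRangeQuery (FieldName "age") (RangeDoubleGte (GreaterThanEq 10))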
data SimpleQueryStringQuery =
SimpleQueryStringQuery
{ simpleQueryStringQuery :: QueryString
, simpleQueryStringField :: Maybe FieldOrFields
, simpleQueryStringOperator :: Maybe BooleanOperator
, simpleQueryStringAnalyzer :: Maybe Analyzer
, simpleQueryStringFlags :: Maybe (NonEmpty SimpleQueryFlag)
, simpleQueryStringLowercaseExpanded :: Maybe LowercaseExpanded
, simpleQueryStringLocale :: Maybe Locale
} deriving (Eq, Show)
instance ToJSON SimpleQueryStringQuery where
toJSON SimpleQueryStringQuery {..} =
omitNulls (base ++ maybeAdd)
where base = [ "query" .= simpleQueryStringQuery ]
maybeAdd = [ "fields" .= simpleQueryStringField
, "default_operator" .= simpleQueryStringOperator
, "analyzer" .= simpleQueryStringAnalyzer
, "flags" .= simpleQueryStringFlags
, "lowercase_expanded_terms" .= simpleQueryStringLowercaseExpanded
, "locale" .= simpleQueryStringLocale ]
instance FromJSON SimpleQueryStringQuery where
parseJSON = withObject "SimpleQueryStringQuery" parse
where parse o = SimpleQueryStringQuery <$> o .: "query"
<*> o .:? "fields"
<*> o .:? "default_operator"
<*> o .:? "analyzer"
<*> (parseFlags <$> o .:? "flags")
<*> o .:? "lowercase_expanded_terms"
<*> o .:? "locale"
parseFlags (Just (x:xs)) = Just (x :| xs)
parseFlags _ = Nothing
data SimpleQueryFlag =
SimpleQueryAll
| SimpleQueryNone
| SimpleQueryAnd
| SimpleQueryOr
| SimpleQueryPrefix
| SimpleQueryPhrase
| SimpleQueryPrecedence
| SimpleQueryEscape
| SimpleQueryWhitespace
| SimpleQueryFuzzy
| SimpleQueryNear
| SimpleQuerySlop deriving (Eq, Show)
instance ToJSON SimpleQueryFlag where
toJSON SimpleQueryAll = "ALL"
toJSON SimpleQueryNone = "NONE"
toJSON SimpleQueryAnd = "AND"
toJSON SimpleQueryOr = "OR"
toJSON SimpleQueryPrefix = "PREFIX"
toJSON SimpleQueryPhrase = "PHRASE"
toJSON SimpleQueryPrecedence = "PRECEDENCE"
toJSON SimpleQueryEscape = "ESCAPE"
toJSON SimpleQueryWhitespace = "WHITESPACE"
toJSON SimpleQueryFuzzy = "FUZZY"
toJSON SimpleQueryNear = "NEAR"
toJSON SimpleQuerySlop = "SLOP"
instance FromJSON SimpleQueryFlag where
parseJSON = withText "SimpleQueryFlag" parse
where parse "ALL" = pure SimpleQueryAll
parse "NONE" = pure SimpleQueryNone
parse "AND" = pure SimpleQueryAnd
parse "OR" = pure SimpleQueryOr
parse "PREFIX" = pure SimpleQueryPrefix
parse "PHRASE" = pure SimpleQueryPhrase
parse "PRECEDENCE" = pure SimpleQueryPrecedence
parse "ESCAPE" = pure SimpleQueryEscape
parse "WHITESPACE" = pure SimpleQueryWhitespace
parse "FUZZY" = pure SimpleQueryFuzzy
parse "NEAR" = pure SimpleQueryNear
parse "SLOP" = pure SimpleQuerySlop
parse f = fail ("Unexpected SimpleQueryFlag: " <> show f)
-- use_dis_max and tie_breaker when fields are plural?
data QueryStringQuery =
QueryStringQuery
{ queryStringQuery :: QueryString
, queryStringDefaultField :: Maybe FieldName
, queryStringOperator :: Maybe BooleanOperator
, queryStringAnalyzer :: Maybe Analyzer
, queryStringAllowLeadingWildcard :: Maybe AllowLeadingWildcard
, queryStringLowercaseExpanded :: Maybe LowercaseExpanded
, queryStringEnablePositionIncrements :: Maybe EnablePositionIncrements
, queryStringFuzzyMaxExpansions :: Maybe MaxExpansions
, queryStringFuzziness :: Maybe Fuzziness
, queryStringFuzzyPrefixLength :: Maybe PrefixLength
, queryStringPhraseSlop :: Maybe PhraseSlop
, queryStringBoost :: Maybe Boost
, queryStringAnalyzeWildcard :: Maybe AnalyzeWildcard
, queryStringGeneratePhraseQueries :: Maybe GeneratePhraseQueries
, queryStringMinimumShouldMatch :: Maybe MinimumMatch
, queryStringLenient :: Maybe Lenient
, queryStringLocale :: Maybe Locale
} deriving (Eq, Show)
instance ToJSON QueryStringQuery where
toJSON (QueryStringQuery qsQueryString
qsDefaultField qsOperator
qsAnalyzer qsAllowWildcard
qsLowercaseExpanded qsEnablePositionIncrements
qsFuzzyMaxExpansions qsFuzziness
qsFuzzyPrefixLength qsPhraseSlop
qsBoost qsAnalyzeWildcard
qsGeneratePhraseQueries qsMinimumShouldMatch
qsLenient qsLocale) =
omitNulls base
where
base = [ "query" .= qsQueryString
, "default_field" .= qsDefaultField
, "default_operator" .= qsOperator
, "analyzer" .= qsAnalyzer
, "allow_leading_wildcard" .= qsAllowWildcard
, "lowercase_expanded_terms" .= qsLowercaseExpanded
, "enable_position_increments" .= qsEnablePositionIncrements
, "fuzzy_max_expansions" .= qsFuzzyMaxExpansions
, "fuzziness" .= qsFuzziness
, "fuzzy_prefix_length" .= qsFuzzyPrefixLength
, "phrase_slop" .= qsPhraseSlop
, "boost" .= qsBoost
, "analyze_wildcard" .= qsAnalyzeWildcard
, "auto_generate_phrase_queries" .= qsGeneratePhraseQueries
, "minimum_should_match" .= qsMinimumShouldMatch
, "lenient" .= qsLenient
, "locale" .= qsLocale ]
instance FromJSON QueryStringQuery where
parseJSON = withObject "QueryStringQuery" parse
where parse o = QueryStringQuery
<$> o .: "query"
<*> o .:? "default_field"
<*> o .:? "default_operator"
<*> o .:? "analyzer"
<*> o .:? "allow_leading_wildcard"
<*> o .:? "lowercase_expanded_terms"
<*> o .:? "enable_position_increments"
<*> o .:? "fuzzy_max_expansions"
<*> o .:? "fuzziness"
<*> o .:? "fuzzy_prefix_length"
<*> o .:? "phrase_slop"
<*> o .:? "boost"
<*> o .:? "analyze_wildcard"
<*> o .:? "auto_generate_phrase_queries"
<*> o .:? "minimum_should_match"
<*> o .:? "lenient"
<*> o .:? "locale"
mkQueryStringQuery :: QueryString -> QueryStringQuery
mkQueryStringQuery qs =
QueryStringQuery qs Nothing Nothing
Nothing Nothing Nothing Nothing
Nothing Nothing Nothing Nothing
Nothing Nothing Nothing Nothing
Nothing Nothing
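-- Usage sketch (the query text is hypothetical): every optional setting is
-- left at 'Nothing', so only the query string itself is serialised.
--
-- > quickFox :: QueryStringQuery
-- > quickFox = mkQueryStringQuery (QueryString "quick AND fox")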
data FieldOrFields = FofField FieldName
| FofFields (NonEmpty FieldName) deriving (Eq, Show)
instance ToJSON FieldOrFields where
toJSON (FofField fieldName) =
toJSON fieldName
toJSON (FofFields fieldNames) =
toJSON fieldNames
instance FromJSON FieldOrFields where
parseJSON v = FofField <$> parseJSON v
<|> FofFields <$> (parseNEJSON =<< parseJSON v)
data PrefixQuery =
PrefixQuery
{ prefixQueryField :: FieldName
, prefixQueryPrefixValue :: Text
, prefixQueryBoost :: Maybe Boost } deriving (Eq, Show)
instance ToJSON PrefixQuery where
toJSON (PrefixQuery (FieldName fieldName) queryValue boost) =
object [ fieldName .= omitNulls base ]
where base = [ "value" .= queryValue
, "boost" .= boost ]
instance FromJSON PrefixQuery where
parseJSON = withObject "PrefixQuery" parse
where parse = fieldTagged $ \fn o ->
PrefixQuery fn
<$> o .: "value"
<*> o .:? "boost"
data NestedQuery =
NestedQuery
{ nestedQueryPath :: QueryPath
, nestedQueryScoreType :: ScoreType
, nestedQuery :: Query } deriving (Eq, Show)
instance ToJSON NestedQuery where
toJSON (NestedQuery nqPath nqScoreType nqQuery) =
object [ "path" .= nqPath
, "score_mode" .= nqScoreType
, "query" .= nqQuery ]
instance FromJSON NestedQuery where
parseJSON = withObject "NestedQuery" parse
where parse o = NestedQuery
<$> o .: "path"
<*> o .: "score_mode"
<*> o .: "query"
data MoreLikeThisFieldQuery =
MoreLikeThisFieldQuery
{ moreLikeThisFieldText :: Text
, moreLikeThisFieldFields :: FieldName
-- default 0.3 (30%)
, moreLikeThisFieldPercentMatch :: Maybe PercentMatch
, moreLikeThisFieldMinimumTermFreq :: Maybe MinimumTermFrequency
, moreLikeThisFieldMaxQueryTerms :: Maybe MaxQueryTerms
, moreLikeThisFieldStopWords :: Maybe (NonEmpty StopWord)
, moreLikeThisFieldMinDocFrequency :: Maybe MinDocFrequency
, moreLikeThisFieldMaxDocFrequency :: Maybe MaxDocFrequency
, moreLikeThisFieldMinWordLength :: Maybe MinWordLength
, moreLikeThisFieldMaxWordLength :: Maybe MaxWordLength
, moreLikeThisFieldBoostTerms :: Maybe BoostTerms
, moreLikeThisFieldBoost :: Maybe Boost
, moreLikeThisFieldAnalyzer :: Maybe Analyzer
} deriving (Eq, Show)
instance ToJSON MoreLikeThisFieldQuery where
toJSON (MoreLikeThisFieldQuery text (FieldName fieldName)
percent mtf mqt stopwords mindf maxdf
minwl maxwl boostTerms boost analyzer) =
object [ fieldName .= omitNulls base ]
where base = [ "like_text" .= text
, "percent_terms_to_match" .= percent
, "min_term_freq" .= mtf
, "max_query_terms" .= mqt
, "stop_words" .= stopwords
, "min_doc_freq" .= mindf
, "max_doc_freq" .= maxdf
, "min_word_length" .= minwl
, "max_word_length" .= maxwl
, "boost_terms" .= boostTerms
, "boost" .= boost
, "analyzer" .= analyzer ]
instance FromJSON MoreLikeThisFieldQuery where
parseJSON = withObject "MoreLikeThisFieldQuery" parse
where parse = fieldTagged $ \fn o ->
MoreLikeThisFieldQuery
<$> o .: "like_text"
<*> pure fn
<*> o .:? "percent_terms_to_match"
<*> o .:? "min_term_freq"
<*> o .:? "max_query_terms"
-- <*> (optionalNE =<< o .:? "stop_words")
<*> o .:? "stop_words"
<*> o .:? "min_doc_freq"
<*> o .:? "max_doc_freq"
<*> o .:? "min_word_length"
<*> o .:? "max_word_length"
<*> o .:? "boost_terms"
<*> o .:? "boost"
<*> o .:? "analyzer"
-- optionalNE = maybe (pure Nothing) (fmap Just . parseNEJSON)
data MoreLikeThisQuery =
MoreLikeThisQuery
{ moreLikeThisText :: Text
, moreLikeThisFields :: Maybe (NonEmpty FieldName)
-- default 0.3 (30%)
, moreLikeThisPercentMatch :: Maybe PercentMatch
, moreLikeThisMinimumTermFreq :: Maybe MinimumTermFrequency
, moreLikeThisMaxQueryTerms :: Maybe MaxQueryTerms
, moreLikeThisStopWords :: Maybe (NonEmpty StopWord)
, moreLikeThisMinDocFrequency :: Maybe MinDocFrequency
, moreLikeThisMaxDocFrequency :: Maybe MaxDocFrequency
, moreLikeThisMinWordLength :: Maybe MinWordLength
, moreLikeThisMaxWordLength :: Maybe MaxWordLength
, moreLikeThisBoostTerms :: Maybe BoostTerms
, moreLikeThisBoost :: Maybe Boost
, moreLikeThisAnalyzer :: Maybe Analyzer
} deriving (Eq, Show)
instance ToJSON MoreLikeThisQuery where
toJSON (MoreLikeThisQuery text fields percent
mtf mqt stopwords mindf maxdf
minwl maxwl boostTerms boost analyzer) =
omitNulls base
where base = [ "like_text" .= text
, "fields" .= fields
, "percent_terms_to_match" .= percent
, "min_term_freq" .= mtf
, "max_query_terms" .= mqt
, "stop_words" .= stopwords
, "min_doc_freq" .= mindf
, "max_doc_freq" .= maxdf
, "min_word_length" .= minwl
, "max_word_length" .= maxwl
, "boost_terms" .= boostTerms
, "boost" .= boost
, "analyzer" .= analyzer ]
instance FromJSON MoreLikeThisQuery where
parseJSON = withObject "MoreLikeThisQuery" parse
where parse o = MoreLikeThisQuery
<$> o .: "like_text"
-- <*> (optionalNE =<< o .:? "fields")
<*> o .:? "fields"
<*> o .:? "percent_terms_to_match"
<*> o .:? "min_term_freq"
<*> o .:? "max_query_terms"
-- <*> (optionalNE =<< o .:? "stop_words")
<*> o .:? "stop_words"
<*> o .:? "min_doc_freq"
<*> o .:? "max_doc_freq"
<*> o .:? "min_word_length"
<*> o .:? "max_word_length"
<*> o .:? "boost_terms"
<*> o .:? "boost"
<*> o .:? "analyzer"
-- optionalNE = maybe (pure Nothing) (fmap Just . parseNEJSON)
data IndicesQuery =
IndicesQuery
{ indicesQueryIndices :: [IndexName]
, indicesQuery :: Query
-- default "all"
, indicesQueryNoMatch :: Maybe Query } deriving (Eq, Show)
instance ToJSON IndicesQuery where
toJSON (IndicesQuery indices query noMatch) =
omitNulls [ "indices" .= indices
, "no_match_query" .= noMatch
, "query" .= query ]
instance FromJSON IndicesQuery where
parseJSON = withObject "IndicesQuery" parse
where parse o = IndicesQuery
<$> o .:? "indices" .!= []
<*> o .: "query"
<*> o .:? "no_match_query"
data HasParentQuery =
HasParentQuery
{ hasParentQueryType :: RelationName
, hasParentQuery :: Query
, hasParentQueryScore :: Maybe AggregateParentScore
, hasParentIgnoreUnmapped :: Maybe IgnoreUnmapped
} deriving (Eq, Show)
instance ToJSON HasParentQuery where
toJSON (HasParentQuery queryType query scoreType ignoreUnmapped) =
omitNulls [ "parent_type" .= queryType
, "score" .= scoreType
, "query" .= query
, "ignore_unmapped" .= ignoreUnmapped
]
instance FromJSON HasParentQuery where
parseJSON = withObject "HasParentQuery" parse
where parse o = HasParentQuery
<$> o .: "parent_type"
<*> o .: "query"
<*> o .:? "score"
<*> o .:? "ignore_unmapped"
data HasChildQuery =
HasChildQuery
{ hasChildQueryType :: RelationName
, hasChildQuery :: Query
, hasChildQueryScoreType :: Maybe ScoreType
, hasChildIgnoreUnmappped :: Maybe IgnoreUnmapped
, hasChildMinChildren :: Maybe MinChildren
, hasChildMaxChildren :: Maybe MaxChildren
} deriving (Eq, Show)
instance ToJSON HasChildQuery where
toJSON (HasChildQuery queryType query scoreType ignoreUnmapped minChildren maxChildren) =
omitNulls [ "query" .= query
, "score_mode" .= scoreType
, "type" .= queryType
, "min_children" .= minChildren
, "max_children" .= maxChildren
, "ignore_unmapped" .= ignoreUnmapped
]
instance FromJSON HasChildQuery where
parseJSON = withObject "HasChildQuery" parse
where parse o = HasChildQuery
<$> o .: "type"
<*> o .: "query"
<*> o .:? "score_mode"
<*> o .:? "ignore_unmapped"
<*> o .:? "min_children"
<*> o .:? "max_children"
data ScoreType =
ScoreTypeMax
| ScoreTypeSum
| ScoreTypeAvg
| ScoreTypeNone deriving (Eq, Show)
instance ToJSON ScoreType where
toJSON ScoreTypeMax = "max"
toJSON ScoreTypeAvg = "avg"
toJSON ScoreTypeSum = "sum"
toJSON ScoreTypeNone = "none"
instance FromJSON ScoreType where
parseJSON = withText "ScoreType" parse
where parse "max" = pure ScoreTypeMax
parse "avg" = pure ScoreTypeAvg
parse "sum" = pure ScoreTypeSum
parse "none" = pure ScoreTypeNone
parse t = fail ("Unexpected ScoreType: " <> show t)
data FuzzyQuery =
FuzzyQuery { fuzzyQueryField :: FieldName
, fuzzyQueryValue :: Text
, fuzzyQueryPrefixLength :: PrefixLength
, fuzzyQueryMaxExpansions :: MaxExpansions
, fuzzyQueryFuzziness :: Fuzziness
, fuzzyQueryBoost :: Maybe Boost
} deriving (Eq, Show)
instance ToJSON FuzzyQuery where
toJSON (FuzzyQuery (FieldName fieldName) queryText
prefixLength maxEx fuzziness boost) =
object [ fieldName .= omitNulls base ]
where base = [ "value" .= queryText
, "fuzziness" .= fuzziness
, "prefix_length" .= prefixLength
, "boost" .= boost
, "max_expansions" .= maxEx ]
instance FromJSON FuzzyQuery where
parseJSON = withObject "FuzzyQuery" parse
where parse = fieldTagged $ \fn o ->
FuzzyQuery fn
<$> o .: "value"
<*> o .: "prefix_length"
<*> o .: "max_expansions"
<*> o .: "fuzziness"
<*> o .:? "boost"
data FuzzyLikeFieldQuery =
FuzzyLikeFieldQuery
{ fuzzyLikeField :: FieldName
-- anaphora is good for the soul.
, fuzzyLikeFieldText :: Text
, fuzzyLikeFieldMaxQueryTerms :: MaxQueryTerms
, fuzzyLikeFieldIgnoreTermFrequency :: IgnoreTermFrequency
, fuzzyLikeFieldFuzziness :: Fuzziness
, fuzzyLikeFieldPrefixLength :: PrefixLength
, fuzzyLikeFieldBoost :: Boost
, fuzzyLikeFieldAnalyzer :: Maybe Analyzer
} deriving (Eq, Show)
instance ToJSON FuzzyLikeFieldQuery where
toJSON (FuzzyLikeFieldQuery (FieldName fieldName)
fieldText maxTerms ignoreFreq fuzziness prefixLength
boost analyzer) =
object [ fieldName .=
omitNulls [ "like_text" .= fieldText
, "max_query_terms" .= maxTerms
, "ignore_tf" .= ignoreFreq
, "fuzziness" .= fuzziness
, "prefix_length" .= prefixLength
, "analyzer" .= analyzer
, "boost" .= boost ]]
instance FromJSON FuzzyLikeFieldQuery where
parseJSON = withObject "FuzzyLikeFieldQuery" parse
where parse = fieldTagged $ \fn o ->
FuzzyLikeFieldQuery fn
<$> o .: "like_text"
<*> o .: "max_query_terms"
<*> o .: "ignore_tf"
<*> o .: "fuzziness"
<*> o .: "prefix_length"
<*> o .: "boost"
<*> o .:? "analyzer"
data FuzzyLikeThisQuery =
FuzzyLikeThisQuery
{ fuzzyLikeFields :: [FieldName]
, fuzzyLikeText :: Text
, fuzzyLikeMaxQueryTerms :: MaxQueryTerms
, fuzzyLikeIgnoreTermFrequency :: IgnoreTermFrequency
, fuzzyLikeFuzziness :: Fuzziness
, fuzzyLikePrefixLength :: PrefixLength
, fuzzyLikeBoost :: Boost
, fuzzyLikeAnalyzer :: Maybe Analyzer
} deriving (Eq, Show)
instance ToJSON FuzzyLikeThisQuery where
toJSON (FuzzyLikeThisQuery fields text maxTerms
ignoreFreq fuzziness prefixLength boost analyzer) =
omitNulls base
where base = [ "fields" .= fields
, "like_text" .= text
, "max_query_terms" .= maxTerms
, "ignore_tf" .= ignoreFreq
, "fuzziness" .= fuzziness
, "prefix_length" .= prefixLength
, "analyzer" .= analyzer
, "boost" .= boost ]
instance FromJSON FuzzyLikeThisQuery where
parseJSON = withObject "FuzzyLikeThisQuery" parse
where parse o = FuzzyLikeThisQuery
<$> o .:? "fields" .!= []
<*> o .: "like_text"
<*> o .: "max_query_terms"
<*> o .: "ignore_tf"
<*> o .: "fuzziness"
<*> o .: "prefix_length"
<*> o .: "boost"
<*> o .:? "analyzer"
data DisMaxQuery =
DisMaxQuery { disMaxQueries :: [Query]
-- default 0.0
, disMaxTiebreaker :: Tiebreaker
, disMaxBoost :: Maybe Boost
} deriving (Eq, Show)
instance ToJSON DisMaxQuery where
toJSON (DisMaxQuery queries tiebreaker boost) =
omitNulls base
where base = [ "queries" .= queries
, "boost" .= boost
, "tie_breaker" .= tiebreaker ]
instance FromJSON DisMaxQuery where
parseJSON = withObject "DisMaxQuery" parse
where parse o = DisMaxQuery
<$> o .:? "queries" .!= []
<*> o .: "tie_breaker"
<*> o .:? "boost"
data MatchQuery = MatchQuery
{ matchQueryField :: FieldName
, matchQueryQueryString :: QueryString
, matchQueryOperator :: BooleanOperator
, matchQueryZeroTerms :: ZeroTermsQuery
, matchQueryCutoffFrequency :: Maybe CutoffFrequency
, matchQueryMatchType :: Maybe MatchQueryType
, matchQueryAnalyzer :: Maybe Analyzer
, matchQueryMaxExpansions :: Maybe MaxExpansions
, matchQueryLenient :: Maybe Lenient
, matchQueryBoost :: Maybe Boost
, matchQueryMinimumShouldMatch :: Maybe Text
, matchQueryFuzziness :: Maybe Fuzziness
} deriving (Eq, Show)
instance ToJSON MatchQuery where
toJSON (MatchQuery (FieldName fieldName)
(QueryString mqQueryString) booleanOperator
zeroTermsQuery cutoffFrequency matchQueryType
analyzer maxExpansions lenient boost
minShouldMatch mqFuzziness
) =
object [ fieldName .= omitNulls base ]
where base = [ "query" .= mqQueryString
, "operator" .= booleanOperator
, "zero_terms_query" .= zeroTermsQuery
, "cutoff_frequency" .= cutoffFrequency
, "type" .= matchQueryType
, "analyzer" .= analyzer
, "max_expansions" .= maxExpansions
, "lenient" .= lenient
, "boost" .= boost
, "minimum_should_match" .= minShouldMatch
, "fuzziness" .= mqFuzziness
]
instance FromJSON MatchQuery where
parseJSON = withObject "MatchQuery" parse
where parse = fieldTagged $ \fn o ->
MatchQuery fn
<$> o .: "query"
<*> o .: "operator"
<*> o .: "zero_terms_query"
<*> o .:? "cutoff_frequency"
<*> o .:? "type"
<*> o .:? "analyzer"
<*> o .:? "max_expansions"
<*> o .:? "lenient"
<*> o .:? "boost"
<*> o .:? "minimum_should_match"
<*> o .:? "fuzziness"
{-| 'mkMatchQuery' is a convenience function that defaults the less common parameters,
enabling you to provide only the 'FieldName' and 'QueryString' to make a 'MatchQuery'
-}
mkMatchQuery :: FieldName -> QueryString -> MatchQuery
mkMatchQuery field query = MatchQuery field query Or ZeroTermsNone Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
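-- Usage sketch (field and query text are hypothetical):
--
-- > nameMatch :: MatchQuery
-- > nameMatch = mkMatchQuery (FieldName "user.name") (QueryString "grace hopper")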
data MatchQueryType =
MatchPhrase
| MatchPhrasePrefix deriving (Eq, Show)
instance ToJSON MatchQueryType where
toJSON MatchPhrase = "phrase"
toJSON MatchPhrasePrefix = "phrase_prefix"
instance FromJSON MatchQueryType where
parseJSON = withText "MatchQueryType" parse
where parse "phrase" = pure MatchPhrase
parse "phrase_prefix" = pure MatchPhrasePrefix
parse t = fail ("Unexpected MatchQueryType: " <> show t)
data MultiMatchQuery = MultiMatchQuery
{ multiMatchQueryFields :: [FieldName]
, multiMatchQueryString :: QueryString
, multiMatchQueryOperator :: BooleanOperator
, multiMatchQueryZeroTerms :: ZeroTermsQuery
, multiMatchQueryTiebreaker :: Maybe Tiebreaker
, multiMatchQueryType :: Maybe MultiMatchQueryType
, multiMatchQueryCutoffFrequency :: Maybe CutoffFrequency
, multiMatchQueryAnalyzer :: Maybe Analyzer
, multiMatchQueryMaxExpansions :: Maybe MaxExpansions
, multiMatchQueryLenient :: Maybe Lenient
} deriving (Eq, Show)
instance ToJSON MultiMatchQuery where
toJSON (MultiMatchQuery fields (QueryString query) boolOp
ztQ tb mmqt cf analyzer maxEx lenient) =
object ["multi_match" .= omitNulls base]
where base = [ "fields" .= fmap toJSON fields
, "query" .= query
, "operator" .= boolOp
, "zero_terms_query" .= ztQ
, "tie_breaker" .= tb
, "type" .= mmqt
, "cutoff_frequency" .= cf
, "analyzer" .= analyzer
, "max_expansions" .= maxEx
, "lenient" .= lenient ]
instance FromJSON MultiMatchQuery where
parseJSON = withObject "MultiMatchQuery" parse
where parse raw = do o <- raw .: "multi_match"
MultiMatchQuery
<$> o .:? "fields" .!= []
<*> o .: "query"
<*> o .: "operator"
<*> o .: "zero_terms_query"
<*> o .:? "tie_breaker"
<*> o .:? "type"
<*> o .:? "cutoff_frequency"
<*> o .:? "analyzer"
<*> o .:? "max_expansions"
<*> o .:? "lenient"
{-| 'mkMultiMatchQuery' is a convenience function that defaults the less common parameters,
enabling you to provide only the list of 'FieldName's and 'QueryString' to
make a 'MultiMatchQuery'.
-}
mkMultiMatchQuery :: [FieldName] -> QueryString -> MultiMatchQuery
mkMultiMatchQuery matchFields query =
MultiMatchQuery matchFields query
Or ZeroTermsNone Nothing Nothing Nothing Nothing Nothing Nothing
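-- Usage sketch (field names and query text are hypothetical):
--
-- > titleOrBody :: MultiMatchQuery
-- > titleOrBody = mkMultiMatchQuery [FieldName "title", FieldName "body"] (QueryString "haskell")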
data MultiMatchQueryType =
MultiMatchBestFields
| MultiMatchMostFields
| MultiMatchCrossFields
| MultiMatchPhrase
| MultiMatchPhrasePrefix deriving (Eq, Show)
instance ToJSON MultiMatchQueryType where
toJSON MultiMatchBestFields = "best_fields"
toJSON MultiMatchMostFields = "most_fields"
toJSON MultiMatchCrossFields = "cross_fields"
toJSON MultiMatchPhrase = "phrase"
toJSON MultiMatchPhrasePrefix = "phrase_prefix"
instance FromJSON MultiMatchQueryType where
parseJSON = withText "MultiMatchPhrasePrefix" parse
where parse "best_fields" = pure MultiMatchBestFields
parse "most_fields" = pure MultiMatchMostFields
parse "cross_fields" = pure MultiMatchCrossFields
parse "phrase" = pure MultiMatchPhrase
parse "phrase_prefix" = pure MultiMatchPhrasePrefix
parse t = fail ("Unexpected MultiMatchPhrasePrefix: " <> show t)
data BoolQuery =
BoolQuery { boolQueryMustMatch :: [Query]
, boolQueryFilter :: [Filter]
, boolQueryMustNotMatch :: [Query]
, boolQueryShouldMatch :: [Query]
, boolQueryMinimumShouldMatch :: Maybe MinimumMatch
, boolQueryBoost :: Maybe Boost
, boolQueryDisableCoord :: Maybe DisableCoord
} deriving (Eq, Show)
instance ToJSON BoolQuery where
toJSON (BoolQuery mustM filterM' notM shouldM bqMin boost disableCoord) =
omitNulls base
where base = [ "must" .= mustM
, "filter" .= filterM'
, "must_not" .= notM
, "should" .= shouldM
, "minimum_should_match" .= bqMin
, "boost" .= boost
, "disable_coord" .= disableCoord ]
instance FromJSON BoolQuery where
parseJSON = withObject "BoolQuery" parse
where parse o = BoolQuery
<$> o .:? "must" .!= []
<*> o .:? "filter" .!= []
<*> o .:? "must_not" .!= []
<*> o .:? "should" .!= []
<*> o .:? "minimum_should_match"
<*> o .:? "boost"
<*> o .:? "disable_coord"
mkBoolQuery :: [Query] -> [Filter] -> [Query] -> [Query] -> BoolQuery
mkBoolQuery must filt mustNot should =
BoolQuery must filt mustNot should Nothing Nothing Nothing
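-- Usage sketch (field names and values are hypothetical): require a match on
-- @user@ and filter on @status@, with no must_not or should clauses.
--
-- > userQuery :: Query
-- > userQuery = QueryBoolQuery $ mkBoolQuery
-- >   [QueryMatchQuery (mkMatchQuery (FieldName "user") (QueryString "kimchy"))]
-- >   [Filter (TermQuery (Term "status" "active") Nothing)]
-- >   [] []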
data BoostingQuery =
BoostingQuery { positiveQuery :: Query
, negativeQuery :: Query
, negativeBoost :: Boost } deriving (Eq, Show)
instance ToJSON BoostingQuery where
toJSON (BoostingQuery bqPositiveQuery bqNegativeQuery bqNegativeBoost) =
object [ "positive" .= bqPositiveQuery
, "negative" .= bqNegativeQuery
, "negative_boost" .= bqNegativeBoost ]
instance FromJSON BoostingQuery where
parseJSON = withObject "BoostingQuery" parse
where parse o = BoostingQuery
<$> o .: "positive"
<*> o .: "negative"
<*> o .: "negative_boost"
data CommonTermsQuery =
CommonTermsQuery { commonField :: FieldName
, commonQuery :: QueryString
, commonCutoffFrequency :: CutoffFrequency
, commonLowFreqOperator :: BooleanOperator
, commonHighFreqOperator :: BooleanOperator
, commonMinimumShouldMatch :: Maybe CommonMinimumMatch
, commonBoost :: Maybe Boost
, commonAnalyzer :: Maybe Analyzer
, commonDisableCoord :: Maybe DisableCoord
} deriving (Eq, Show)
instance ToJSON CommonTermsQuery where
toJSON (CommonTermsQuery (FieldName fieldName)
(QueryString query) cf lfo hfo msm
boost analyzer disableCoord) =
object [fieldName .= omitNulls base ]
where base = [ "query" .= query
, "cutoff_frequency" .= cf
, "low_freq_operator" .= lfo
, "minimum_should_match" .= msm
, "boost" .= boost
, "analyzer" .= analyzer
, "disable_coord" .= disableCoord
, "high_freq_operator" .= hfo ]
instance FromJSON CommonTermsQuery where
parseJSON = withObject "CommonTermsQuery" parse
where parse = fieldTagged $ \fn o ->
CommonTermsQuery fn
<$> o .: "query"
<*> o .: "cutoff_frequency"
<*> o .: "low_freq_operator"
<*> o .: "high_freq_operator"
<*> o .:? "minimum_should_match"
<*> o .:? "boost"
<*> o .:? "analyzer"
<*> o .:? "disable_coord"
data CommonMinimumMatch =
CommonMinimumMatchHighLow MinimumMatchHighLow
| CommonMinimumMatch MinimumMatch
deriving (Eq, Show)
instance ToJSON CommonMinimumMatch where
toJSON (CommonMinimumMatch mm) = toJSON mm
toJSON (CommonMinimumMatchHighLow (MinimumMatchHighLow lowF highF)) =
object [ "low_freq" .= lowF
, "high_freq" .= highF ]
instance FromJSON CommonMinimumMatch where
parseJSON v = parseMinimum v
<|> parseMinimumHighLow v
where parseMinimum = fmap CommonMinimumMatch . parseJSON
parseMinimumHighLow = fmap CommonMinimumMatchHighLow . withObject "CommonMinimumMatchHighLow" (\o ->
MinimumMatchHighLow
<$> o .: "low_freq"
<*> o .: "high_freq")
data MinimumMatchHighLow =
MinimumMatchHighLow { lowFreq :: MinimumMatch
, highFreq :: MinimumMatch } deriving (Eq, Show)
data ZeroTermsQuery =
ZeroTermsNone
| ZeroTermsAll deriving (Eq, Show)
instance ToJSON ZeroTermsQuery where
toJSON ZeroTermsNone = String "none"
toJSON ZeroTermsAll = String "all"
instance FromJSON ZeroTermsQuery where
parseJSON = withText "ZeroTermsQuery" parse
where parse "none" = pure ZeroTermsNone
parse "all" = pure ZeroTermsAll
parse q = fail ("Unexpected ZeroTermsQuery: " <> show q)
data RangeExecution = RangeExecutionIndex
| RangeExecutionFielddata deriving (Eq, Show)
-- index for smaller ranges, fielddata for longer ranges
instance ToJSON RangeExecution where
toJSON RangeExecutionIndex = "index"
toJSON RangeExecutionFielddata = "fielddata"
instance FromJSON RangeExecution where
parseJSON = withText "RangeExecution" parse
where parse "index" = pure RangeExecutionIndex
parse "fielddata" = pure RangeExecutionFielddata
          parse t = fail ("Unrecognized RangeExecution: " <> show t)
newtype Regexp = Regexp Text deriving (Eq, Show, FromJSON)
data RegexpFlags = AllRegexpFlags
| NoRegexpFlags
| SomeRegexpFlags (NonEmpty RegexpFlag) deriving (Eq, Show)
instance ToJSON RegexpFlags where
toJSON AllRegexpFlags = String "ALL"
toJSON NoRegexpFlags = String "NONE"
toJSON (SomeRegexpFlags (h :| fs)) = String $ T.intercalate "|" flagStrs
where flagStrs = map flagStr . nub $ h:fs
flagStr AnyString = "ANYSTRING"
flagStr Automaton = "AUTOMATON"
flagStr Complement = "COMPLEMENT"
flagStr Empty = "EMPTY"
flagStr Intersection = "INTERSECTION"
flagStr Interval = "INTERVAL"
instance FromJSON RegexpFlags where
parseJSON = withText "RegexpFlags" parse
where parse "ALL" = pure AllRegexpFlags
parse "NONE" = pure NoRegexpFlags
parse t = SomeRegexpFlags <$> parseNEJSON (String <$> T.splitOn "|" t)
data RegexpFlag = AnyString
| Automaton
| Complement
| Empty
| Intersection
| Interval deriving (Eq, Show)
instance FromJSON RegexpFlag where
parseJSON = withText "RegexpFlag" parse
where parse "ANYSTRING" = pure AnyString
parse "AUTOMATON" = pure Automaton
parse "COMPLEMENT" = pure Complement
parse "EMPTY" = pure Empty
parse "INTERSECTION" = pure Intersection
parse "INTERVAL" = pure Interval
parse f = fail ("Unknown RegexpFlag: " <> show f)
newtype LessThan = LessThan Double deriving (Eq, Show)
newtype LessThanEq = LessThanEq Double deriving (Eq, Show)
newtype GreaterThan = GreaterThan Double deriving (Eq, Show)
newtype GreaterThanEq = GreaterThanEq Double deriving (Eq, Show)
newtype LessThanD = LessThanD UTCTime deriving (Eq, Show)
newtype LessThanEqD = LessThanEqD UTCTime deriving (Eq, Show)
newtype GreaterThanD = GreaterThanD UTCTime deriving (Eq, Show)
newtype GreaterThanEqD = GreaterThanEqD UTCTime deriving (Eq, Show)
data RangeValue = RangeDateLte LessThanEqD
| RangeDateLt LessThanD
| RangeDateGte GreaterThanEqD
| RangeDateGt GreaterThanD
| RangeDateGtLt GreaterThanD LessThanD
| RangeDateGteLte GreaterThanEqD LessThanEqD
| RangeDateGteLt GreaterThanEqD LessThanD
| RangeDateGtLte GreaterThanD LessThanEqD
| RangeDoubleLte LessThanEq
| RangeDoubleLt LessThan
| RangeDoubleGte GreaterThanEq
| RangeDoubleGt GreaterThan
| RangeDoubleGtLt GreaterThan LessThan
| RangeDoubleGteLte GreaterThanEq LessThanEq
| RangeDoubleGteLt GreaterThanEq LessThan
| RangeDoubleGtLte GreaterThan LessThanEq
deriving (Eq, Show)
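-- | Shared worker for parsing range objects: reads the optional @lt@, @lte@,
-- @gt@ and @gte@ keys and combines whichever bounds are present using the
-- supplied constructors, falling back to the given parser when none are set.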
parseRangeValue :: ( FromJSON t4
, FromJSON t3
, FromJSON t2
, FromJSON t1
)
=> (t3 -> t5)
-> (t1 -> t6)
-> (t4 -> t7)
-> (t2 -> t8)
-> (t5 -> t6 -> b)
-> (t7 -> t6 -> b)
-> (t5 -> t8 -> b)
-> (t7 -> t8 -> b)
-> (t5 -> b)
-> (t6 -> b)
-> (t7 -> b)
-> (t8 -> b)
-> Parser b
-> Object
-> Parser b
parseRangeValue mkGt mkLt mkGte mkLte
fGtLt fGteLt fGtLte fGteLte
fGt fLt fGte fLte nada o = do
lt <- o .:? "lt"
lte <- o .:? "lte"
gt <- o .:? "gt"
gte <- o .:? "gte"
case (lt, lte, gt, gte) of
(Just a, _, Just b, _) ->
return (fGtLt (mkGt b) (mkLt a))
(Just a, _, _, Just b) ->
return (fGteLt (mkGte b) (mkLt a))
(_, Just a, Just b, _) ->
return (fGtLte (mkGt b) (mkLte a))
(_, Just a, _, Just b) ->
return (fGteLte (mkGte b) (mkLte a))
(_, _, Just a, _) ->
return (fGt (mkGt a))
(Just a, _, _, _) ->
return (fLt (mkLt a))
(_, _, _, Just a) ->
return (fGte (mkGte a))
(_, Just a, _, _) ->
return (fLte (mkLte a))
(Nothing, Nothing, Nothing, Nothing) ->
nada
instance FromJSON RangeValue where
parseJSON = withObject "RangeValue" parse
where parse o = parseDate o
<|> parseDouble o
parseDate o =
parseRangeValue
GreaterThanD LessThanD
GreaterThanEqD LessThanEqD
RangeDateGtLt RangeDateGteLt
RangeDateGtLte RangeDateGteLte
RangeDateGt RangeDateLt
RangeDateGte RangeDateLte
mzero o
parseDouble o =
parseRangeValue
GreaterThan LessThan
GreaterThanEq LessThanEq
RangeDoubleGtLt RangeDoubleGteLt
RangeDoubleGtLte RangeDoubleGteLte
RangeDoubleGt RangeDoubleLt
RangeDoubleGte RangeDoubleLte
mzero o
rangeValueToPair :: RangeValue -> [Pair]
rangeValueToPair rv = case rv of
RangeDateLte (LessThanEqD t) -> ["lte" .= t]
RangeDateGte (GreaterThanEqD t) -> ["gte" .= t]
RangeDateLt (LessThanD t) -> ["lt" .= t]
RangeDateGt (GreaterThanD t) -> ["gt" .= t]
RangeDateGteLte (GreaterThanEqD l) (LessThanEqD g) -> ["gte" .= l, "lte" .= g]
RangeDateGtLte (GreaterThanD l) (LessThanEqD g) -> ["gt" .= l, "lte" .= g]
RangeDateGteLt (GreaterThanEqD l) (LessThanD g) -> ["gte" .= l, "lt" .= g]
RangeDateGtLt (GreaterThanD l) (LessThanD g) -> ["gt" .= l, "lt" .= g]
RangeDoubleLte (LessThanEq t) -> ["lte" .= t]
RangeDoubleGte (GreaterThanEq t) -> ["gte" .= t]
RangeDoubleLt (LessThan t) -> ["lt" .= t]
RangeDoubleGt (GreaterThan t) -> ["gt" .= t]
RangeDoubleGteLte (GreaterThanEq l) (LessThanEq g) -> ["gte" .= l, "lte" .= g]
RangeDoubleGtLte (GreaterThan l) (LessThanEq g) -> ["gt" .= l, "lte" .= g]
RangeDoubleGteLt (GreaterThanEq l) (LessThan g) -> ["gte" .= l, "lt" .= g]
RangeDoubleGtLt (GreaterThan l) (LessThan g) -> ["gt" .= l, "lt" .= g]
data Term = Term { termField :: Text
, termValue :: Text } deriving (Eq, Show)
instance ToJSON Term where
toJSON (Term field value) = object ["term" .= object
[field .= value]]
instance FromJSON Term where
parseJSON = withObject "Term" parse
where parse o = do termObj <- o .: "term"
case HM.toList termObj of
[(fn, v)] -> Term fn <$> parseJSON v
_ -> fail "Expected object with 1 field-named key"
data BoolMatch = MustMatch Term Cache
| MustNotMatch Term Cache
| ShouldMatch [Term] Cache deriving (Eq, Show)
instance ToJSON BoolMatch where
toJSON (MustMatch term cache) = object ["must" .= term,
"_cache" .= cache]
toJSON (MustNotMatch term cache) = object ["must_not" .= term,
"_cache" .= cache]
toJSON (ShouldMatch terms cache) = object ["should" .= fmap toJSON terms,
"_cache" .= cache]
instance FromJSON BoolMatch where
parseJSON = withObject "BoolMatch" parse
where parse o = mustMatch `taggedWith` "must"
<|> mustNotMatch `taggedWith` "must_not"
<|> shouldMatch `taggedWith` "should"
where taggedWith parser k = parser =<< o .: k
mustMatch t = MustMatch t <$> o .:? "_cache" .!= defaultCache
mustNotMatch t = MustNotMatch t <$> o .:? "_cache" .!= defaultCache
shouldMatch t = ShouldMatch t <$> o .:? "_cache" .!= defaultCache
-- "memory" or "indexed"
data GeoFilterType = GeoFilterMemory
| GeoFilterIndexed deriving (Eq, Show)
instance ToJSON GeoFilterType where
toJSON GeoFilterMemory = String "memory"
toJSON GeoFilterIndexed = String "indexed"
instance FromJSON GeoFilterType where
parseJSON = withText "GeoFilterType" parse
where parse "memory" = pure GeoFilterMemory
parse "indexed" = pure GeoFilterIndexed
parse t = fail ("Unrecognized GeoFilterType: " <> show t)
data LatLon = LatLon { lat :: Double
, lon :: Double } deriving (Eq, Show)
instance ToJSON LatLon where
toJSON (LatLon lLat lLon) =
object ["lat" .= lLat
, "lon" .= lLon]
instance FromJSON LatLon where
parseJSON = withObject "LatLon" parse
where parse o = LatLon <$> o .: "lat"
<*> o .: "lon"
data GeoBoundingBox =
GeoBoundingBox { topLeft :: LatLon
, bottomRight :: LatLon } deriving (Eq, Show)
instance ToJSON GeoBoundingBox where
toJSON (GeoBoundingBox gbbTopLeft gbbBottomRight) =
object ["top_left" .= gbbTopLeft
, "bottom_right" .= gbbBottomRight]
instance FromJSON GeoBoundingBox where
parseJSON = withObject "GeoBoundingBox" parse
where parse o = GeoBoundingBox
<$> o .: "top_left"
<*> o .: "bottom_right"
data GeoBoundingBoxConstraint =
GeoBoundingBoxConstraint { geoBBField :: FieldName
, constraintBox :: GeoBoundingBox
, bbConstraintcache :: Cache
, geoType :: GeoFilterType
} deriving (Eq, Show)
instance ToJSON GeoBoundingBoxConstraint where
toJSON (GeoBoundingBoxConstraint
(FieldName gbbcGeoBBField) gbbcConstraintBox cache type') =
object [gbbcGeoBBField .= gbbcConstraintBox
, "_cache" .= cache
, "type" .= type']
instance FromJSON GeoBoundingBoxConstraint where
parseJSON = withObject "GeoBoundingBoxConstraint" parse
where parse o = case HM.toList (deleteSeveral ["type", "_cache"] o) of
[(fn, v)] -> GeoBoundingBoxConstraint (FieldName fn)
<$> parseJSON v
<*> o .:? "_cache" .!= defaultCache
<*> o .: "type"
_ -> fail "Could not find field name for GeoBoundingBoxConstraint"
data GeoPoint =
GeoPoint { geoField :: FieldName
, latLon :: LatLon} deriving (Eq, Show)
instance ToJSON GeoPoint where
toJSON (GeoPoint (FieldName geoPointField) geoPointLatLon) =
object [ geoPointField .= geoPointLatLon ]
data DistanceUnit = Miles
| Yards
| Feet
| Inches
| Kilometers
| Meters
| Centimeters
| Millimeters
| NauticalMiles deriving (Eq, Show)
instance ToJSON DistanceUnit where
toJSON Miles = String "mi"
toJSON Yards = String "yd"
toJSON Feet = String "ft"
toJSON Inches = String "in"
toJSON Kilometers = String "km"
toJSON Meters = String "m"
toJSON Centimeters = String "cm"
toJSON Millimeters = String "mm"
toJSON NauticalMiles = String "nmi"
instance FromJSON DistanceUnit where
parseJSON = withText "DistanceUnit" parse
where parse "mi" = pure Miles
parse "yd" = pure Yards
parse "ft" = pure Feet
parse "in" = pure Inches
parse "km" = pure Kilometers
parse "m" = pure Meters
parse "cm" = pure Centimeters
parse "mm" = pure Millimeters
parse "nmi" = pure NauticalMiles
parse u = fail ("Unrecognized DistanceUnit: " <> show u)
data DistanceType = Arc
| SloppyArc -- doesn't exist <1.0
| Plane deriving (Eq, Show)
instance ToJSON DistanceType where
toJSON Arc = String "arc"
toJSON SloppyArc = String "sloppy_arc"
toJSON Plane = String "plane"
instance FromJSON DistanceType where
parseJSON = withText "DistanceType" parse
where parse "arc" = pure Arc
parse "sloppy_arc" = pure SloppyArc
parse "plane" = pure Plane
parse t = fail ("Unrecognized DistanceType: " <> show t)
data OptimizeBbox = OptimizeGeoFilterType GeoFilterType
| NoOptimizeBbox deriving (Eq, Show)
instance ToJSON OptimizeBbox where
toJSON NoOptimizeBbox = String "none"
toJSON (OptimizeGeoFilterType gft) = toJSON gft
instance FromJSON OptimizeBbox where
parseJSON v = withText "NoOptimizeBbox" parseNoOptimize v
<|> parseOptimize v
where parseNoOptimize "none" = pure NoOptimizeBbox
parseNoOptimize _ = mzero
parseOptimize = fmap OptimizeGeoFilterType . parseJSON
data Distance =
Distance { coefficient :: Double
, unit :: DistanceUnit } deriving (Eq, Show)
instance ToJSON Distance where
toJSON (Distance dCoefficient dUnit) =
String boltedTogether where
coefText = showText dCoefficient
(String unitText) = toJSON dUnit
boltedTogether = mappend coefText unitText
instance FromJSON Distance where
parseJSON = withText "Distance" parse
where parse t = Distance <$> parseCoeff nT
<*> parseJSON (String unitT)
where (nT, unitT) = T.span validForNumber t
-- may be a better way to do this
validForNumber '-' = True
validForNumber '.' = True
validForNumber 'e' = True
validForNumber c = isNumber c
parseCoeff "" = fail "Empty string cannot be parsed as number"
parseCoeff s = return (read (T.unpack s))
data DistanceRange =
DistanceRange { distanceFrom :: Distance
, distanceTo :: Distance } deriving (Eq, Show)
type TemplateQueryKey = Text
type TemplateQueryValue = Text
newtype TemplateQueryKeyValuePairs =
TemplateQueryKeyValuePairs (HM.HashMap TemplateQueryKey TemplateQueryValue)
deriving (Eq, Show)
instance ToJSON TemplateQueryKeyValuePairs where
toJSON (TemplateQueryKeyValuePairs x) =
Object $ HM.map toJSON x
instance FromJSON TemplateQueryKeyValuePairs where
parseJSON (Object o) =
pure . TemplateQueryKeyValuePairs $ HM.mapMaybe getValue o
where getValue (String x) = Just x
getValue _ = Nothing
parseJSON _ =
fail "error parsing TemplateQueryKeyValuePairs"
{-| 'BooleanOperator' is the usual And/Or operators with an ES compatible
JSON encoding baked in. Used all over the place.
-}
data BooleanOperator = And | Or deriving (Eq, Show)
instance ToJSON BooleanOperator where
toJSON And = String "and"
toJSON Or = String "or"
instance FromJSON BooleanOperator where
parseJSON = withText "BooleanOperator" parse
where parse "and" = pure And
parse "or" = pure Or
parse o = fail ("Unexpected BooleanOperator: " <> show o)
{-| 'Cache' is for telling ES whether or not it should cache a 'Filter'.
'Query's cannot be cached.
-}
type Cache = Bool -- caching on/off
defaultCache :: Cache
defaultCache = False
data FunctionScoreQuery =
FunctionScoreQuery { functionScoreQuery :: Maybe Query
, functionScoreBoost :: Maybe Boost
, functionScoreFunctions :: FunctionScoreFunctions
, functionScoreMaxBoost :: Maybe Boost
, functionScoreBoostMode :: Maybe BoostMode
, functionScoreMinScore :: Score
, functionScoreScoreMode :: Maybe ScoreMode
} deriving (Eq, Show)
instance ToJSON FunctionScoreQuery where
toJSON (FunctionScoreQuery query boost fns maxBoost boostMode minScore scoreMode) =
omitNulls base
where base = functionScoreFunctionsPair fns :
[ "query" .= query
, "boost" .= boost
, "max_boost" .= maxBoost
, "boost_mode" .= boostMode
, "min_score" .= minScore
, "score_mode" .= scoreMode ]
instance FromJSON FunctionScoreQuery where
parseJSON = withObject "FunctionScoreQuery" parse
where parse o = FunctionScoreQuery
<$> o .:? "query"
<*> o .:? "boost"
<*> (singleFunction o
<|> multipleFunctions `taggedWith` "functions")
<*> o .:? "max_boost"
<*> o .:? "boost_mode"
<*> o .:? "min_score"
<*> o .:? "score_mode"
where taggedWith parser k = parser =<< o .: k
singleFunction = fmap FunctionScoreSingle . parseFunctionScoreFunction
multipleFunctions = pure . FunctionScoreMultiple
data FunctionScoreFunctions =
FunctionScoreSingle FunctionScoreFunction
| FunctionScoreMultiple (NonEmpty ComponentFunctionScoreFunction) deriving (Eq, Show)
data ComponentFunctionScoreFunction =
ComponentFunctionScoreFunction { componentScoreFunctionFilter :: Maybe Filter
, componentScoreFunction :: FunctionScoreFunction
, componentScoreFunctionWeight :: Maybe Weight
} deriving (Eq, Show)
instance ToJSON ComponentFunctionScoreFunction where
toJSON (ComponentFunctionScoreFunction filter' fn weight) =
omitNulls base
where base = functionScoreFunctionPair fn :
[ "filter" .= filter'
, "weight" .= weight ]
instance FromJSON ComponentFunctionScoreFunction where
parseJSON = withObject "ComponentFunctionScoreFunction" parse
where parse o = ComponentFunctionScoreFunction
<$> o .:? "filter"
<*> parseFunctionScoreFunction o
<*> o .:? "weight"
functionScoreFunctionsPair :: FunctionScoreFunctions -> (Text, Value)
functionScoreFunctionsPair (FunctionScoreSingle fn)
= functionScoreFunctionPair fn
functionScoreFunctionsPair (FunctionScoreMultiple componentFns) =
("functions", toJSON componentFns)
fieldTagged :: (Monad m, MonadFail m)=> (FieldName -> Object -> m a) -> Object -> m a
fieldTagged f o = case HM.toList o of
[(k, Object o')] -> f (FieldName k) o'
_ -> fail "Expected object with 1 field-named key"
-- | Fuzziness value as a number or 'AUTO'.
-- See:
-- https://www.elastic.co/guide/en/elasticsearch/reference/current/common-options.html#fuzziness
data Fuzziness = Fuzziness Double | FuzzinessAuto
deriving (Eq, Show)
instance ToJSON Fuzziness where
toJSON (Fuzziness n) = toJSON n
toJSON FuzzinessAuto = String "AUTO"
instance FromJSON Fuzziness where
parseJSON (String "AUTO") = return FuzzinessAuto
parseJSON v = Fuzziness <$> parseJSON v
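-- Usage sketch (field and query text are hypothetical): request AUTO
-- fuzziness on a match query via a record update.
--
-- > fuzzyName :: MatchQuery
-- > fuzzyName = (mkMatchQuery (FieldName "user.name") (QueryString "grace"))
-- >               { matchQueryFuzziness = Just FuzzinessAuto }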
| bitemyapp/bloodhound | src/Database/Bloodhound/Internal/Query.hs | bsd-3-clause | 64,896 | 0 | 59 | 21,405 | 13,834 | 7,281 | 6,553 | 1,374 | 16 |
module Text.ParserCombinators.UU.TyErr
( module Text.ParserCombinators.UU.TyErr.Core
, module Text.ParserCombinators.UU.TyErr.Derived
) where
import Text.ParserCombinators.UU.TyErr.Core
import Text.ParserCombinators.UU.TyErr.Derived
| carlostome/uu-parsinglib-tyerr | src/Text/ParserCombinators/UU/TyErr.hs | bsd-3-clause | 241 | 0 | 5 | 22 | 44 | 33 | 11 | 5 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Network.SSH.Client.SimpleSSH
( -- * Data types
SimpleSSHError(..)
, SimpleSSH
, Session
, Result(..)
, ResultExit(..)
-- * Main functions
, runSimpleSSH
, withSessionPassword
, withSessionKey
, execCommand
, sendFile
-- * Lower-level functions
, openSession
, openSession'
, authenticateWithPassword
, authenticateWithKey
, closeSession
) where
import Control.Applicative
import Control.Monad.Error
import Data.Maybe (isJust)
import qualified Data.ByteString.Char8 as BS
import Foreign.C.String
import Foreign.Marshal.Alloc
import Foreign.Ptr
import Network.SSH.Client.SimpleSSH.Foreign
import Network.SSH.Client.SimpleSSH.Types
getValue :: CEither -> (Ptr () -> IO b) -> IO b
getValue eitherC builder = builder =<< getValueC eitherC
getError :: CEither -> IO SimpleSSHError
getError eitherC = readError <$> getErrorC eitherC
getOut :: CResult -> IO BS.ByteString
getOut ptr = BS.packCString =<< getOutC ptr
getErr :: CResult -> IO BS.ByteString
getErr ptr = BS.packCString =<< getErrC ptr
getExitCode :: CResult -> IO Integer
getExitCode ptr = toInteger <$> getExitCodeC ptr
getExitSignal :: CResult -> IO BS.ByteString
getExitSignal ptr = do
signalPtr <- getExitSignalC ptr
if signalPtr == nullPtr
then return ""
else BS.packCString signalPtr
readResult :: CResult -> IO Result
readResult resultC = Result
<$> getOut resultC
<*> getErr resultC
<*> readResultExit resultC
readResultExit :: CResult -> IO ResultExit
readResultExit resultC = do
exitCode <- getExitCode resultC
exitSignal <- getExitSignal resultC
return $ case (exitCode, exitSignal) of
(0, _) -> ExitSuccess
(_, "") -> ExitFailure exitCode
_ -> ExitSignal exitSignal
readCount :: CCount -> IO Integer
readCount countC = toInteger <$> getCountC countC
-- | Helper which lifts IO actions into 'SimpleSSH'. This is used all over the place.
liftIOEither :: IO (Either SimpleSSHError a) -> SimpleSSH a
liftIOEither ioAction = do
eRes <- liftIO ioAction
case eRes of
Left err -> throwError err
Right res -> return res
-- | Helper which interprets a result coming from C.
--
-- Functions in the C part return pointers to a structure mimicking 'Either'.
liftEitherCFree :: (CEither -> IO ()) -- ^ A custom function to free the CEither
-> (Ptr () -> IO a) -- ^ A function to transform the pointer contained in the C structure
-> IO CEither -- ^ An action returning the structure, typically a call to C
-> IO (Either SimpleSSHError a)
liftEitherCFree customFree builder action = do
eitherC <- action
checkLeft <- isLeftC eitherC
res <- if checkLeft == 0
then Right <$> getValue eitherC builder
else Left <$> getError eitherC
customFree eitherC
return res
-- | Version of 'liftEitherCFree' using the normal 'free'.
liftEitherC :: (Ptr () -> IO a) -> IO CEither -> IO (Either SimpleSSHError a)
liftEitherC = liftEitherCFree free
-- | Open an SSH session, optionally ignoring the known hosts file. The next step is to authenticate.
openSession' :: String -- ^ Hostname
-> Integer -- ^ Port
-> Maybe String -- ^ Path to the known hosts (e.g. ~/.ssh/known_hosts)
-> SimpleSSH Session
openSession' hostname port knownhostsPathM = liftIOEither $ do
hostnameC <- newCString hostname
knownhostsPathC <- case knownhostsPathM of
Nothing ->
return nullPtr
Just knownhostsPath ->
newCString knownhostsPath
let portC = fromInteger port
res <- liftEitherC (return . Session) $ openSessionC hostnameC portC knownhostsPathC
free hostnameC
when (isJust knownhostsPathM) $
free knownhostsPathC
return res
-- | Open an SSH session. The next step is to authenticate.
openSession :: String -- ^ Hostname
-> Integer -- ^ Port
-> String -- ^ Path to the known hosts (e.g. ~/.ssh/known_hosts)
-> SimpleSSH Session
openSession hostname port knownhostsPath = openSession' hostname port $ Just knownhostsPath
-- | Authenticate a session with a username / password pair.
authenticateWithPassword :: Session -- ^ Session to use
-> String -- ^ Username
-> String -- ^ Password
-> SimpleSSH Session
authenticateWithPassword session username password = liftIOEither $ do
usernameC <- newCString username
passwordC <- newCString password
res <- liftEitherC (return . Session) $ authenticatePasswordC session usernameC passwordC
free usernameC
free passwordC
return res
-- | Authenticate with a public key for a given username.
--
-- Leave the passphrase empty if not needed.
authenticateWithKey :: Session -- ^ Session to use
-> String -- ^ Username
-> FilePath -- ^ Path to the public key (e.g. ~/.ssh/id_rsa.pub)
-> FilePath -- ^ Path to the private key (e.g. ~/.ssh/id_rsa)
-> String -- ^ Passphrase
-> SimpleSSH Session
authenticateWithKey session username publicKeyPath privateKeyPath passphrase = liftIOEither $ do
(usernameC, publicKeyPathC, privateKeyPathC, passphraseC) <-
(,,,) <$> newCString username
<*> newCString publicKeyPath
<*> newCString privateKeyPath
<*> newCString passphrase
res <- liftEitherC (return . Session) $ authenticateKeyC session usernameC publicKeyPathC privateKeyPathC passphraseC
mapM_ free [usernameC, publicKeyPathC, privateKeyPathC, passphraseC]
return res
-- | Send a command to the server.
--
-- One should be authenticated before sending commands on a 'Session'.
execCommand :: Session -- ^ Session to use
-> String -- ^ Command
-> SimpleSSH Result
execCommand session command = do
liftIOEither $ do
commandC <- newCString command
res <- liftEitherCFree freeEitherResultC readResult $ execCommandC session commandC
free commandC
return res
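-- Usage sketch (the command is arbitrary); typically run inside
-- 'withSessionPassword' or 'withSessionKey':
--
-- > runUptime :: Session -> SimpleSSH Result
-- > runUptime session = execCommand session "uptime"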
-- | Send a file to the server, returning the number of bytes transferred.
--
-- One should be authenticated before sending files on a 'Session'.
sendFile :: Session -- ^ Session to use
-> Integer -- ^ File mode (e.g. 0o777, note the octal notation)
-> String -- ^ Source path
-> String -- ^ Target path
-> SimpleSSH Integer
sendFile session mode source target = do
liftIOEither $ do
sourceC <- newCString source
targetC <- newCString target
let modeC = fromInteger mode
res <- liftEitherCFree freeEitherCountC readCount $ sendFileC session modeC sourceC targetC
free sourceC
free targetC
return res
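-- Usage sketch (paths are hypothetical): copy a local file to the remote host
-- with mode 644.
--
-- > upload :: Session -> SimpleSSH Integer
-- > upload session = sendFile session 0o644 "/tmp/report.txt" "/home/user/report.txt"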
-- | Close a session.
closeSession :: Session -> SimpleSSH ()
closeSession = lift . closeSessionC
-- | Open a connection, authenticate, execute some action and close the connection.
--
-- It is the safe way of using SimpleSSH. Use this function to authenticate with a username / password pair; to authenticate with a key, see 'withSessionKey'.
withSessionPassword :: String -- ^ Hostname
-> Integer -- ^ Port
-> String -- ^ Path to known_hosts
-> String -- ^ Username
-> String -- ^ Password
-> (Session -> SimpleSSH a) -- ^ Monadic action on the session
-> SimpleSSH a
withSessionPassword hostname port knownhostsPath username password action = do
session <- openSession hostname port knownhostsPath
authenticatedSession <- authenticateWithPassword session username password
res <- action authenticatedSession
closeSession authenticatedSession
return res
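-- Usage sketch (host, port, paths and credentials are all hypothetical):
--
-- > uptimeWithPassword :: SimpleSSH Result
-- > uptimeWithPassword =
-- >   withSessionPassword "example.com" 22 "/home/user/.ssh/known_hosts"
-- >                       "user" "secret"
-- >                       (\session -> execCommand session "uptime")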
-- | Open a connection, authenticate, execute some action and close the connection.
--
-- It is the safe way of using SimpleSSH. This function is to be used to authenticate with a key, otherwise see 'withSessionPassword'.
withSessionKey :: String -- ^ Hostname
-> Integer -- ^ port
-> String -- ^ Path to known_hosts
-> String -- ^ Username
-> String -- ^ Path to public key
-> String -- ^ Path to private key
-> String -- ^ Passphrase
-> (Session -> SimpleSSH a) -- ^ Monadic action on the session
-> SimpleSSH a
withSessionKey hostname port knownhostsPath username publicKeyPath privateKeyPath passphrase action = do
session <- openSession hostname port knownhostsPath
authenticatedSession <- authenticateWithKey session username publicKeyPath privateKeyPath passphrase
res <- action authenticatedSession
closeSession authenticatedSession
return res
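-- Usage sketch (host, port, paths and passphrase are all hypothetical; the
-- empty string means no passphrase is needed):
--
-- > uptimeWithKey :: SimpleSSH Result
-- > uptimeWithKey =
-- >   withSessionKey "example.com" 22 "/home/user/.ssh/known_hosts" "user"
-- >                  "/home/user/.ssh/id_rsa.pub" "/home/user/.ssh/id_rsa" ""
-- >                  (\session -> execCommand session "uptime")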
| jprider63/simplessh-modified | src/Network/SSH/Client/SimpleSSH.hs | bsd-3-clause | 9,089 | 0 | 15 | 2,519 | 1,742 | 872 | 870 | 179 | 3 |
module ReadOnly.EqualitySpec (main, spec) where
import Test.Hspec
import Test.QuickCheck
import PolyGraph.Common
import PolyGraph.Buildable
import PolyGraph.Buildable.Properties
import PolyGraph.Common.PropertySupport
import qualified PolyGraph.ReadOnly.Graph.Properties as GProps
import qualified PolyGraph.ReadOnly.DiGraph.Properties as DiGProps
import PolyGraph.ReadOnly.DiGraph.DiGraphEquality ((~>#==))
import PolyGraph.ReadOnly.Graph.GraphEquality ((~#==))
import qualified Instances.ListGraphs as List
import qualified Instances.EdgeCountMapGraph as Adj
import qualified Instances.DiGraph.DiEdgesByVertexMap as Indx
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "di-edgeCount equality (~>#==)" $ do
it "is reflexive" $ property $
(\(mixedEsAndVsBag :: (MultiOBag Int)) ->
let graph :: Indx.DiEdgeListByVertexMap Int (OPair Int)
graph = buildGraph emptyGraph (getMix mixedEsAndVsBag)
in (graph ~>#== graph)
)
it "two graph types with the same forgetfulness created the same way are equal" $ property $
(\(mixedEsAndVsBag :: (MultiOBag Int)) ->
let graph1 :: Indx.DiEdgeListByVertexMap Int (OPair Int)
graph1 = buildGraph emptyGraph (getMix mixedEsAndVsBag)
graph2 :: Adj.EdgeCountMapDiGraph Int
graph2 = buildGraph emptyGraph (getMix mixedEsAndVsBag)
in (graph1 ~>#== graph2)
)
describe "edgeCount equality (~#==)" $ do
it "is reflexive" $ property $
(\(mixedEsAndVsBag :: MultiUOList Int) ->
let graph :: List.Edges Int (UOPair Int)
graph = buildGraph emptyGraph (getMix mixedEsAndVsBag)
in (graph ~#== graph)
)
it "two graph types with the same forgetfulness created the same way are equal" $ property $
(\(mixedEsAndVsBag :: MultiUOList Int) ->
let graph1 :: List.Edges Int (UOPair Int)
graph1 = buildGraph emptyGraph (getMix mixedEsAndVsBag)
graph2 :: Adj.EdgeCountMapGraph Int
graph2 = buildGraph emptyGraph (getMix mixedEsAndVsBag)
in (graph1 ~#== graph2)
)
| rpeszek/GraphPlay | test/ReadOnly/EqualitySpec.hs | bsd-3-clause | 2,331 | 0 | 19 | 675 | 541 | 293 | 248 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Run where
import Control.Lens
import Control.Monad.Logger
import Control.Monad.Reader
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text
import qualified Data.Text.Encoding.Error as Text
import Data.Text.Strict.Lens
import Database.Persist.Postgresql
import Network.HTTP.Types as HTTP
import qualified Network.Socket as Net
import qualified Network.Wai as Wai
import qualified Network.Wai.Handler.Warp as Warp
import qualified SignedAuth
import System.Environment
import System.Exit
import System.IO
import NejlaCommon ( withDBPool, getDBConnectInfo )
import Api
import Audit
import Config
import Monad
import Persist.Migration (doMigrate)
import Persist.Schema
import Types
import User
logMiddleware :: Wai.Middleware
logMiddleware app req respond = app req respond'
where
debug f = hPutStrLn stderr $ "[Info#auth-service]" ++ f
respond' res = do
unless ( Wai.requestMethod req == "GET"
&& Wai.pathInfo req == ["status"]
&& isLocal (Wai.remoteHost req)
) $ debug $ concat
[ " "
, fromBS (Wai.requestMethod req) , " "
, fromBS (Wai.rawPathInfo req) , " > "
, show . HTTP.statusCode $ Wai.responseStatus res
, " "
]
respond res
fromBS = Text.unpack . Text.decodeUtf8With Text.lenientDecode
isLocal Net.SockAddrUnix{} = True
isLocal (Net.SockAddrInet _port haddr) =
Net.hostAddressToTuple haddr == (127,0,0,1)
isLocal (Net.SockAddrInet6 _port _flow haddr6 _scope) =
haddr6 == (0, 0, 0, 1)
runMain :: IO ()
runMain = runStderrLoggingT . filterLogger (\_source level -> level >= LevelWarn)
$ do
confFile <- loadConf "auth_service"
conf <- getAuthServiceConfig confFile
mbLogging <- liftIO $ lookupEnv "log"
let logM = case mbLogging of
Just "true" -> logMiddleware
_ -> Prelude.id
conInfo <- getDBConnectInfo confFile
withDBPool conInfo 5 doMigrate $ \pool -> do
args <- liftIO getArgs
noncePool <- liftIO SignedAuth.newNoncePool
-- AppState for CLI invocations
let appState = ApiState { apiStateConfig = conf
, apiStateAuditSource =
AuditSourceCli { auditSourceCliArguments =
Text.pack <$> args
}
, apiStateNoncePool = noncePool
}
let run :: forall a m. MonadIO m => API a -> m a
run = liftIO . runAPI pool appState
case args of
("adduser": args') -> do
res <- run $ addUser (args' ^.. each . packed)
case res of
Nothing -> liftIO $ do
hPutStrLn stderr "Could not add user"
exitFailure
Just (UserID uid) -> liftIO $ print uid
("chpass": args') -> run $ changePassword args'
("addrole" : args') -> run $ addRole args'
("rmrole" : args') -> run $ removeRole args'
("newinstance": args') -> run $ addInstance' args'
("addinstance": args') -> run $ userAddInstance args'
("removeinstance": args') -> run $ userRemoveInstance args'
("deactivateuser": args') -> run $ userDeactivate' args'
("reactivateuser": args') -> run $ userReactivate args'
["run"] -> do
secrets <- getSecrets confFile
liftIO $ Warp.run 80 (logM $ serveAPI pool noncePool conf secrets)
_ -> liftIO $ do
hPutStrLn stderr
"Usage: auth-service [run|adduser|chpass|addrole|rmrole|newinstance|addinstance|removeinstance|\
          \deactivateuser|reactivateuser] [options]"
exitFailure
-- Compares the schema to the SQL server and prints out necessary changes. For
-- development.
checkMigration :: IO ()
checkMigration = runStderrLoggingT $ do
withPostgresqlConn "host=localhost user=postgres" $ \(conn :: SqlBackend) -> do
runReaderT (printMigration migrateAll) conn
| nejla/auth-service | service/src/Run.hs | bsd-3-clause | 4,508 | 0 | 23 | 1,529 | 1,074 | 562 | 512 | -1 | -1 |
{-# LANGUAGE TypeSynonymInstances,
FlexibleInstances,
FlexibleContexts #-}
module Obsidian.MonadObsidian.Tools where
import Control.Monad.State
import Obsidian.MonadObsidian.Exp
import Obsidian.MonadObsidian.IC
import Obsidian.MonadObsidian.Types
import Obsidian.MonadObsidian.GPUMonad
import Obsidian.MonadObsidian.Arr
import Obsidian.MonadObsidian.PureAPI
import qualified Data.Map as Map
type Dim = Int
--------------------------------------------------------------------------------
data SourceState = SourceState {st :: SymbolTable, count :: Int}
type Src a = State SourceState a
updateSymT :: Name -> Type -> Int -> Src (Name,Int)
updateSymT name t len = do
s <- get
let c = count s
name' = name ++ show c
st' = Map.insert name' (t,len) (st s)
put (SourceState {st = st', count = (c + 1)} )
return (name',len)
--------------------------------------------------------------------------------
class Sources' a where
mkSrc :: Name -> Dim -> Src a
instance Sources' (GArr IntE) where
mkSrc name x = do
(name',len') <- updateSymT name (Global_Array Int) x
return $ mkArray (\ix -> index name' ix) (len')
instance Sources' (GArr FloatE) where
mkSrc name x = do
(name',len') <- updateSymT name (Global_Array Float) x
return $ mkArray (\ix -> index name' ix) (len')
instance (Sources' (GArr a), Sources' (GArr b)) =>
Sources' (GArr (a,b)) where
mkSrc name x = do
a1 <- mkSrc name x
a2 <- mkSrc name x
return $ (fst . runGPU . (pure zipp)) (a1,a2)
--------------------------------------------------------------------------------
class Sources a where
mkT :: Name -> Dim -> SymbolTable -> (SymbolTable, a)
instance Sources (GArr IntE) where
mkT name x st = (
Map.insert name (Global_Array Int,x) st,
mkArray (\ix -> index name ix) (x)
)
instance Sources (GArr FloatE) where
mkT name x st = (
Map.insert name (Global_Array Float,x) st,
mkArray (\ix -> index name ix) (x)
)
instance Sources (GArr BoolE) where
mkT name x st = (
Map.insert name (Global_Array Bool,x) st,
mkArray (\ix -> index name ix) (x)
)
instance (Sources (GArr a), Sources (GArr b)) => Sources (GArr (a,b)) where
mkT name x st = (st'',
fst (runGPU ((pure zipp) (arr1,arr2)))
)
where
(st' ,arr1) = mkT (name ++ "1") x st
(st'',arr2) = mkT (name ++ "2") x st'
instance (Sources (GArr a), Sources (GArr b)) => Sources (GArr a,GArr b) where
mkT name x st = (
st'',
(arr1,arr2)
)
where
(st',arr1) = mkT (name ++ "1") (x) st
(st'',arr2) = mkT (name ++ "2") (x) st'
--------------------------------------------------------------------------------
{-
getState :: Sources a => (a -> W b) -> S
getState a = runWs (a t) st
where
(st,t) = mkT "source" "n" empty
getIC :: Sources a => (a -> W b) -> IC
getIC a = snd (runW (a t)) (variable "tid")
where
(st,t) = mkT "source" "n" empty
getResult :: Sources a => (a -> W b) -> b
getResult a = fst (runW (a t))
where
(st,t) = mkT "source" "n" empty
-}
initState = SourceState {st = Map.empty, count = 0}
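-- 'getState', 'getIC' and 'getResult' run a program on a synthetic input of
-- length n (built via 'mkSrc' with base name "source") and project out,
-- respectively, the resulting state, the generated intermediate code and the
-- program's pure result.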
getState :: Sources' a => (a -> GPU b) -> Int -> S
getState a n = runGPUs (a t) st'
where
(t,srct) = runState (mkSrc "source" n) initState
st' = st srct
getIC :: Sources' a => (a -> GPU b) -> Int -> IC
getIC a n = snd (runGPU (a t)) -- (variable "tid")
where
t = evalState (mkSrc "source" n) initState
getResult :: Sources' a => (a -> GPU b) -> Int -> b
getResult a n = fst (runGPU (a t))
where
t = evalState (mkSrc "source" n) initState
--------------------------------------------------------------------------------
class Names a where
names :: a -> [Name]
instance Names (GArr (Exp a)) where
names arr = [name $ unE (arr ! (variable "tid"))]
where name (Index x _) = x
          name _ = error "In Names (GArr (Exp a))"
instance Names (GArr (Exp a,Exp b)) where
names arr = [name $ unE $ fst (arr ! (variable "tid"))] ++
[name $ unE $ snd (arr ! (variable "tid"))]
where name (Index x _) = x
          name _ = error "In Names (GArr (Exp a, Exp b))"
{-
getExp :: ObsType a => (a -> W b) -> b
getExp a = (fst (runW (a array))) ! ix
where
array = mkT "source"
ix = variable "tid" Int
-}
| svenssonjoel/MonadObsidian | Obsidian/MonadObsidian/Tools.hs | bsd-3-clause | 4,757 | 0 | 13 | 1,460 | 1,538 | 809 | 729 | 86 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE KindSignatures #-}
module System.DirWatch.PreProcessor (
PreProcessor
, PreProcessorT
, runPreProcessor
, getTime
, getLbs
, yieldSource
, yieldConduit
, yieldFilePath
, yieldFileName
, (=>=)
) where
import Control.Applicative (Applicative)
import Control.Monad (forM_)
import Control.Monad.Trans (lift)
import Control.Monad.Trans.Resource (MonadResource)
import Control.Monad.Reader (asks, ReaderT, runReaderT)
import Control.Monad.Writer (MonadWriter(tell), WriterT, execWriterT)
import Data.Typeable (Typeable)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy as LBS
import Data.Time (UTCTime)
import Data.Conduit (Conduit, Source, (=$=), ($$))
import qualified Data.Conduit.Binary as CB
import System.DirWatch.Types (HasCurrentTime(..))
import System.FilePath.Posix (takeFileName)
type PreProcessor m = FilePath -> PreProcessorT m ()
type PreProcessorOutput m = [(FilePath, Source m ByteString)]
data PreProcessorEnv m
= PreProcessorEnv {
ppeTime :: UTCTime
, ppeSource :: Source m ByteString
} deriving Typeable
newtype PreProcessorT (m :: * -> *) a
= PreProcessorT {
unPreProcessorT :: WriterT (PreProcessorOutput m) (
ReaderT (PreProcessorEnv m) m
) a
} deriving (Functor, Applicative, Monad, Typeable)
runPreProcessor
:: Monad m
=> Source m ByteString -> UTCTime -> PreProcessorT m ()
-> m (PreProcessorOutput m)
runPreProcessor source time
= flip runReaderT env . execWriterT . unPreProcessorT
where env = PreProcessorEnv {ppeTime=time, ppeSource=source}
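-- A hypothetical usage sketch (@someSource@ and @now@ are illustrative names,
-- not part of this module):
--
-- > outputs <- runPreProcessor someSource now (yieldFileName "/incoming/data.csv")
-- > -- outputs == [("data.csv", someSource)]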
instance Monad m => HasCurrentTime (PreProcessorT m) where
getTime = PreProcessorT (asks ppeTime)
getSource :: Monad m => PreProcessorT m (Source m ByteString)
getSource = PreProcessorT (asks ppeSource)
getLbs :: MonadResource m => PreProcessorT m LBS.ByteString
getLbs = PreProcessorT (asks ppeSource) >>= liftProcessor . ($$ CB.sinkLbs)
yieldSource :: Monad m => FilePath -> Source m ByteString -> PreProcessorT m ()
yieldSource filepath source = PreProcessorT $ tell [(filepath, source)]
yieldFilePath :: Monad m => FilePath -> PreProcessorT m ()
yieldFilePath filepath = getSource >>= yieldSource filepath
yieldFileName :: Monad m => PreProcessor m
yieldFileName = yieldFilePath . takeFileName
yieldConduit
:: Monad m
=> FilePath -> Conduit ByteString m ByteString -> PreProcessorT m ()
yieldConduit filepath conduit
= getSource >>= yieldSource filepath . (=$= conduit)
infixr 0 =>=
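-- | Compose two preprocessors: @ppA '=>=' ppB@ runs @ppA@ on the input and
-- then feeds every (path, source) pair it yielded through @ppB@, yielding
-- @ppB@'s outputs.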
(=>=) :: Monad m => PreProcessor m -> PreProcessor m -> PreProcessor m
ppA =>= ppB = \filepath -> do
time <- getTime
src <- getSource
pairsA <- liftProcessor $ runPreProcessor src time (ppA filepath)
forM_ pairsA $ \(filepathA, srcA) -> do
pairsB <- liftProcessor $ runPreProcessor srcA time (ppB filepathA)
mapM_ (uncurry yieldSource) pairsB
{-# INLINE (=>=) #-}
liftProcessor :: Monad m => m a -> PreProcessorT m a
liftProcessor = PreProcessorT . lift . lift
| meteogrid/dropdirwatch | src/System/DirWatch/PreProcessor.hs | bsd-3-clause | 3,045 | 0 | 16 | 507 | 938 | 512 | 426 | 77 | 1 |
{-# LANGUAGE TypeFamilies, OverloadedStrings #-}
module Main(main) where
import Theory
import Dirs
import SolutionMap
import TaskGroup
import SiteState
import StorageBackend
import Control.Monad
import Control.Exception
import Data.List
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Aeson as JS
import Data.Aeson ((.=))
import qualified Data.ByteString.Lazy.Char8 as BS
import System.Directory
import System.FilePath
import qualified Data.Map as Map
import Data.Map ( Map )
main :: IO ()
main =
do s <- newSiteState (disableWrite localStorage)
(disableWrite localSharedStorage)
cats <- loadCats
fs <- listFuns s FilterAll
infos <- mapM (getFunInfo s cats) fs
BS.putStrLn $ BS.append "var data = "
$ JS.encode [ summarize i | Right i <- infos ]
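-- | Summarise one function's tasks as a JSON object: solved tasks are split
-- into automatically and manually solved ones, and the completion percentage
-- is computed over all tasks (solved and unsolved).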
summarize :: (Name, [TaskInfo]) -> JS.Value
summarize (nm,tasks) =
JS.object
[ "name" .= nm
, "percent" .= percent
, "autoSolve" .= autoLen
, "otherSolve" .= otherLen
, "todo" .= totLen
, "solutions" .= map jsTask solved
]
where
(unsolved,solved) = partition (null . taskSlns) tasks
(auto,nonAuto) = partition taskAuto solved
autoLen = length auto
otherLen = length nonAuto
doneLen = autoLen + otherLen
totLen = doneLen + length unsolved
percent = if totLen == 0 then 100 else div (100 * doneLen) totLen
jsTask t = JS.object [ "tags" .= taskTags t
, "slns" .= map (show . ppE) (taskSlns t)
, "category" .= taskCat t
, "auto" .= taskAuto t
, "label" .= taskLabel t
]
getFunInfo :: (Readable (Local s), Readable (Shared s)) =>
SiteState s -> Map (FunName,TaskGroup,Text) String ->
FunName -> IO (Either SomeException (Name, [TaskInfo]))
-- real name, and a list of solutions for each task
getFunInfo s cats f =
try $
do tgd <- loadTaskGroupData s f
name <- getRealName s f
let safetyTaskNames = taskNames f SafetyLevels tgd
es <- forM safetyTaskNames $ \tn ->
do slnNames <- listSolutions s tn
tags <- listTags s tn
let auto = any (`elem` [ "copy-up"
, "copy-multi"
, "only-freebies-simple"
, "only-freebies-advanced"
]) tags
es <- forM slnNames $ \sln ->
do (_,_,e) <- loadSolutionPre s tn sln
return e
return TaskInfo { taskTags = tags
, taskSlns = es
, taskCat = Map.lookup (taskFun tn
,taskGroup tn
,taskName tn) cats
, taskAuto = auto
, taskLabel = Text.unpack (showTaskGroup
(taskGroup tn)) ++ " / " ++
Text.unpack (taskName tn)
}
return (name,es)
data TaskInfo = TaskInfo
{ taskTags :: [String]
, taskSlns :: [Expr]
, taskCat :: Maybe String
, taskAuto :: Bool
, taskLabel :: String
}
loadCats :: IO (Map (FunName,TaskGroup,Text) String)
loadCats =
fmap (Map.fromList . concat . concat . concat) $
do let top = "queues" </> "solved"
cats <- list top
forM cats $ \cat ->
do let catPath = top </> cat
funs <- list catPath
forM funs $ \fun ->
do let funPath = catPath </> fun
tgs <- list funPath
forM tgs $ \tgName ->
do tg <- case parseTaskGroup (Text.pack tgName) of
Just tg -> return tg
Nothing -> fail $ "Failed to parse task group: "
++ show tgName
let tgPath = funPath </> tgName
tasks <- list tgPath
forM tasks $ \t ->
return ( (rawFunName fun, tg, Text.pack t), cat )
where
list d = do fs <- getDirectoryContents d
return (filter ((/= ".") . take 1) fs)
| GaloisInc/verification-game | web-prover/exes/MakeReport.hs | bsd-3-clause | 4,562 | 0 | 26 | 1,953 | 1,261 | 654 | 607 | -1 | -1 |
{- SVM main implementation area -}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE RankNTypes #-}
module Numeric.Algorithms.HSVO.SVM where
import Numeric.Algorithms.HSVO.Detail
import Numeric.Algorithms.HSVO.Types
import Control.Lens
import Data.Maybe (catMaybes)
import qualified Data.Vector as V
import Control.Monad.Reader --(MonadReader, Reader, ReaderT)
import Control.Monad.State --(MonadState, State, StateT)
import Control.Monad.Writer
import Control.Monad.Random
import System.Random
import System.Random.Shuffle
type SVMProblem a = WriterT String (ReaderT SVMParameters (StateT WorkingState (Rand StdGen))) a
runSVMProblem :: SVMProblem a -> SVMParameters -> WorkingState -> (((a, String), WorkingState), StdGen)
runSVMProblem prob params state = runRand (runStateT ( runReaderT (runWriterT prob ) params) state) (mkStdGen 10)
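-- A hypothetical unwrapping of the result (names are illustrative):
--
-- > let (((result, logText), finalState), _gen) = runSVMProblem prob params ws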
type RawFeatures = [Double]
makeParams :: Maybe SVMParameters -> SVMParameters
makeParams (Just p) = p
makeParams Nothing = testParams
{-| High-level function to fit an SVM; takes in the training data and labels -}
fitSVM' :: [RawFeatures] -> [ClassLabel] -> Maybe SVMParameters -> Maybe SVMParameters
fitSVM' trainingData trainingLabels initParams = do
supVectors <- makeSupportVectors trainingData trainingLabels
fitSVM initParams supVectors
{-| Given a list of support vectors, solve to find the parameters -}
fitSVM :: Maybe SVMParameters -> [TrainingSupportVector] -> Maybe SVMParameters
fitSVM initParams tsvList = do
maxRounds <- case initParams of
Nothing -> pure 100
Just params -> pure $ params ^. maxIters
newParams <- pure $ (extractParams . mainLoop) maxRounds
runSVMProblem newParams (makeParams initParams) (WorkingState tsvList) ^. _1 ._1 . _1
{-| Extract the SVMParameters out of the transformer -}
extractParams :: SVMProblem Bool -> SVMProblem (Maybe SVMParameters)
extractParams m = do
test <- m
workState <- get
params <- ask
supVecs <- pure $ (filterSupVectors params) $ focusOnSupportVector workState
if not test
then pure Nothing
else pure $ Just params
{-| Remove any support vectors whose weight is zero from the list -}
filterSupVectors :: SVMParameters -> [SupportVector] -> [SupportVector]
filterSupVectors params = filter (filterVec params)
{-| Determine if a supVector should be filtered out -}
filterVec :: SVMParameters -> SupportVector -> Bool
filterVec params sv = abs (sv ^. alpha) >= params ^. epsillon
{- Wrapper on filterVec to take training support vectors -}
filterTSV :: SVMParameters -> TrainingSupportVector -> Bool
filterTSV params tsv = filterVec params $ tsv ^. supvec
-- To create a law-abiding filter, let's apply the filter, remember the indices,
-- and then use the indices to construct the lens. This way we filter what we
-- want without breaking any lens laws, although we incur a performance penalty;
-- an alternative would be to restructure the code to avoid this.
{-| A function that takes a working state and produces a filtered lens.
-}
--makeFilter :: SVMParameters -> WorkingState -> Optical' [TrainingSupportVector] (Indexed Int) f a a
--IndexedFold Int WorkingState [TrainingSupportVector]
helpLens
:: (Indexable Int p, Applicative f) =>
p SupportVector (f SupportVector) -> WorkingState -> f WorkingState
helpLens = vectorList . traversed <. supvec
{-makeFilter :: (Indexable Int p, Applicative f) =>
SVMParameters
-> WorkingState
-> p SupportVector (f SupportVector)
-> WorkingState
-> f WorkingState -}
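{-| Build a traversal that visits only the training vectors passing 'filterTSV':
    the indices of the passing elements are collected first and the traversal
    is then restricted to those indices. -}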
makeFilter :: SVMParameters -> WorkingState -> Traversal' WorkingState TrainingSupportVector
makeFilter params ws =
let
indexedSupVec = vectorList . itraversed -- :: (Indexable Int p, Applicative f) => p SupportVector (f SupportVector) -> WorkingState -> f WorkingState
targetIndicies = ifoldMapOf indexedSupVec
(\i a -> if filterTSV params a then [i] else []) ws
filt = ifiltered (\i _ -> i `elem` targetIndicies)
in
(vectorList . traversed ) . filt
{-| The actual main loop. Implemented as a recursive function. -}
mainLoop :: Int -> SVMProblem Bool
mainLoop remainingIters
| remainingIters == 0 = return False
| otherwise = do
converged <- takeStep
if converged then return True else mainLoop (remainingIters - 1 )
{-| Evaluate the SVM's current error -}
evalSVM :: SVMProblem Double
evalSVM = do
ws <- get
params <- ask
let
svList = ws ^.. vectorList . traversed. supvec
samples = ws ^.. vectorList . traversed . supvec . vector
trueLabels = ws ^.. vectorList . traversed . trueLabel
f = svm params svList
predictedLabels = map f samples
truePredPairs = zip trueLabels predictedLabels
modelError = foldl (\b (t, p) -> b + calcClassError t p ) 0.0 truePredPairs
pure modelError
{-| take a single step of the algorithm -}
takeStep :: SVMProblem Bool
takeStep = do
--workState <- get
params <- ask
shuffleVectors
ws <- get
trainError <- evalSVM
(newVector, intResult) <- pure $ solvePairs params $ filter (filterTSV params) $ ws ^. vectorList
vectorList .= newVector
  result <- if intResult
              then pure intResult
              else do
                (newPairsComplete, completeResult) <- pure $ solvePairs params $ ws ^. vectorList
                vectorList .= newPairsComplete
                pure completeResult
newTrainError <- evalSVM
  -- Consider the model converged if the change in training error is sufficiently small.
if abs (newTrainError - trainError) <= params ^. epsillon then pure False else pure result
{-| Shuffle vectors -}
shuffleVectors :: SVMProblem ()
shuffleVectors = do
ws <- get :: SVMProblem WorkingState
let
l = ws ^.. vectorList . traversed :: [TrainingSupportVector]
shuffled <- (shuffleM l) :: SVMProblem [TrainingSupportVector]
vectorList .= shuffled
{-| Walk the list and then attempt to improve the SVM. Don't forget to shuffle the list! -}
solvePairs :: SVMParameters -> [TrainingSupportVector] -> ([TrainingSupportVector], Bool)
solvePairs params (x:xs) = let
(tsv, _, success) = foldl (pairHelper params (x:xs) ) ([], Just x, False) xs
in
(tsv, success)
solvePairs _ [] = ([], False)
{-| Helper function for solvePairs, it will run over the pairs and determine appropriate output,
keeps a boolean to know if any of the pairs were successfully changed. -}
pairHelper :: SVMParameters
-> [TrainingSupportVector]
-> ([TrainingSupportVector], Maybe TrainingSupportVector, Bool)
-> TrainingSupportVector
-> ([TrainingSupportVector], Maybe TrainingSupportVector, Bool)
pairHelper _ _ (examined, Nothing, success) next =
(examined, Just next, success)
pairHelper params allVecs (examined, Just target, success) next =
case takeStepDetail params (WorkingState allVecs) target next of
Nothing -> ((next:examined), Just target, success)
Just (r1, r2) -> (r1: r2:examined, Nothing, True)
-- Note to self, we could use the lens "zoom" to implement the reduced set heuristic
-- from the paper....
-- Also, the order of the training examples is arbitrary... I just want to choose two
-- and work with them, then choose a different two untill the list is exhausted...
{-| Construct a list of supportVector objects from the raw data -}
{-| Note this initialises all the weights to 1. Could consider using a random number generator. -}
makeSupportVectors :: [RawFeatures] -> [ClassLabel] -> Maybe [TrainingSupportVector]
makeSupportVectors feat classes =
let
defTSV = constructTSV 1 (PredictClass1 1) 1 :: Sample -> ClassLabel -> TrainingSupportVector
inputs = zip (map (\x -> Sample x) feat) classes :: [(Sample, ClassLabel)]
tsvList = map (\(f, c) -> defTSV f c) inputs :: [TrainingSupportVector]
in
if (length feat) == (length classes) then
Just tsvList
else
Nothing
constructTSV :: Value
-> PredictedLabel
-> BaseScalar
-> Sample
-> ClassLabel
-> TrainingSupportVector
constructTSV defValue pLabel alpha x label =
TrainingSV {_trueLabel=label, _predLabel=pLabel,
_classError=defValue,
_supvec = SupportVector {_alpha=alpha, _vector=x}}
{-| High-level function to predict data given an SVM -}
predictSVM :: SVMParameters -> [RawFeatures] -> [PredictedLabel]
predictSVM params testData =
let
svs = params ^. supportVectors
svm' = svm params svs
in
map svm' (map (\x -> x ^. vector) svs)
-- Check to ensure that the current vectors are not already in the working set.
inSet :: WorkingState -> Double -> TrainingSupportVector -> Bool
inSet ws eps tsv =
let
vList = ws ^.. vectorList . traversed . supvec . vector
samp = tsv ^. supvec . vector
diff = map (vDiff samp) vList
filt = filter (\a -> a < eps) diff
in
length filt < 1
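{-| Squared Euclidean distance between two samples. -}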
vDiff :: Sample -> Sample -> Double
vDiff (Sample x1) (Sample x2) =
let
diff = map (\(a, b)-> abs (a-b)) $ zip x1 x2
sqr = map (\a -> a*a) diff
in
foldr (\a b -> a+b) 0 sqr
maybeInSet :: Double -> WorkingState -> TrainingSupportVector -> Maybe ()
maybeInSet eps ws sv | inSet ws eps sv = Just ()
maybeInSet _ _ _ = Nothing
maybePass :: Bool -> Maybe ()
maybePass True = Just ()
maybePass False = Nothing
testMaybe :: Bool -> Maybe ()
testMaybe True = Just ()
testMaybe False = Nothing
takeStepDetail :: SVMParameters
-> WorkingState
-> TrainingSupportVector
-> TrainingSupportVector
-> Maybe (TrainingSupportVector, TrainingSupportVector)
takeStepDetail params workState sv1 sv2 = --Just (sv1, sv2)
let
x1 = sv1^.supvec.vector
x2 = sv2^.supvec.vector
diff = sumVector (elementDifference x1 x2)
identical = abs (diff) < params^.epsillon
eps = params ^. epsillon
in
do
maybeInSet eps workState sv1
maybeInSet eps workState sv2
maybePass identical
(a2, a2clip) <- determineAlpha2 params sv1 sv2
a1 <- pure $ alpha1New sv1 sv2 (wrapScalar a2) (wrapScalar a2clip)
sv1' <- pure $ SupportVector {_alpha= a1, _vector=sv1^.supvec.vector}
sv2' <- pure $ SupportVector {_alpha=wrapScalar (a2), _vector=sv2^.supvec.vector}
-- Since workState doesn't contain sv1 and sv2 we can append them to a temp list and use it here
oldSvec <- pure $ focusOnSupportVector workState
newSvec <- pure $ concat [[sv1', sv2'], oldSvec]
pred1 <- pure $ svm params newSvec x1
pred2 <- pure $ svm params newSvec x2
finalSv1 <- pure $ constructTrainingVec sv1 pred1 sv1'
finalSv2 <- pure $ constructTrainingVec sv2 pred2 sv2'
      return (finalSv1, finalSv2) -- modify existing training vector
| johnny555/HSVO | src/Numeric/Algorithms/HSVO/SVM.hs | bsd-3-clause | 10,870 | 1 | 15 | 2,398 | 2,771 | 1,444 | 1,327 | -1 | -1 |
module Procfile.Types where
-- | Procfile, that consists of a list of `App`s.
type Procfile = [App]
-- | An environmental variable.
type Env = (String, String)
-- | An app in `Procfile`.
data App = App { name :: String
, cmd :: String
} deriving (Eq,Ord,Show)
| fujimura/houseman | src/Procfile/Types.hs | bsd-3-clause | 294 | 0 | 8 | 82 | 66 | 42 | 24 | 6 | 0 |
module Compile (compile) where
import Control.Monad.State hiding (State)
import Data.List
import Language.C
import Language.C.Data.Ident
import Error
import Model hiding (CInteger)
import qualified Model as M
import Utils
-- | Compiles a program to a model for analysis.
compile :: CTranslUnit -> IO Model
compile unit = do
m <- execStateT (evalStat initEnv $ rewrite unit) initMDB
return (model m) { initActions = reverse $ initActions (model m), loopActions = reverse $ loopActions (model m) }
none :: NodeInfo
none = internalNode
-- | Rewrites a program to a single statement.
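-- All top-level declarations and function definitions become block items of a
-- single compound statement, followed by a call to @main@.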
rewrite :: CTranslUnit -> CStat
rewrite (CTranslUnit items _) = CCompound [] (map f items ++ [CBlockStmt callMain]) none
where
f (CDeclExt a) = CBlockDecl a
f (CFDefExt a) = CNestedFunDef a
f a@(CAsmExt _) = notSupported a "inline assembly"
callMain :: CStat
callMain = CExpr (Just $ CCall (CVar (Ident "main" 0 none) none) [] none) none
type M = StateT MDB IO
data MDB = MDB
{ nextId' :: Int
, stage :: Stage
, stack :: [Ident]
, model :: Model
}
data Stage = Init | Loop | Done deriving Eq
initMDB :: MDB
initMDB = MDB
{ nextId' = 0
, stage = Init
, stack = []
, model = Model { initActions = [], loopActions = [] }
}
nextId :: M Int
nextId = do
m <- get
put m { nextId' = nextId' m + 1 }
return $ nextId' m
setStage :: Stage -> M ()
setStage a = do
m <- get
put m { stage = a }
getStage :: M Stage
getStage = do
m <- get
return $ stage m
-- | Environment for resolving identifiers.
type Env = Ident -> Thing
data Thing
= Type Type
| Variable V
| Function Env CFunDef
| Primitive Primitive
data Primitive
= PrimAssert
| PrimAssume
-- | Creates a branch.
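-- The pending action list for the current stage is temporarily cleared so that
-- the actions produced by each arm can be captured separately; the original
-- actions are then restored and a single 'Branch' action holding both arms is
-- appended.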
branch :: Position -> V -> M () -> M () -> M ()
branch p a onTrue onFalse = do --XXX What happens if infinite loop is called within branch?
m1 <- get
case stage m1 of
Init -> put m1 { model = (model m1) { initActions = [] } }
Loop -> put m1 { model = (model m1) { loopActions = [] } }
Done -> unexpected' p "statements after infinite loop"
onTrue
m2 <- get
case stage m2 of
Init | stage m1 == Init -> put m1 { model = (model m2) { initActions = [] } }
Loop | stage m1 == Loop -> put m1 { model = (model m2) { loopActions = [] } }
_ -> unexpected' p "infinite loop in branch"
onFalse
m3 <- get
case stage m3 of
Init | stage m1 == Init && stage m2 == Init -> do
put m3 { model = (model m3) { initActions = initActions $ model m1 } }
newAction $ Branch a (reverse $ initActions $ model m2) (reverse $ initActions $ model m3) p
Loop | stage m1 == Loop && stage m2 == Loop -> do
put m3 { model = (model m3) { loopActions = loopActions $ model m1 } }
newAction $ Branch a (reverse $ loopActions $ model m2) (reverse $ loopActions $ model m3) p
_ -> unexpected' p "infinite loop in branch"
-- | Push an identifier onto the call stack, do something, then pop it off.
callStack :: Ident -> M a -> M a
callStack id a = do
m <- get
put m { stack = id : stack m }
a <- a
m <- get
put m { stack = tail $ stack m }
return a
callPath :: M String
callPath = do
m <- get
let s = stack m
return $ intercalate "." [ n | Ident n _ _ <- reverse $ tail s ]
-- | The initial environment defines the assert and assume functions.
initEnv :: Env
initEnv a@(Ident name _ _) = case name of
"assert" -> Primitive PrimAssert
"assume" -> Primitive PrimAssume
_ -> unexpected' a "name not found"
-- | Adds new action.
newAction :: Action -> M ()
newAction a = do
m <- get
case stage m of
Init -> put m { model = (model m) { initActions = a : initActions (model m) }}
Loop -> put m { model = (model m) { loopActions = a : loopActions (model m) }}
Done -> error "Compile.newAction"
-- | Adds a new 'Thing' to an environment.
addThing :: String -> Thing -> Env -> Env
addThing name thing env i@(Ident name' _ _)
| name == name' = thing
| otherwise = env i
-- | Adds new variable.
addVar :: V -> Env -> Env
addVar v env = addThing name (Variable v) env
where
name = case v of
State (VS a _ _ _) -> a
Volatile a _ _ -> a
Local a _ _ _ -> a
_ -> error "Compile.addVar: should not call addVar with Tmp or Branch"
evalStat :: Env -> CStat -> M ()
evalStat env a = do
stage <- getStage
when (stage == Done) $ unexpected a "statements after infinite loop"
case a of
CFor (Left Nothing) Nothing Nothing a _ | stage == Init -> do setStage Loop >> evalStat env a >> setStage Done
CWhile (CConst (CIntConst (CInteger 1 _ _) _)) a _ _ | stage == Init -> do setStage Loop >> evalStat env a >> setStage Done
CLabel i a [] _ -> callStack i $ evalStat env a
CCompound ids items _ -> f ids
where
f :: [Ident] -> M ()
f [] = foldM evalBlockItem env items >> return ()
f (a:b) = callStack a $ f b
CIf a b Nothing n -> evalStat env $ CIf a b (Just $ CCompound [] [] n) n
CIf a b (Just c) n -> do
a <- latchBool (posOf n) $ evalExpr env a
branch (posOf n) a (evalStat env b) (evalStat env c)
return ()
CExpr Nothing _ -> return ()
CExpr (Just (CAssign op a b n)) _ -> case op of
CAssignOp -> case evalExpr env a of
Var v -> assign (posOf n) v $ evalExpr env b
_ -> unexpected a "non variable in left hand of assignment"
CMulAssOp -> f CMulOp
CDivAssOp -> f CDivOp
CRmdAssOp -> f CRmdOp
CAddAssOp -> f CAddOp
CSubAssOp -> f CSubOp
CShlAssOp -> f CShlOp
CShrAssOp -> f CShrOp
CAndAssOp -> f CAndOp
CXorAssOp -> f CXorOp
COrAssOp -> f COrOp
where
f :: CBinaryOp -> M ()
f op = evalStat env (CExpr (Just (CAssign CAssignOp a (CBinary op a b n) n)) n)
CExpr (Just (CCall (CVar f _) args _)) _ -> apply env' func' (map (evalExpr env) args) >> return ()
where
(env', func') = case env f of
Function env func -> (env, func)
_ -> unexpected' f "environment returned something other than a function"
CExpr (Just (CCall _ _ _)) _ -> notSupported a "non named function references"
CExpr (Just (CUnary op a n1)) n2 | elem op [CPreIncOp, CPostIncOp] -> evalStat env (CExpr (Just (CAssign CAddAssOp a one n1)) n2)
| elem op [CPreDecOp, CPostDecOp] -> evalStat env (CExpr (Just (CAssign CSubAssOp a one n1)) n2)
where
one = CConst $ CIntConst (cInteger 1) n1
_ -> notSupported a "statement"
apply :: Env -> CFunDef -> [E] -> M (Maybe E)
apply env func args = return Nothing --XXX
where
--callStack ...
evalBlockItem :: Env -> CBlockItem -> M Env
evalBlockItem env a = case a of
CBlockStmt a -> evalStat env a >> return env
CBlockDecl a -> evalDecl env a
CNestedFunDef a -> return $ addThing name (Function env a) env
where
(_, (Ident name _ _), _, _) = functionInfo a
-- No stateful operations for expressions.
evalExpr :: Env -> CExpr -> E
evalExpr env a = case a of
CCond a (Just b) c n -> Mux (evalExpr env a) (evalExpr env b) (evalExpr env c) $ posOf n
CCond a Nothing b n -> Or (evalExpr env a) (evalExpr env b) $ posOf n
CBinary op a' b' n -> case op of
CMulOp -> Mul a b p
CDivOp -> Div a b p
CRmdOp -> Mod a b p
CAddOp -> Add a b p
CSubOp -> Sub a b p
CShlOp -> notSupported' a "(<<)"
CShrOp -> notSupported' a "(>>)"
CLeOp -> Lt a b p
CGrOp -> Lt b a p
CLeqOp -> Not (Lt b a p) p
CGeqOp -> Not (Lt a b p) p
CEqOp -> Eq a b p
CNeqOp -> Not (Eq a b p) p
CAndOp -> notSupported' a "(&)"
CXorOp -> notSupported' a "(^)"
COrOp -> notSupported' a "(|)"
CLndOp -> And a b p
CLorOp -> Or a b p
where
a = evalExpr env a'
b = evalExpr env b'
p = posOf n
CUnary op a' n -> case op of
CPlusOp -> a
CMinOp -> Sub zero a p
CNegOp -> Not a p
--(CAdrOp, a) -> return $ Ref a p
--(CIndOp, a) -> return $ Deref a p
_ -> notSupported' n "unary operator"
where
a = evalExpr env a'
p = posOf n
zero = Const $ M.CInteger 0 p
CVar i _ -> case env i of
Variable v -> Var v
_ -> unexpected' i "environment returned non variable"
CConst a -> case a of
CIntConst (CInteger a _ _) n -> Const $ M.CInteger a $ posOf n
CFloatConst (CFloat a) n -> Const $ CRational (toRational (read a :: Double)) $ posOf n
_ -> notSupported a "char or string constant"
_ -> notSupported a "expression"
evalDecl :: Env -> CDecl -> M Env
evalDecl env d@(CDecl specs decls _) = if isExtern typInfo then return env else foldM evalDecl' env decls
where
(typInfo, typ) = typeInfo specs
evalDecl' :: Env -> (Maybe CDeclr, Maybe CInit, Maybe CExpr) -> M Env
evalDecl' env (a, b, c) = case a of
Just (CDeclr (Just i@(Ident name _ n)) [] Nothing [] _) -> case (b, c) of
(Nothing, Nothing) -> evalDecl' env (a, Just $ CInitExpr (CConst (CIntConst (cInteger 0) n)) n, Nothing)
(Just (CInitExpr (CConst const) n'), Nothing) | isStatic typInfo && not (isVolatile typInfo) -> return $ addVar v env
where
v = State $ VS name typ init $ posOf n
init = case typ of
Void -> unexpected d "void type for variable declaration"
Ptr _ -> notSupported d "pointer types"
Bool -> CBool (cInt /= 0) $ posOf n'
Integer _ -> M.CInteger cInt $ posOf n'
Rational _ -> CRational cRat $ posOf n'
cInt :: Integer
cInt = case const of
CIntConst (CInteger a _ _) _ -> a
_ -> unexpected const "non integer initialization"
cRat :: Rational
cRat = case const of
CIntConst (CInteger a _ _) _ -> fromIntegral a
CFloatConst (CFloat a) _ -> fromIntegral (read a :: Integer)
_ -> unexpected const "non numeric initialization"
(Just (CInitExpr c _), Nothing) -> evalDecl' env (a, Nothing, Just c)
(Nothing, Just e) -> do
v <- if isVolatile typInfo
then return $ Volatile name typ $ posOf n
else do
i <- nextId
return $ Local name typ i p
declare p v $ evalExpr env e
return $ addVar v env
where
p = posOf e
_ -> notSupported' i "variable declaration"
-- Arrays.
Just (CDeclr (Just ident) (CArrDeclr _ (CArrSize _ (CConst (CIntConst size _))) _ : _) _ _ _) -> return env --XXX
-- Ignore function prototypes.
Just (CDeclr _ (CFunDeclr _ _ _ :_) _ _ _) -> return env
_ -> notSupported' d "arrays, pointers, or functional pointers (So what good is this tool anyway?)"
{-
bindArg :: Env -> ((Ident, (TypeInfo, Type)), E) -> M Env
bindArg env ((Ident name _ n, (typInfo, typ)), a) = if isVolatile typInfo
then return $ addVar (Volatile name typ (posOf n)) env
else do
i <- nextId
let v = Local name typ i (posOf n)
declare (posOf n) v a
return $ addVar v env
funcArgs :: CDecl -> [(Ident, (TypeInfo, Type))]
funcArgs (CDecl specs decls n) = map f decls
where
t = typeInfo specs
f (Just (CDeclr (Just i) [] Nothing [] _), Nothing, Nothing) = (i, t)
f _ = notSupported' n "function argument"
-}
-- | Declares a new variable with a value.
declare :: Position -> V -> E -> M ()
declare n v a = case v of
Volatile _ _ _ -> return ()
_ -> newAction $ Declare v a n
-- | Assigns a value to an existing variable.
assign :: Position -> V -> E -> M ()
assign n v a = case v of
Volatile _ _ _ -> return ()
_ -> newAction $ Assign v a n
-- | Latch a value at a point in time.
latchBool :: Position -> E -> M V
latchBool n a = do
i <- nextId
let v = Tmp Bool i n
declare n v a
return v
-- | Extract relevant info from a function declaration.
functionInfo :: CFunDef -> ([CDeclSpec], Ident, [CDecl], CStat)
functionInfo (CFunDef specs (CDeclr (Just ident) [(CFunDeclr (Right (args, False)) _ _)] Nothing [] _) [] stmt _) = (specs, ident, args, stmt) --XXX What is the False?
functionInfo f = notSupported f "function"
| tomahawkins/afv | src/Compile.hs | bsd-3-clause | 12,002 | 0 | 22 | 3,371 | 4,711 | 2,309 | 2,402 | 264 | 30 |
module Web.EmbedThis.ServiceConfig where
| rtrvrtg/embed-this | src/Web/EmbedThis/ServiceConfig.hs | bsd-3-clause | 42 | 0 | 3 | 4 | 7 | 5 | 2 | 1 | 0 |
module Text.EBNF (syntax) where
import Text.EBNF.Informal (syntax)
main :: IO()
main = putStrLn "this library is queer"
| Lokidottir/ebnf-bff | src/Text/EBNF.hs | mit | 122 | 0 | 6 | 19 | 40 | 23 | 17 | 4 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
module Qi.Test.Ipret where
import Control.Lens
import Control.Monad.Freer
import Control.Monad.Freer.Reader
import Control.Monad.Freer.State
import Control.Monad.Freer.Writer
import qualified Data.ByteString.Lazy.Char8 as LBS
import Data.Default (Default, def)
import qualified Data.HashMap.Strict as SHM
import Network.AWS.Types
import Protolude hiding (Reader, State, asks, get,
log, put, runReader, runState)
import Qi.CLI.Dispatcher
import Qi.Config.AWS
import Qi.Config.AWS.S3
import Qi.Config.Identifier
import qualified Qi.Program.CF.Ipret.Gen as CF
import Qi.Program.CF.Lang
import qualified Qi.Program.Config.Ipret.State as Config
import Qi.Program.Config.Lang
import Qi.Program.Gen.Lang
import qualified Qi.Program.Lambda.Ipret.Gen as Lbd
import qualified Qi.Program.S3.Ipret.Gen as S3
import Qi.Program.S3.Lang
import qualified Qi.Program.Wiring.IO as IO
import Test.Tasty.Hspec
data Journal = Journal {
cfActions :: [ CfAction ]
, s3Actions :: [ S3Action ]
, genActions :: [ GenAction ]
, logs :: [ Text ]
}
deriving (Eq, Show)
instance Semigroup Journal where
Journal cf1 s31 gen1 ls1 <> Journal cf2 s32 gen2 ls2 =
Journal (cf1 <> cf2) (s31 <> s32) (gen1 <> gen2) (ls1 <> ls2)
instance Monoid Journal where
mempty = Journal mempty mempty mempty mempty
instance Default Journal where
def = mempty
data GenAction =
AmazonkaAction
deriving (Eq, Show)
data CfAction =
CreateStackAction StackName LBS.ByteString
| UpdateStackAction StackName LBS.ByteString
| DeleteStackAction StackName
| DescribeStacksAction
deriving (Eq, Show)
data S3Action =
PutContentAction S3Object LBS.ByteString
| DeleteBucketAction Text
deriving (Eq, Show)
data Params = Params {
config :: Config
, stacks :: StackDescriptionDict
}
instance Default Params where
def = Params {
config = def
, stacks = mempty
}
testGenRun
:: forall effs a
. (Members '[ ConfigEff, Writer Journal ] effs)
=> (Eff (GenEff ': effs) a -> Eff effs a)
testGenRun = interpret (\case
GetAppName ->
(^. namePrefix) <$> getConfig
Http mgrSettings _req ->
panic "Http"
RunServant _mgrSettings _baseUrl _req ->
panic "RunServant"
Amazonka _svc (_req :: b) -> do
genAction AmazonkaAction
pure (panic "response cannot be evaluated" :: (Rs b))
AmazonkaPostBodyExtract _svc _req _post ->
panic "AmazonkaPostBodyExtract"
Say msg ->
log msg
-- panic "Say"
GetCurrentTime ->
panic "GetCurrentTime"
Sleep _us ->
panic "Sleep"
Build ->
panic "Build"
ReadFileLazy _path ->
panic "ReadFileLazy"
{- GetLine -> -}
{- panic "GetLine" -}
PutStr _content ->
panic "PutStr"
)
testCfRun
:: forall effs a
. (Members '[ ConfigEff, Reader Params, Writer Journal ] effs)
=> (Eff (CfEff ': effs) a -> Eff effs a)
testCfRun = interpret (\case
CreateStack name template -> do
cfAction $ CreateStackAction name template
UpdateStack name template -> do
cfAction $ UpdateStackAction name template
DeleteStack name ->
panic "DeleteStack"
DescribeStacks -> do
cfAction $ DescribeStacksAction
asks stacks
WaitOnStackStatus name status isAbsentOk -> do
-- asks stackdescs
pure ()
)
testS3Run
:: forall effs a
. (Members '[ ConfigEff, Reader Params, Writer Journal ] effs)
=> (Eff (S3Eff ': effs) a -> Eff effs a)
testS3Run = interpret (\case
GetContent S3Object{ _s3oBucketId, _s3oKey } ->
panic "GetContent"
PutContent s3Obj@S3Object{_s3oBucketId, _s3oKey } payload -> do
s3Action $ PutContentAction s3Obj payload
ListObjects id maybeToken -> do
panic "ListObjects"
DeleteObject s3Obj@S3Object{_s3oBucketId, _s3oKey } -> do
{- config <- getConfig -}
{- getName _s3oBucketId -}
{- tell $ s3Action $ DeleteBucketAction name -}
panic "DeleteObject"
DeleteObjects s3objs -> do
panic "DeleteObjects"
)
testRun
:: Params
-> Eff '[ CfEff, S3Eff, GenEff, ConfigEff, State Config, Reader Params, Writer Journal ] ()
-> (Config, Journal)
testRun params@Params{ config } = run
. runWriter
. runReader params
. map snd . runState config
. Config.run
. testGenRun
{- . Lbd.run -}
. testS3Run
. testCfRun
genAction
:: Member (Writer Journal) effs
=> GenAction
-> Eff effs ()
genAction action = tell $ def{ genActions = [action]}
cfAction
:: Member (Writer Journal) effs
=> CfAction
-> Eff effs ()
cfAction action = tell $ def{ cfActions = [action]}
s3Action
:: Member (Writer Journal) effs
=> S3Action
-> Eff effs ()
s3Action action = tell $ def{ s3Actions = [action]}
log
:: Member (Writer Journal) effs
=> Text
-> Eff effs ()
log msg = tell $ def{ logs = [msg]}
| ababkin/qmuli | tests/Qi/Test/Ipret.hs | mit | 5,537 | 0 | 15 | 1,493 | 1,486 | 814 | 672 | 161 | 11 |
module Main (main) where
-- | Run quickly with `make test`
-- Framework
import Test.Framework (Test, defaultMain)
import Test.Framework.Providers.QuickCheck2 (testProperty)
-- Module Tests
import qualified Codec.Encryption.Historical.Caesar.Test as Caesar
import qualified Codec.Encryption.Historical.XOR.Test as XOR
import qualified Codec.Encryption.Historical.Utilities.Test as Utilities
-- Main
main :: IO ()
main = defaultMain tests
-- Documentation and Sanity Checking
sanityCheck :: Test
sanityCheck = testProperty "Sanity Check" (\ a -> (*2) a == ((*2) a :: Int))
-- Run All Module Tests
tests :: [Test]
tests = [ sanityCheck
, Utilities.test
, Caesar.test
, XOR.test
]
| beni55/Historical-Cryptography | TestSuite.hs | mit | 746 | 0 | 11 | 159 | 169 | 110 | 59 | 15 | 1 |
--
--
--
-----------------
-- Exercise 5.15.
-----------------
--
--
--
module E'5'15 where
-- GHCi> [0, 0.1 .. 1]
-- [0.0, 0.1, 0.2, 0.30000000000000004, 0.4000000000000001, 0.5000000000000001, 0.6000000000000001, 0.7000000000000001, 0.8, 0.9, 1.0]
-- Floating point issues (see IEEE 754 documentation, Haskell documentation, etc.).
{- GHCi>
1 - 0.9
-}
-- 9.999999999999998e-2
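-- Neither 0.1 nor 0.9 has an exact binary (Double) representation, so the
-- rounding error shows up both in the enumeration above and in 1 - 0.9.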
| pascal-knodel/haskell-craft | _/links/E'5'15.hs | mit | 390 | 0 | 2 | 63 | 18 | 17 | 1 | 1 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.CloudFront.CreateStreamingDistribution
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Create a new streaming distribution.
--
-- /See:/ <http://docs.aws.amazon.com/AmazonCloudFront/latest/APIReference/CreateStreamingDistribution.html AWS API Reference> for CreateStreamingDistribution.
module Network.AWS.CloudFront.CreateStreamingDistribution
(
-- * Creating a Request
createStreamingDistribution
, CreateStreamingDistribution
-- * Request Lenses
, csdStreamingDistributionConfig
-- * Destructuring the Response
, createStreamingDistributionResponse
, CreateStreamingDistributionResponse
-- * Response Lenses
, csdrsETag
, csdrsLocation
, csdrsStreamingDistribution
, csdrsResponseStatus
) where
import Network.AWS.CloudFront.Types
import Network.AWS.CloudFront.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | The request to create a new streaming distribution.
--
-- /See:/ 'createStreamingDistribution' smart constructor.
newtype CreateStreamingDistribution = CreateStreamingDistribution'
{ _csdStreamingDistributionConfig :: StreamingDistributionConfig
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'CreateStreamingDistribution' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'csdStreamingDistributionConfig'
createStreamingDistribution
:: StreamingDistributionConfig -- ^ 'csdStreamingDistributionConfig'
-> CreateStreamingDistribution
createStreamingDistribution pStreamingDistributionConfig_ =
CreateStreamingDistribution'
{ _csdStreamingDistributionConfig = pStreamingDistributionConfig_
}
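-- A hypothetical usage sketch (region, credentials and the distribution
-- configuration value are illustrative and depend on your amazonka setup):
--
-- > env <- newEnv Ireland Discover
-- > runResourceT . runAWS env $
-- >     send (createStreamingDistribution myStreamingDistributionConfig)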
-- | The streaming distribution\'s configuration information.
csdStreamingDistributionConfig :: Lens' CreateStreamingDistribution StreamingDistributionConfig
csdStreamingDistributionConfig = lens _csdStreamingDistributionConfig (\ s a -> s{_csdStreamingDistributionConfig = a});
instance AWSRequest CreateStreamingDistribution where
type Rs CreateStreamingDistribution =
CreateStreamingDistributionResponse
request = postXML cloudFront
response
= receiveXML
(\ s h x ->
CreateStreamingDistributionResponse' <$>
(h .#? "ETag") <*> (h .#? "Location") <*>
(parseXML x)
<*> (pure (fromEnum s)))
instance ToElement CreateStreamingDistribution where
toElement
= mkElement
"{http://cloudfront.amazonaws.com/doc/2015-04-17/}StreamingDistributionConfig"
.
_csdStreamingDistributionConfig
instance ToHeaders CreateStreamingDistribution where
toHeaders = const mempty
instance ToPath CreateStreamingDistribution where
toPath = const "/2015-04-17/streaming-distribution"
instance ToQuery CreateStreamingDistribution where
toQuery = const mempty
-- | The returned result of the corresponding request.
--
-- /See:/ 'createStreamingDistributionResponse' smart constructor.
data CreateStreamingDistributionResponse = CreateStreamingDistributionResponse'
{ _csdrsETag :: !(Maybe Text)
, _csdrsLocation :: !(Maybe Text)
, _csdrsStreamingDistribution :: !(Maybe StreamingDistribution)
, _csdrsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'CreateStreamingDistributionResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'csdrsETag'
--
-- * 'csdrsLocation'
--
-- * 'csdrsStreamingDistribution'
--
-- * 'csdrsResponseStatus'
createStreamingDistributionResponse
:: Int -- ^ 'csdrsResponseStatus'
-> CreateStreamingDistributionResponse
createStreamingDistributionResponse pResponseStatus_ =
CreateStreamingDistributionResponse'
{ _csdrsETag = Nothing
, _csdrsLocation = Nothing
, _csdrsStreamingDistribution = Nothing
, _csdrsResponseStatus = pResponseStatus_
}
-- | The current version of the streaming distribution created.
csdrsETag :: Lens' CreateStreamingDistributionResponse (Maybe Text)
csdrsETag = lens _csdrsETag (\ s a -> s{_csdrsETag = a});
-- | The fully qualified URI of the new streaming distribution resource just
-- created. For example:
-- https:\/\/cloudfront.amazonaws.com\/2010-11-01\/streaming-distribution\/EGTXBD79H29TRA8.
csdrsLocation :: Lens' CreateStreamingDistributionResponse (Maybe Text)
csdrsLocation = lens _csdrsLocation (\ s a -> s{_csdrsLocation = a});
-- | The streaming distribution\'s information.
csdrsStreamingDistribution :: Lens' CreateStreamingDistributionResponse (Maybe StreamingDistribution)
csdrsStreamingDistribution = lens _csdrsStreamingDistribution (\ s a -> s{_csdrsStreamingDistribution = a});
-- | The response status code.
csdrsResponseStatus :: Lens' CreateStreamingDistributionResponse Int
csdrsResponseStatus = lens _csdrsResponseStatus (\ s a -> s{_csdrsResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-cloudfront/gen/Network/AWS/CloudFront/CreateStreamingDistribution.hs | mpl-2.0 | 5,788 | 0 | 14 | 1,043 | 689 | 412 | 277 | 89 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sr-SP">
<title>SOAP Scanner | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | veggiespam/zap-extensions | addOns/soap/src/main/javahelp/org/zaproxy/zap/extension/soap/resources/help_sr_SP/helpset_sr_SP.hs | apache-2.0 | 974 | 80 | 66 | 160 | 415 | 210 | 205 | -1 | -1 |
{-# LANGUAGE TemplateHaskell, BangPatterns #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for ganeti-htools.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.Query.Query (testQuery_Query) where
import Test.HUnit (Assertion, assertEqual)
import Test.QuickCheck hiding (Result)
import Test.QuickCheck.Monadic
import Data.Function (on)
import Data.List
import qualified Data.Map as Map
import Data.Maybe
import qualified Data.Set as Set
import Text.JSON (JSValue(..), showJSON)
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Test.Ganeti.Objects (genEmptyCluster)
import Ganeti.BasicTypes
import Ganeti.Errors
import Ganeti.JSON
import Ganeti.Objects
import Ganeti.Query.Filter
import qualified Ganeti.Query.Group as Group
import Ganeti.Query.Language
import qualified Ganeti.Query.Node as Node
import Ganeti.Query.Query
import qualified Ganeti.Query.Job as Job
import Ganeti.Utils (sepSplit)
{-# ANN module "HLint: ignore Use camelCase" #-}
-- * Helpers
-- | Checks if a list of field definitions contains unknown fields.
hasUnknownFields :: [FieldDefinition] -> Bool
hasUnknownFields = (QFTUnknown `notElem`) . map fdefKind
-- * Test cases
-- ** Node queries
-- | Tests that querying any existing fields, via either query or
-- queryFields, will not return unknown fields.
prop_queryNode_noUnknown :: Property
prop_queryNode_noUnknown =
forAll (choose (0, maxNodes) >>= genEmptyCluster) $ \cluster ->
forAll (elements (Map.keys Node.fieldsMap)) $ \field -> monadicIO $ do
QueryResult fdefs fdata <-
run (query cluster False (Query (ItemTypeOpCode QRNode)
[field] EmptyFilter)) >>= resultProp
QueryFieldsResult fdefs' <-
resultProp $ queryFields (QueryFields (ItemTypeOpCode QRNode) [field])
_ <- stop $ conjoin
[ counterexample ("Got unknown fields via query (" ++
show fdefs ++ ")") (hasUnknownFields fdefs)
, counterexample ("Got unknown result status via query (" ++
show fdata ++ ")")
(all (all ((/= RSUnknown) . rentryStatus)) fdata)
, counterexample ("Got unknown fields via query fields (" ++
show fdefs'++ ")") (hasUnknownFields fdefs')
]
return ()
-- | Tests that an unknown field is returned as such.
prop_queryNode_Unknown :: Property
prop_queryNode_Unknown =
forAll (choose (0, maxNodes) >>= genEmptyCluster) $ \cluster ->
forAll (arbitrary `suchThat` (`notElem` Map.keys Node.fieldsMap))
$ \field -> monadicIO $ do
QueryResult fdefs fdata <-
run (query cluster False (Query (ItemTypeOpCode QRNode)
[field] EmptyFilter)) >>= resultProp
QueryFieldsResult fdefs' <-
resultProp $ queryFields (QueryFields (ItemTypeOpCode QRNode) [field])
_ <- stop $ conjoin
[ counterexample ("Got known fields via query (" ++ show fdefs ++ ")")
(not $ hasUnknownFields fdefs)
, counterexample ("Got /= ResultUnknown result status via query (" ++
show fdata ++ ")")
(all (all ((== RSUnknown) . rentryStatus)) fdata)
, counterexample ("Got a Just in a result value (" ++
show fdata ++ ")")
(all (all (isNothing . rentryValue)) fdata)
, counterexample ("Got known fields via query fields (" ++ show fdefs'
++ ")") (not $ hasUnknownFields fdefs')
]
return ()
-- | Checks that a result type is conforming to a field definition.
checkResultType :: FieldDefinition -> ResultEntry -> Property
checkResultType _ (ResultEntry RSNormal Nothing) =
failTest "Nothing result in RSNormal field"
checkResultType _ (ResultEntry _ Nothing) = passTest
checkResultType fdef (ResultEntry RSNormal (Just v)) =
case (fdefKind fdef, v) of
(QFTText , JSString {}) -> passTest
(QFTBool , JSBool {}) -> passTest
(QFTNumber , JSRational {}) -> passTest
(QFTNumberFloat , JSRational {}) -> passTest
(QFTTimestamp , JSRational {}) -> passTest
(QFTUnit , JSRational {}) -> passTest
(QFTOther , _) -> passTest -- meh, QFT not precise...
(kind, _) -> failTest $ "Type mismatch, field definition says " ++
show kind ++ " but returned value is " ++ show v ++
" for field '" ++ fdefName fdef ++ "'"
checkResultType _ (ResultEntry r (Just _)) =
failTest $ "Just result in " ++ show r ++ " field"
-- | Tests that querying any existing fields, the following three
-- properties hold: RSNormal corresponds to a Just value, any other
-- value corresponds to Nothing, and for a RSNormal and value field,
-- the type of the value corresponds to the type of the field as
-- declared in the FieldDefinition.
prop_queryNode_types :: Property
prop_queryNode_types =
forAll (choose (0, maxNodes)) $ \numnodes ->
forAll (genEmptyCluster numnodes) $ \cfg ->
forAll (elements (Map.keys Node.fieldsMap)) $ \field -> monadicIO $ do
QueryResult fdefs fdata <-
run (query cfg False (Query (ItemTypeOpCode QRNode)
[field] EmptyFilter)) >>= resultProp
_ <- stop $ conjoin
[ counterexample ("Inconsistent result entries (" ++ show fdata ++ ")")
(conjoin $ map (conjoin . zipWith checkResultType fdefs) fdata)
, counterexample "Wrong field definitions length"
(length fdefs ==? 1)
, counterexample "Wrong field result rows length"
(all ((== 1) . length) fdata)
, counterexample "Wrong number of result rows"
(length fdata ==? numnodes)
]
return ()
-- | Test that queryFields with empty fields list returns all node fields.
case_queryNode_allfields :: Assertion
case_queryNode_allfields = do
fdefs <- case queryFields (QueryFields (ItemTypeOpCode QRNode) []) of
Bad msg -> fail $ "Error in query all fields: " ++
formatError msg
Ok (QueryFieldsResult v) -> return v
let field_sort = compare `on` fdefName
assertEqual "Mismatch in all fields list"
(sortBy field_sort . map (\(f, _, _) -> f) $ Map.elems Node.fieldsMap)
(sortBy field_sort fdefs)
-- | Check if cluster node names are unique (first elems).
areNodeNamesSane :: ConfigData -> Bool
areNodeNamesSane cfg =
let fqdns = map nodeName . Map.elems . fromContainer $ configNodes cfg
names = map (head . sepSplit '.') fqdns
in length names == length (nub names)
-- | Check that the nodes reported by a name filter are sane.
prop_queryNode_filter :: Property
prop_queryNode_filter =
forAll (choose (1, maxNodes)) $ \nodes ->
forAll (genEmptyCluster nodes `suchThat`
areNodeNamesSane) $ \cluster -> monadicIO $ do
let node_list = map nodeName . Map.elems . fromContainer $
configNodes cluster
count <- pick $ choose (1, nodes)
fqdn_set <- pick . genSetHelper node_list $ Just count
let fqdns = Set.elems fqdn_set
names = map (head . sepSplit '.') fqdns
flt = makeSimpleFilter "name" $ map Left names
QueryResult _ fdata <-
run (query cluster False (Query (ItemTypeOpCode QRNode)
["name"] flt)) >>= resultProp
_ <- stop $ conjoin
[ counterexample "Invalid node names" $
map (map rentryValue) fdata ==? map (\f -> [Just (showJSON f)]) fqdns
]
return ()
-- ** Group queries
prop_queryGroup_noUnknown :: Property
prop_queryGroup_noUnknown =
forAll (choose (0, maxNodes) >>= genEmptyCluster) $ \cluster ->
forAll (elements (Map.keys Group.fieldsMap)) $ \field -> monadicIO $ do
QueryResult fdefs fdata <-
run (query cluster False (Query (ItemTypeOpCode QRGroup)
[field] EmptyFilter)) >>=
resultProp
QueryFieldsResult fdefs' <-
resultProp $ queryFields (QueryFields (ItemTypeOpCode QRGroup) [field])
_ <- stop $ conjoin
[ counterexample ("Got unknown fields via query (" ++ show fdefs ++ ")")
(hasUnknownFields fdefs)
, counterexample ("Got unknown result status via query (" ++
show fdata ++ ")")
(all (all ((/= RSUnknown) . rentryStatus)) fdata)
, counterexample ("Got unknown fields via query fields (" ++ show fdefs'
++ ")") (hasUnknownFields fdefs')
]
return ()
prop_queryGroup_Unknown :: Property
prop_queryGroup_Unknown =
forAll (choose (0, maxNodes) >>= genEmptyCluster) $ \cluster ->
forAll (arbitrary `suchThat` (`notElem` Map.keys Group.fieldsMap))
$ \field -> monadicIO $ do
QueryResult fdefs fdata <-
run (query cluster False (Query (ItemTypeOpCode QRGroup)
[field] EmptyFilter)) >>= resultProp
QueryFieldsResult fdefs' <-
resultProp $ queryFields (QueryFields (ItemTypeOpCode QRGroup) [field])
_ <- stop $ conjoin
[ counterexample ("Got known fields via query (" ++ show fdefs ++ ")")
(not $ hasUnknownFields fdefs)
, counterexample ("Got /= ResultUnknown result status via query (" ++
show fdata ++ ")")
(all (all ((== RSUnknown) . rentryStatus)) fdata)
, counterexample ("Got a Just in a result value (" ++
show fdata ++ ")")
(all (all (isNothing . rentryValue)) fdata)
, counterexample ("Got known fields via query fields (" ++ show fdefs'
++ ")") (not $ hasUnknownFields fdefs')
]
return ()
prop_queryGroup_types :: Property
prop_queryGroup_types =
forAll (choose (0, maxNodes)) $ \numnodes ->
forAll (genEmptyCluster numnodes) $ \cfg ->
forAll (elements (Map.keys Group.fieldsMap)) $ \field -> monadicIO $ do
QueryResult fdefs fdata <-
run (query cfg False (Query (ItemTypeOpCode QRGroup)
[field] EmptyFilter)) >>= resultProp
_ <- stop $ conjoin
[ counterexample ("Inconsistent result entries (" ++ show fdata ++ ")")
(conjoin $ map (conjoin . zipWith checkResultType fdefs) fdata)
, counterexample "Wrong field definitions length" (length fdefs ==? 1)
, counterexample "Wrong field result rows length"
(all ((== 1) . length) fdata)
]
return ()
case_queryGroup_allfields :: Assertion
case_queryGroup_allfields = do
fdefs <- case queryFields (QueryFields (ItemTypeOpCode QRGroup) []) of
Bad msg -> fail $ "Error in query all fields: " ++
formatError msg
Ok (QueryFieldsResult v) -> return v
let field_sort = compare `on` fdefName
assertEqual "Mismatch in all fields list"
(sortBy field_sort . map (\(f, _, _) -> f) $ Map.elems Group.fieldsMap)
(sortBy field_sort fdefs)
-- | Check that the node count reported by a group list is sane.
--
-- FIXME: also verify the node list, etc.
prop_queryGroup_nodeCount :: Property
prop_queryGroup_nodeCount =
forAll (choose (0, maxNodes)) $ \nodes ->
forAll (genEmptyCluster nodes) $ \cluster -> monadicIO $
do
QueryResult _ fdata <-
run (query cluster False (Query (ItemTypeOpCode QRGroup)
["node_cnt"] EmptyFilter)) >>= resultProp
_ <- stop $ conjoin
[ counterexample "Invalid node count" $
map (map rentryValue) fdata ==? [[Just (showJSON nodes)]]
]
return ()
-- ** Job queries
-- | Tests that querying any existing fields, via either query or
-- queryFields, will not return unknown fields. This uses 'undefined'
-- for config, as job queries shouldn't use the configuration, and an
-- explicit filter as otherwise non-live queries wouldn't return any
-- result rows.
prop_queryJob_noUnknown :: Property
prop_queryJob_noUnknown =
forAll (listOf (arbitrary::Gen (Positive Integer))) $ \ids ->
forAll (elements (Map.keys Job.fieldsMap)) $ \field -> monadicIO $ do
let qtype = ItemTypeLuxi QRJob
flt = makeSimpleFilter (nameField qtype) $
map (\(Positive i) -> Right i) ids
QueryResult fdefs fdata <-
run (query undefined False (Query qtype [field] flt)) >>= resultProp
QueryFieldsResult fdefs' <-
resultProp $ queryFields (QueryFields qtype [field])
_ <- stop $ conjoin
[ counterexample ("Got unknown fields via query (" ++
show fdefs ++ ")") (hasUnknownFields fdefs)
, counterexample ("Got unknown result status via query (" ++
show fdata ++ ")")
(all (all ((/= RSUnknown) . rentryStatus)) fdata)
, counterexample ("Got unknown fields via query fields (" ++
show fdefs'++ ")") (hasUnknownFields fdefs')
]
return ()
-- | Tests that an unknown field is returned as such.
prop_queryJob_Unknown :: Property
prop_queryJob_Unknown =
forAll (listOf (arbitrary::Gen (Positive Integer))) $ \ids ->
forAll (arbitrary `suchThat` (`notElem` Map.keys Job.fieldsMap))
$ \field -> monadicIO $ do
let qtype = ItemTypeLuxi QRJob
flt = makeSimpleFilter (nameField qtype) $
map (\(Positive i) -> Right i) ids
QueryResult fdefs fdata <-
run (query undefined False (Query qtype [field] flt)) >>= resultProp
QueryFieldsResult fdefs' <-
resultProp $ queryFields (QueryFields qtype [field])
_ <- stop $ conjoin
[ counterexample ("Got known fields via query (" ++ show fdefs ++ ")")
(not $ hasUnknownFields fdefs)
, counterexample ("Got /= ResultUnknown result status via query (" ++
show fdata ++ ")")
(all (all ((== RSUnknown) . rentryStatus)) fdata)
, counterexample ("Got a Just in a result value (" ++
show fdata ++ ")")
(all (all (isNothing . rentryValue)) fdata)
, counterexample ("Got known fields via query fields (" ++ show fdefs'
++ ")") (not $ hasUnknownFields fdefs')
]
return ()
-- ** Misc other tests
-- | Tests that requested names checking behaves as expected.
prop_getRequestedNames :: Property
prop_getRequestedNames =
forAll genName $ \node1 ->
let chk = getRequestedNames . Query (ItemTypeOpCode QRNode) []
q_node1 = QuotedString node1
eq_name = EQFilter "name"
eq_node1 = eq_name q_node1
in conjoin [ counterexample "empty filter" $ chk EmptyFilter ==? []
, counterexample "and filter" $ chk (AndFilter [eq_node1]) ==? []
, counterexample "simple equality" $ chk eq_node1 ==? [node1]
, counterexample "non-name field" $
chk (EQFilter "foo" q_node1) ==? []
, counterexample "non-simple filter" $
chk (OrFilter [ eq_node1 , LTFilter "foo" q_node1]) ==? []
]
testSuite "Query/Query"
[ 'prop_queryNode_noUnknown
, 'prop_queryNode_Unknown
, 'prop_queryNode_types
, 'prop_queryNode_filter
, 'case_queryNode_allfields
, 'prop_queryGroup_noUnknown
, 'prop_queryGroup_Unknown
, 'prop_queryGroup_types
, 'case_queryGroup_allfields
, 'prop_queryGroup_nodeCount
, 'prop_queryJob_noUnknown
, 'prop_queryJob_Unknown
, 'prop_getRequestedNames
]
| mbakke/ganeti | test/hs/Test/Ganeti/Query/Query.hs | bsd-2-clause | 16,613 | 0 | 24 | 4,252 | 4,099 | 2,115 | 1,984 | 289 | 8 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Config
-- Copyright : (c) David Himmelstrup 2005
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Utilities for handling saved state such as known packages, known servers and
-- downloaded packages.
-----------------------------------------------------------------------------
module Distribution.Client.Config (
SavedConfig(..),
loadConfig,
showConfig,
showConfigWithComments,
parseConfig,
defaultCabalDir,
defaultConfigFile,
defaultCacheDir,
defaultCompiler,
defaultLogsDir,
defaultUserInstall,
baseSavedConfig,
commentSavedConfig,
initialSavedConfig,
configFieldDescriptions,
haddockFlagsFields,
installDirsFields,
withProgramsFields,
withProgramOptionsFields,
userConfigDiff,
userConfigUpdate
) where
import Distribution.Client.Types
( RemoteRepo(..), Username(..), Password(..) )
import Distribution.Client.BuildReports.Types
( ReportLevel(..) )
import Distribution.Client.Setup
( GlobalFlags(..), globalCommand, defaultGlobalFlags
, ConfigExFlags(..), configureExOptions, defaultConfigExFlags
, InstallFlags(..), installOptions, defaultInstallFlags
, UploadFlags(..), uploadCommand
, ReportFlags(..), reportCommand
, showRepo, parseRepo )
import Distribution.Utils.NubList
( NubList, fromNubList, toNubList)
import Distribution.Simple.Compiler
( DebugInfoLevel(..), OptimisationLevel(..) )
import Distribution.Simple.Setup
( ConfigFlags(..), configureOptions, defaultConfigFlags
, HaddockFlags(..), haddockOptions, defaultHaddockFlags
, installDirsOptions
, programConfigurationPaths', programConfigurationOptions
, Flag(..), toFlag, flagToMaybe, fromFlagOrDefault )
import Distribution.Simple.InstallDirs
( InstallDirs(..), defaultInstallDirs
, PathTemplate, toPathTemplate )
import Distribution.ParseUtils
( FieldDescr(..), liftField
, ParseResult(..), PError(..), PWarning(..)
, locatedErrorMsg, showPWarning
, readFields, warning, lineNo
, simpleField, listField, parseFilePathQ, parseTokenQ )
import Distribution.Client.ParseUtils
( parseFields, ppFields, ppSection )
import qualified Distribution.ParseUtils as ParseUtils
( Field(..) )
import qualified Distribution.Text as Text
( Text(..) )
import Distribution.Simple.Command
( CommandUI(commandOptions), commandDefaultFlags, ShowOrParseArgs(..)
, viewAsFieldDescr )
import Distribution.Simple.Program
( defaultProgramConfiguration )
import Distribution.Simple.Utils
( die, notice, warn, lowercase, cabalVersion )
import Distribution.Compiler
( CompilerFlavor(..), defaultCompilerFlavor )
import Distribution.Verbosity
( Verbosity, normal )
import Data.List
( partition, find, foldl' )
import Data.Maybe
( fromMaybe )
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid
( Monoid(..) )
#endif
import Control.Monad
( unless, foldM, liftM, liftM2 )
import qualified Distribution.Compat.ReadP as Parse
( option )
import qualified Text.PrettyPrint as Disp
( render, text, empty )
import Text.PrettyPrint
( ($+$) )
import System.Directory
( createDirectoryIfMissing, getAppUserDataDirectory, renameFile )
import Network.URI
( URI(..), URIAuth(..) )
import System.FilePath
( (<.>), (</>), takeDirectory )
import System.IO.Error
( isDoesNotExistError )
import Distribution.Compat.Environment
( getEnvironment )
import Distribution.Compat.Exception
( catchIO )
import qualified Paths_cabal_install
( version )
import Data.Version
( showVersion )
import Data.Char
( isSpace )
import qualified Data.Map as M
--
-- * Configuration saved in the config file
--
data SavedConfig = SavedConfig {
savedGlobalFlags :: GlobalFlags,
savedInstallFlags :: InstallFlags,
savedConfigureFlags :: ConfigFlags,
savedConfigureExFlags :: ConfigExFlags,
savedUserInstallDirs :: InstallDirs (Flag PathTemplate),
savedGlobalInstallDirs :: InstallDirs (Flag PathTemplate),
savedUploadFlags :: UploadFlags,
savedReportFlags :: ReportFlags,
savedHaddockFlags :: HaddockFlags
}
instance Monoid SavedConfig where
mempty = SavedConfig {
savedGlobalFlags = mempty,
savedInstallFlags = mempty,
savedConfigureFlags = mempty,
savedConfigureExFlags = mempty,
savedUserInstallDirs = mempty,
savedGlobalInstallDirs = mempty,
savedUploadFlags = mempty,
savedReportFlags = mempty,
savedHaddockFlags = mempty
}
mappend a b = SavedConfig {
savedGlobalFlags = combinedSavedGlobalFlags,
savedInstallFlags = combinedSavedInstallFlags,
savedConfigureFlags = combinedSavedConfigureFlags,
savedConfigureExFlags = combinedSavedConfigureExFlags,
savedUserInstallDirs = combinedSavedUserInstallDirs,
savedGlobalInstallDirs = combinedSavedGlobalInstallDirs,
savedUploadFlags = combinedSavedUploadFlags,
savedReportFlags = combinedSavedReportFlags,
savedHaddockFlags = combinedSavedHaddockFlags
}
where
-- This is ugly, but necessary. If we're mappending two config files, we
-- want the values of the *non-empty* list fields from the second one to
-- *override* the corresponding values from the first one. Default
-- behaviour (concatenation) is confusing and makes some use cases (see
-- #1884) impossible.
--
-- However, we also want to allow specifying multiple values for a list
-- field in a *single* config file. For example, we want the following to
-- continue to work:
--
-- remote-repo: hackage.haskell.org:http://hackage.haskell.org/
-- remote-repo: private-collection:http://hackage.local/
--
-- So we can't just wrap the list fields inside Flags; we have to do some
-- special-casing just for SavedConfig.
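      -- A sketch of the intended list semantics (illustrative only, written in
      -- terms of the helpers defined just below):
      --
      --   fromNubList (globalRemoteRepos (savedGlobalFlags (a `mappend` b)))
      --     == case fromNubList (globalRemoteRepos (savedGlobalFlags b)) of
      --          [] -> fromNubList (globalRemoteRepos (savedGlobalFlags a))
      --          bs -> bs
      --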
-- NB: the signature prevents us from using 'combine' on lists.
combine' :: (SavedConfig -> flags) -> (flags -> Flag a) -> Flag a
combine' field subfield =
(subfield . field $ a) `mappend` (subfield . field $ b)
lastNonEmpty' :: (SavedConfig -> flags) -> (flags -> [a]) -> [a]
lastNonEmpty' field subfield =
let a' = subfield . field $ a
b' = subfield . field $ b
in case b' of [] -> a'
_ -> b'
lastNonEmptyNL' :: (SavedConfig -> flags) -> (flags -> NubList a)
-> NubList a
lastNonEmptyNL' field subfield =
let a' = subfield . field $ a
b' = subfield . field $ b
in case fromNubList b' of [] -> a'
_ -> b'
combinedSavedGlobalFlags = GlobalFlags {
globalVersion = combine globalVersion,
globalNumericVersion = combine globalNumericVersion,
globalConfigFile = combine globalConfigFile,
globalSandboxConfigFile = combine globalSandboxConfigFile,
globalRemoteRepos = lastNonEmptyNL globalRemoteRepos,
globalCacheDir = combine globalCacheDir,
globalLocalRepos = lastNonEmptyNL globalLocalRepos,
globalLogsDir = combine globalLogsDir,
globalWorldFile = combine globalWorldFile,
globalRequireSandbox = combine globalRequireSandbox,
globalIgnoreSandbox = combine globalIgnoreSandbox
}
where
combine = combine' savedGlobalFlags
lastNonEmptyNL = lastNonEmptyNL' savedGlobalFlags
combinedSavedInstallFlags = InstallFlags {
installDocumentation = combine installDocumentation,
installHaddockIndex = combine installHaddockIndex,
installDryRun = combine installDryRun,
installMaxBackjumps = combine installMaxBackjumps,
installReorderGoals = combine installReorderGoals,
installIndependentGoals = combine installIndependentGoals,
installShadowPkgs = combine installShadowPkgs,
installStrongFlags = combine installStrongFlags,
installReinstall = combine installReinstall,
installAvoidReinstalls = combine installAvoidReinstalls,
installOverrideReinstall = combine installOverrideReinstall,
installUpgradeDeps = combine installUpgradeDeps,
installOnly = combine installOnly,
installOnlyDeps = combine installOnlyDeps,
installRootCmd = combine installRootCmd,
installSummaryFile = lastNonEmptyNL installSummaryFile,
installLogFile = combine installLogFile,
installBuildReports = combine installBuildReports,
installReportPlanningFailure = combine installReportPlanningFailure,
installSymlinkBinDir = combine installSymlinkBinDir,
installOneShot = combine installOneShot,
installNumJobs = combine installNumJobs,
installRunTests = combine installRunTests
}
where
combine = combine' savedInstallFlags
lastNonEmptyNL = lastNonEmptyNL' savedInstallFlags
combinedSavedConfigureFlags = ConfigFlags {
configPrograms = configPrograms . savedConfigureFlags $ b,
-- TODO: NubListify
configProgramPaths = lastNonEmpty configProgramPaths,
-- TODO: NubListify
configProgramArgs = lastNonEmpty configProgramArgs,
configProgramPathExtra = lastNonEmptyNL configProgramPathExtra,
configHcFlavor = combine configHcFlavor,
configHcPath = combine configHcPath,
configHcPkg = combine configHcPkg,
configVanillaLib = combine configVanillaLib,
configProfLib = combine configProfLib,
configProf = combine configProf,
configSharedLib = combine configSharedLib,
configDynExe = combine configDynExe,
configProfExe = combine configProfExe,
-- TODO: NubListify
configConfigureArgs = lastNonEmpty configConfigureArgs,
configOptimization = combine configOptimization,
configDebugInfo = combine configDebugInfo,
configProgPrefix = combine configProgPrefix,
configProgSuffix = combine configProgSuffix,
-- Parametrised by (Flag PathTemplate), so safe to use 'mappend'.
configInstallDirs =
(configInstallDirs . savedConfigureFlags $ a)
`mappend` (configInstallDirs . savedConfigureFlags $ b),
configScratchDir = combine configScratchDir,
-- TODO: NubListify
configExtraLibDirs = lastNonEmpty configExtraLibDirs,
-- TODO: NubListify
configExtraIncludeDirs = lastNonEmpty configExtraIncludeDirs,
configDistPref = combine configDistPref,
configVerbosity = combine configVerbosity,
configUserInstall = combine configUserInstall,
-- TODO: NubListify
configPackageDBs = lastNonEmpty configPackageDBs,
configGHCiLib = combine configGHCiLib,
configSplitObjs = combine configSplitObjs,
configStripExes = combine configStripExes,
configStripLibs = combine configStripLibs,
-- TODO: NubListify
configConstraints = lastNonEmpty configConstraints,
-- TODO: NubListify
configDependencies = lastNonEmpty configDependencies,
configInstantiateWith = lastNonEmpty configInstantiateWith,
-- TODO: NubListify
configConfigurationsFlags = lastNonEmpty configConfigurationsFlags,
configTests = combine configTests,
configBenchmarks = combine configBenchmarks,
configCoverage = combine configCoverage,
configLibCoverage = combine configLibCoverage,
configExactConfiguration = combine configExactConfiguration,
configFlagError = combine configFlagError,
configRelocatable = combine configRelocatable
}
where
combine = combine' savedConfigureFlags
lastNonEmpty = lastNonEmpty' savedConfigureFlags
lastNonEmptyNL = lastNonEmptyNL' savedConfigureFlags
combinedSavedConfigureExFlags = ConfigExFlags {
configCabalVersion = combine configCabalVersion,
-- TODO: NubListify
configExConstraints = lastNonEmpty configExConstraints,
-- TODO: NubListify
configPreferences = lastNonEmpty configPreferences,
configSolver = combine configSolver,
configAllowNewer = combine configAllowNewer
}
where
combine = combine' savedConfigureExFlags
lastNonEmpty = lastNonEmpty' savedConfigureExFlags
-- Parametrised by (Flag PathTemplate), so safe to use 'mappend'.
combinedSavedUserInstallDirs = savedUserInstallDirs a
`mappend` savedUserInstallDirs b
-- Parametrised by (Flag PathTemplate), so safe to use 'mappend'.
combinedSavedGlobalInstallDirs = savedGlobalInstallDirs a
`mappend` savedGlobalInstallDirs b
combinedSavedUploadFlags = UploadFlags {
uploadCheck = combine uploadCheck,
uploadUsername = combine uploadUsername,
uploadPassword = combine uploadPassword,
uploadVerbosity = combine uploadVerbosity
}
where
combine = combine' savedUploadFlags
combinedSavedReportFlags = ReportFlags {
reportUsername = combine reportUsername,
reportPassword = combine reportPassword,
reportVerbosity = combine reportVerbosity
}
where
combine = combine' savedReportFlags
combinedSavedHaddockFlags = HaddockFlags {
-- TODO: NubListify
haddockProgramPaths = lastNonEmpty haddockProgramPaths,
-- TODO: NubListify
haddockProgramArgs = lastNonEmpty haddockProgramArgs,
haddockHoogle = combine haddockHoogle,
haddockHtml = combine haddockHtml,
haddockHtmlLocation = combine haddockHtmlLocation,
haddockExecutables = combine haddockExecutables,
haddockTestSuites = combine haddockTestSuites,
haddockBenchmarks = combine haddockBenchmarks,
haddockInternal = combine haddockInternal,
haddockCss = combine haddockCss,
haddockHscolour = combine haddockHscolour,
haddockHscolourCss = combine haddockHscolourCss,
haddockContents = combine haddockContents,
haddockDistPref = combine haddockDistPref,
haddockKeepTempFiles = combine haddockKeepTempFiles,
haddockVerbosity = combine haddockVerbosity
}
where
combine = combine' savedHaddockFlags
lastNonEmpty = lastNonEmpty' savedHaddockFlags
updateInstallDirs :: Flag Bool -> SavedConfig -> SavedConfig
updateInstallDirs userInstallFlag
savedConfig@SavedConfig {
savedConfigureFlags = configureFlags,
savedUserInstallDirs = userInstallDirs,
savedGlobalInstallDirs = globalInstallDirs
} =
savedConfig {
savedConfigureFlags = configureFlags {
configInstallDirs = installDirs
}
}
where
installDirs | userInstall = userInstallDirs
| otherwise = globalInstallDirs
userInstall = fromFlagOrDefault defaultUserInstall $
configUserInstall configureFlags `mappend` userInstallFlag
--
-- * Default config
--
-- | These are the absolute basic defaults: the fields that must be
-- initialised. When we load the config from the file we layer the loaded
-- values over these ones, so any missing fields in the file take their values
-- from here.
--
baseSavedConfig :: IO SavedConfig
baseSavedConfig = do
userPrefix <- defaultCabalDir
logsDir <- defaultLogsDir
worldFile <- defaultWorldFile
return mempty {
savedConfigureFlags = mempty {
configHcFlavor = toFlag defaultCompiler,
configUserInstall = toFlag defaultUserInstall,
configVerbosity = toFlag normal
},
savedUserInstallDirs = mempty {
prefix = toFlag (toPathTemplate userPrefix)
},
savedGlobalFlags = mempty {
globalLogsDir = toFlag logsDir,
globalWorldFile = toFlag worldFile
}
}
-- | This is the initial configuration that we write out to the config file
-- if the file does not exist (or the config we use if the file cannot be read
-- for some other reason). When the config gets loaded it gets layered on top
-- of 'baseSavedConfig' so we do not need to include it into the initial
-- values we save into the config file.
--
initialSavedConfig :: IO SavedConfig
initialSavedConfig = do
cacheDir <- defaultCacheDir
logsDir <- defaultLogsDir
worldFile <- defaultWorldFile
extraPath <- defaultExtraPath
return mempty {
savedGlobalFlags = mempty {
globalCacheDir = toFlag cacheDir,
globalRemoteRepos = toNubList [defaultRemoteRepo],
globalWorldFile = toFlag worldFile
},
savedConfigureFlags = mempty {
configProgramPathExtra = toNubList extraPath
},
savedInstallFlags = mempty {
installSummaryFile = toNubList [toPathTemplate (logsDir </> "build.log")],
installBuildReports= toFlag AnonymousReports,
installNumJobs = toFlag Nothing
}
}
--TODO: misleading, there's no way to override this default
-- either make it possible or rename to simply getCabalDir.
defaultCabalDir :: IO FilePath
defaultCabalDir = getAppUserDataDirectory "cabal"
defaultConfigFile :: IO FilePath
defaultConfigFile = do
dir <- defaultCabalDir
return $ dir </> "config"
defaultCacheDir :: IO FilePath
defaultCacheDir = do
dir <- defaultCabalDir
return $ dir </> "packages"
defaultLogsDir :: IO FilePath
defaultLogsDir = do
dir <- defaultCabalDir
return $ dir </> "logs"
-- | Default position of the world file
defaultWorldFile :: IO FilePath
defaultWorldFile = do
dir <- defaultCabalDir
return $ dir </> "world"
defaultExtraPath :: IO [FilePath]
defaultExtraPath = do
dir <- defaultCabalDir
return [dir </> "bin"]
defaultCompiler :: CompilerFlavor
defaultCompiler = fromMaybe GHC defaultCompilerFlavor
defaultUserInstall :: Bool
defaultUserInstall = True
-- We do per-user installs by default on all platforms. We used to default to
-- global installs on Windows but that no longer works on Windows Vista or 7.
defaultRemoteRepo :: RemoteRepo
defaultRemoteRepo = RemoteRepo name uri
where
name = "hackage.haskell.org"
uri = URI "http:" (Just (URIAuth "" name "")) "/" "" ""
-- Note that lots of old ~/.cabal/config files will have the old url
-- http://hackage.haskell.org/packages/archive
-- but new config files can use the new url (without the /packages/archive)
-- and avoid having to do a http redirect
--
-- * Config file reading
--
loadConfig :: Verbosity -> Flag FilePath -> Flag Bool -> IO SavedConfig
loadConfig verbosity configFileFlag userInstallFlag = addBaseConf $ do
let sources = [
("commandline option", return . flagToMaybe $ configFileFlag),
("env var CABAL_CONFIG", lookup "CABAL_CONFIG" `liftM` getEnvironment),
("default config file", Just `liftM` defaultConfigFile) ]
getSource [] = error "no config file path candidate found."
getSource ((msg,action): xs) =
action >>= maybe (getSource xs) (return . (,) msg)
(source, configFile) <- getSource sources
minp <- readConfigFile mempty configFile
case minp of
Nothing -> do
notice verbosity $ "Config file path source is " ++ source ++ "."
notice verbosity $ "Config file " ++ configFile ++ " not found."
notice verbosity $ "Writing default configuration to " ++ configFile
commentConf <- commentSavedConfig
initialConf <- initialSavedConfig
writeConfigFile configFile commentConf initialConf
return initialConf
Just (ParseOk ws conf) -> do
unless (null ws) $ warn verbosity $
unlines (map (showPWarning configFile) ws)
return conf
Just (ParseFailed err) -> do
let (line, msg) = locatedErrorMsg err
die $
"Error parsing config file " ++ configFile
++ maybe "" (\n -> ':' : show n) line ++ ":\n" ++ msg
where
addBaseConf body = do
base <- baseSavedConfig
extra <- body
return (updateInstallDirs userInstallFlag (base `mappend` extra))
readConfigFile :: SavedConfig -> FilePath -> IO (Maybe (ParseResult SavedConfig))
readConfigFile initial file = handleNotExists $
fmap (Just . parseConfig initial) (readFile file)
where
handleNotExists action = catchIO action $ \ioe ->
if isDoesNotExistError ioe
then return Nothing
else ioError ioe
writeConfigFile :: FilePath -> SavedConfig -> SavedConfig -> IO ()
writeConfigFile file comments vals = do
let tmpFile = file <.> "tmp"
createDirectoryIfMissing True (takeDirectory file)
writeFile tmpFile $ explanation ++ showConfigWithComments comments vals ++ "\n"
renameFile tmpFile file
where
explanation = unlines
["-- This is the configuration file for the 'cabal' command line tool."
,""
,"-- The available configuration options are listed below."
,"-- Some of them have default values listed."
,""
,"-- Lines (like this one) beginning with '--' are comments."
,"-- Be careful with spaces and indentation because they are"
,"-- used to indicate layout for nested sections."
,""
,"-- Cabal library version: " ++ showVersion cabalVersion
,"-- cabal-install version: " ++ showVersion Paths_cabal_install.version
,"",""
]
-- | These are the default values that get used in Cabal if no value is
-- given. We use these here to include in comments when we write out the
-- initial config file so that the user can see what default value they are
-- overriding.
--
commentSavedConfig :: IO SavedConfig
commentSavedConfig = do
userInstallDirs <- defaultInstallDirs defaultCompiler True True
globalInstallDirs <- defaultInstallDirs defaultCompiler False True
return SavedConfig {
savedGlobalFlags = defaultGlobalFlags,
savedInstallFlags = defaultInstallFlags,
savedConfigureExFlags = defaultConfigExFlags,
savedConfigureFlags = (defaultConfigFlags defaultProgramConfiguration) {
configUserInstall = toFlag defaultUserInstall
},
savedUserInstallDirs = fmap toFlag userInstallDirs,
savedGlobalInstallDirs = fmap toFlag globalInstallDirs,
savedUploadFlags = commandDefaultFlags uploadCommand,
savedReportFlags = commandDefaultFlags reportCommand,
savedHaddockFlags = defaultHaddockFlags
}
-- | All config file fields.
--
configFieldDescriptions :: [FieldDescr SavedConfig]
configFieldDescriptions =
toSavedConfig liftGlobalFlag
(commandOptions (globalCommand []) ParseArgs)
["version", "numeric-version", "config-file", "sandbox-config-file"] []
++ toSavedConfig liftConfigFlag
(configureOptions ParseArgs)
(["builddir", "constraint", "dependency"]
++ map fieldName installDirsFields)
--FIXME: this is only here because viewAsFieldDescr gives us a parser
-- that only recognises 'ghc' etc, the case-sensitive flag names, not
-- what the normal case-insensitive parser gives us.
[simpleField "compiler"
(fromFlagOrDefault Disp.empty . fmap Text.disp) (optional Text.parse)
configHcFlavor (\v flags -> flags { configHcFlavor = v })
-- TODO: The following is a temporary fix. The "optimization"
-- and "debug-info" fields are OptArg, and viewAsFieldDescr
     -- fails on that. Instead of a hand-written, hacked-up parser
-- and printer, we should handle this case properly in the
-- library.
,liftField configOptimization (\v flags -> flags { configOptimization = v }) $
let name = "optimization" in
FieldDescr name
(\f -> case f of
Flag NoOptimisation -> Disp.text "False"
Flag NormalOptimisation -> Disp.text "True"
Flag MaximumOptimisation -> Disp.text "2"
_ -> Disp.empty)
(\line str _ -> case () of
_ | str == "False" -> ParseOk [] (Flag NoOptimisation)
| str == "True" -> ParseOk [] (Flag NormalOptimisation)
| str == "0" -> ParseOk [] (Flag NoOptimisation)
| str == "1" -> ParseOk [] (Flag NormalOptimisation)
| str == "2" -> ParseOk [] (Flag MaximumOptimisation)
| lstr == "false" -> ParseOk [caseWarning] (Flag NoOptimisation)
| lstr == "true" -> ParseOk [caseWarning] (Flag NormalOptimisation)
| otherwise -> ParseFailed (NoParse name line)
where
lstr = lowercase str
caseWarning = PWarning $
"The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.")
,liftField configDebugInfo (\v flags -> flags { configDebugInfo = v }) $
let name = "debug-info" in
FieldDescr name
(\f -> case f of
Flag NoDebugInfo -> Disp.text "False"
Flag MinimalDebugInfo -> Disp.text "1"
Flag NormalDebugInfo -> Disp.text "True"
Flag MaximalDebugInfo -> Disp.text "3"
_ -> Disp.empty)
(\line str _ -> case () of
_ | str == "False" -> ParseOk [] (Flag NoDebugInfo)
| str == "True" -> ParseOk [] (Flag NormalDebugInfo)
| str == "0" -> ParseOk [] (Flag NoDebugInfo)
| str == "1" -> ParseOk [] (Flag MinimalDebugInfo)
| str == "2" -> ParseOk [] (Flag NormalDebugInfo)
| str == "3" -> ParseOk [] (Flag MaximalDebugInfo)
| lstr == "false" -> ParseOk [caseWarning] (Flag NoDebugInfo)
| lstr == "true" -> ParseOk [caseWarning] (Flag NormalDebugInfo)
| otherwise -> ParseFailed (NoParse name line)
where
lstr = lowercase str
caseWarning = PWarning $
"The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.")
]
++ toSavedConfig liftConfigExFlag
(configureExOptions ParseArgs)
[] []
++ toSavedConfig liftInstallFlag
(installOptions ParseArgs)
["dry-run", "only", "only-dependencies", "dependencies-only"] []
++ toSavedConfig liftUploadFlag
(commandOptions uploadCommand ParseArgs)
["verbose", "check"] []
++ toSavedConfig liftReportFlag
(commandOptions reportCommand ParseArgs)
["verbose", "username", "password"] []
--FIXME: this is a hack, hiding the user name and password.
-- But otherwise it masks the upload ones. Either need to
     -- share the options or make them distinct. In any case
-- they should probably be per-server.
where
toSavedConfig lift options exclusions replacements =
[ lift (fromMaybe field replacement)
| opt <- options
, let field = viewAsFieldDescr opt
name = fieldName field
replacement = find ((== name) . fieldName) replacements
, name `notElem` exclusions ]
optional = Parse.option mempty . fmap toFlag
-- TODO: next step, make the deprecated fields elicit a warning.
--
deprecatedFieldDescriptions :: [FieldDescr SavedConfig]
deprecatedFieldDescriptions =
[ liftGlobalFlag $
listField "repos"
(Disp.text . showRepo) parseRepo
(fromNubList . globalRemoteRepos)
(\rs cfg -> cfg { globalRemoteRepos = toNubList rs })
, liftGlobalFlag $
simpleField "cachedir"
(Disp.text . fromFlagOrDefault "") (optional parseFilePathQ)
globalCacheDir (\d cfg -> cfg { globalCacheDir = d })
, liftUploadFlag $
simpleField "hackage-username"
(Disp.text . fromFlagOrDefault "" . fmap unUsername)
(optional (fmap Username parseTokenQ))
uploadUsername (\d cfg -> cfg { uploadUsername = d })
, liftUploadFlag $
simpleField "hackage-password"
(Disp.text . fromFlagOrDefault "" . fmap unPassword)
(optional (fmap Password parseTokenQ))
uploadPassword (\d cfg -> cfg { uploadPassword = d })
]
++ map (modifyFieldName ("user-"++) . liftUserInstallDirs) installDirsFields
++ map (modifyFieldName ("global-"++) . liftGlobalInstallDirs) installDirsFields
where
optional = Parse.option mempty . fmap toFlag
modifyFieldName :: (String -> String) -> FieldDescr a -> FieldDescr a
modifyFieldName f d = d { fieldName = f (fieldName d) }
liftUserInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate))
-> FieldDescr SavedConfig
liftUserInstallDirs = liftField
savedUserInstallDirs (\flags conf -> conf { savedUserInstallDirs = flags })
liftGlobalInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate))
-> FieldDescr SavedConfig
liftGlobalInstallDirs = liftField
savedGlobalInstallDirs (\flags conf -> conf { savedGlobalInstallDirs = flags })
liftGlobalFlag :: FieldDescr GlobalFlags -> FieldDescr SavedConfig
liftGlobalFlag = liftField
savedGlobalFlags (\flags conf -> conf { savedGlobalFlags = flags })
liftConfigFlag :: FieldDescr ConfigFlags -> FieldDescr SavedConfig
liftConfigFlag = liftField
savedConfigureFlags (\flags conf -> conf { savedConfigureFlags = flags })
liftConfigExFlag :: FieldDescr ConfigExFlags -> FieldDescr SavedConfig
liftConfigExFlag = liftField
savedConfigureExFlags (\flags conf -> conf { savedConfigureExFlags = flags })
liftInstallFlag :: FieldDescr InstallFlags -> FieldDescr SavedConfig
liftInstallFlag = liftField
savedInstallFlags (\flags conf -> conf { savedInstallFlags = flags })
liftUploadFlag :: FieldDescr UploadFlags -> FieldDescr SavedConfig
liftUploadFlag = liftField
savedUploadFlags (\flags conf -> conf { savedUploadFlags = flags })
liftReportFlag :: FieldDescr ReportFlags -> FieldDescr SavedConfig
liftReportFlag = liftField
savedReportFlags (\flags conf -> conf { savedReportFlags = flags })
parseConfig :: SavedConfig -> String -> ParseResult SavedConfig
parseConfig initial = \str -> do
fields <- readFields str
let (knownSections, others) = partition isKnownSection fields
config <- parse others
let user0 = savedUserInstallDirs config
global0 = savedGlobalInstallDirs config
(haddockFlags, user, global, paths, args) <-
foldM parseSections
(savedHaddockFlags config, user0, global0, [], [])
knownSections
return config {
savedConfigureFlags = (savedConfigureFlags config) {
configProgramPaths = paths,
configProgramArgs = args
},
savedHaddockFlags = haddockFlags,
savedUserInstallDirs = user,
savedGlobalInstallDirs = global
}
where
isKnownSection (ParseUtils.Section _ "haddock" _ _) = True
isKnownSection (ParseUtils.Section _ "install-dirs" _ _) = True
isKnownSection (ParseUtils.Section _ "program-locations" _ _) = True
isKnownSection (ParseUtils.Section _ "program-default-options" _ _) = True
isKnownSection _ = False
parse = parseFields (configFieldDescriptions
++ deprecatedFieldDescriptions) initial
parseSections accum@(h,u,g,p,a)
(ParseUtils.Section _ "haddock" name fs)
| name == "" = do h' <- parseFields haddockFlagsFields h fs
return (h', u, g, p, a)
| otherwise = do
warning "The 'haddock' section should be unnamed"
return accum
parseSections accum@(h,u,g,p,a)
(ParseUtils.Section _ "install-dirs" name fs)
| name' == "user" = do u' <- parseFields installDirsFields u fs
return (h, u', g, p, a)
| name' == "global" = do g' <- parseFields installDirsFields g fs
return (h, u, g', p, a)
| otherwise = do
warning "The 'install-paths' section should be for 'user' or 'global'"
return accum
where name' = lowercase name
parseSections accum@(h,u,g,p,a)
(ParseUtils.Section _ "program-locations" name fs)
| name == "" = do p' <- parseFields withProgramsFields p fs
return (h, u, g, p', a)
| otherwise = do
warning "The 'program-locations' section should be unnamed"
return accum
parseSections accum@(h, u, g, p, a)
(ParseUtils.Section _ "program-default-options" name fs)
| name == "" = do a' <- parseFields withProgramOptionsFields a fs
return (h, u, g, p, a')
| otherwise = do
warning "The 'program-default-options' section should be unnamed"
return accum
parseSections accum f = do
warning $ "Unrecognized stanza on line " ++ show (lineNo f)
return accum
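-- For reference, the sections handled by 'parseSections' above look roughly
-- like this in a config file (the field values are only illustrative):
--
-- > haddock
-- >   hoogle: True
-- >
-- > install-dirs user
-- >   prefix: /home/user/.cabal
-- >
-- > program-default-options
-- >   ghc-options: -threaded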
showConfig :: SavedConfig -> String
showConfig = showConfigWithComments mempty
showConfigWithComments :: SavedConfig -> SavedConfig -> String
showConfigWithComments comment vals = Disp.render $
ppFields configFieldDescriptions mcomment vals
$+$ Disp.text ""
$+$ ppSection "haddock" "" haddockFlagsFields
(fmap savedHaddockFlags mcomment) (savedHaddockFlags vals)
$+$ Disp.text ""
$+$ installDirsSection "user" savedUserInstallDirs
$+$ Disp.text ""
$+$ installDirsSection "global" savedGlobalInstallDirs
$+$ Disp.text ""
$+$ configFlagsSection "program-locations" withProgramsFields
configProgramPaths
$+$ Disp.text ""
$+$ configFlagsSection "program-default-options" withProgramOptionsFields
configProgramArgs
where
mcomment = Just comment
installDirsSection name field =
ppSection "install-dirs" name installDirsFields
(fmap field mcomment) (field vals)
configFlagsSection name fields field =
ppSection name "" fields
(fmap (field . savedConfigureFlags) mcomment)
((field . savedConfigureFlags) vals)
-- | Fields for the 'install-dirs' sections.
installDirsFields :: [FieldDescr (InstallDirs (Flag PathTemplate))]
installDirsFields = map viewAsFieldDescr installDirsOptions
-- | Fields for the 'haddock' section.
haddockFlagsFields :: [FieldDescr HaddockFlags]
haddockFlagsFields = [ field
| opt <- haddockOptions ParseArgs
, let field = viewAsFieldDescr opt
name = fieldName field
, name `notElem` exclusions ]
where
exclusions = ["verbose", "builddir"]
-- | Fields for the 'program-locations' section.
withProgramsFields :: [FieldDescr [(String, FilePath)]]
withProgramsFields =
map viewAsFieldDescr $
programConfigurationPaths' (++ "-location") defaultProgramConfiguration
ParseArgs id (++)
-- | Fields for the 'program-default-options' section.
withProgramOptionsFields :: [FieldDescr [(String, [String])]]
withProgramOptionsFields =
map viewAsFieldDescr $
programConfigurationOptions defaultProgramConfiguration ParseArgs id (++)
-- | Get the differences (as a pseudo code diff) between the user's
-- '~/.cabal/config' and the one that cabal would generate if it didn't exist.
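--
-- Each difference comes out as a pair of pseudo-diff lines; for instance, a
-- user override of the 'documentation' field would be rendered roughly as:
--
-- > - documentation: False
-- > + documentation: True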
userConfigDiff :: GlobalFlags -> IO [String]
userConfigDiff globalFlags = do
userConfig <- loadConfig normal (globalConfigFile globalFlags) mempty
testConfig <- liftM2 mappend baseSavedConfig initialSavedConfig
return $ reverse . foldl' createDiff [] . M.toList
$ M.unionWith combine
(M.fromList . map justFst $ filterShow testConfig)
(M.fromList . map justSnd $ filterShow userConfig)
where
justFst (a, b) = (a, (Just b, Nothing))
justSnd (a, b) = (a, (Nothing, Just b))
combine (Nothing, Just b) (Just a, Nothing) = (Just a, Just b)
combine (Just a, Nothing) (Nothing, Just b) = (Just a, Just b)
combine x y = error $ "Can't happen : userConfigDiff " ++ show x ++ " " ++ show y
createDiff :: [String] -> (String, (Maybe String, Maybe String)) -> [String]
createDiff acc (key, (Just a, Just b))
| a == b = acc
| otherwise = ("+ " ++ key ++ ": " ++ b) : ("- " ++ key ++ ": " ++ a) : acc
createDiff acc (key, (Nothing, Just b)) = ("+ " ++ key ++ ": " ++ b) : acc
createDiff acc (key, (Just a, Nothing)) = ("- " ++ key ++ ": " ++ a) : acc
createDiff acc (_, (Nothing, Nothing)) = acc
filterShow :: SavedConfig -> [(String, String)]
filterShow cfg = map keyValueSplit
. filter (\s -> not (null s) && any (== ':') s)
. map nonComment
. lines
$ showConfig cfg
nonComment [] = []
nonComment ('-':'-':_) = []
nonComment (x:xs) = x : nonComment xs
topAndTail = reverse . dropWhile isSpace . reverse . dropWhile isSpace
keyValueSplit s =
let (left, right) = break (== ':') s
in (topAndTail left, topAndTail (drop 1 right))
-- | Update the user's '~/.cabal/config', keeping the user's customizations.
userConfigUpdate :: Verbosity -> GlobalFlags -> IO ()
userConfigUpdate verbosity globalFlags = do
userConfig <- loadConfig normal (globalConfigFile globalFlags) mempty
newConfig <- liftM2 mappend baseSavedConfig initialSavedConfig
commentConf <- commentSavedConfig
cabalFile <- defaultConfigFile
let backup = cabalFile ++ ".backup"
notice verbosity $ "Renaming " ++ cabalFile ++ " to " ++ backup ++ "."
renameFile cabalFile backup
notice verbosity $ "Writing merged config to " ++ cabalFile ++ "."
writeConfigFile cabalFile commentConf (newConfig `mappend` userConfig)
| seereason/cabal | cabal-install/Distribution/Client/Config.hs | bsd-3-clause | 39,041 | 0 | 25 | 10,505 | 8,107 | 4,366 | 3,741 | 702 | 9 |
module Language.Mecha.Viewer
( viewer
) where
import Control.Monad
import Graphics.Rendering.OpenGL
import Graphics.UI.SDL hiding (init, Color)
import qualified Graphics.UI.SDL as SDL
import Language.Mecha.OpenGL
data State = State
{ leftButton
, middleButton
, rightButton :: Bool
, theta
, phi
, scale'
, theta'
, phi' :: Float
, x'
, y' :: Int
, i
, j
, i'
, j' :: Float
, running :: Bool
} deriving Show
initState = State
{ leftButton = False
, middleButton = False
, rightButton = False
, theta = 45 * pi / 180
, phi = 30 * pi / 180
, scale' = 0.4
, theta' = 0
, phi' = 0
, x' = 0
, y' = 0
, i = 0
, j = 0
, i' = 0
, j' = 0
, running = True
}
type Model = IO ()
viewer :: Model -> IO ()
viewer model = do
SDL.init [InitVideo]
setCaption "ModelView" "ModelView"
glSetAttribute glRedSize 8
glSetAttribute glGreenSize 8
glSetAttribute glBlueSize 8
glSetAttribute glAlphaSize 8
glSetAttribute glDepthSize 24
glSetAttribute glDoubleBuffer 1
setView 600 400
cullFace $= Nothing
shadeModel $= Smooth
normalize $= Enabled
position (Light 0) $= Vertex4 1 1 1 0
ambient (Light 0) $= Color4 0.3 0.3 0.3 1
diffuse (Light 0) $= Color4 1 1 1 1
--specular (Light 0) $= Color4 0 0 0 1
specular (Light 0) $= Color4 1 1 1 1
lightModelAmbient $= Color4 0.2 0.2 0.2 1
lighting $= Enabled
light (Light 0) $= Enabled
colorMaterial $= Just (FrontAndBack, AmbientAndDiffuse)
materialSpecular FrontAndBack $= Color4 1 1 1 1
materialEmission FrontAndBack $= Color4 0 0 0 1
materialShininess FrontAndBack $= 30
clearColor $= Color4 1 1 1 0
clearDepth $= 1
depthFunc $= Just Less
depthMask $= Enabled
loop model initState
quit
setView :: Int -> Int -> IO ()
setView w h = do
setVideoMode w h 16 [OpenGL, Resizable] >> return ()
matrixMode $= Projection
loadIdentity
let r = (fromIntegral w / fromIntegral h)
frustum (-r * 0.1) (r * 0.1) (-0.1) 0.1 0.1 100000
matrixMode $= Modelview 0
viewport $= (Position 0 0, Size (fromIntegral w) (fromIntegral h))
redraw :: Model -> State -> IO ()
redraw model state = do
clear [ColorBuffer, DepthBuffer]
loadIdentity
stateView state
lighting $= Disabled
orign
lighting $= Enabled
model
flush
glSwapBuffers
stateView :: State -> IO ()
stateView state = do
translate3 0 0 (-1)
rotate3 (phi state) 1 0 0
rotate3 (theta state) 0 1 0
rotate3 (-pi / 2) 1 0 0
rotate3 (-pi / 2) 0 0 1
scale3 (scale' state) (scale' state) (scale' state)
loop :: Model -> State -> IO ()
loop model state = do
event <- pollEvent
state <- handler event model state
when (event /= Quit) $ loop model state
handler :: Event -> Model -> State -> IO State
handler event model state = case event of
NoEvent -> return state
VideoExpose -> redraw model state >> return state
VideoResize x y -> setView x y >> return state
event -> case nextState event state of
Nothing -> return state
Just state -> redraw model state >> return state
nextState :: Event -> State -> Maybe State
nextState event state = case event of
MouseMotion x y _ _ | middleButton state -> Just state
{ phi = phi' state + 0.01 * fromIntegral (fromIntegral y - y' state)
, theta = theta' state + 0.01 * fromIntegral (fromIntegral x - x' state)
}
MouseMotion x _ _ _ | leftButton state -> Just state { i = i' state + 0.01 * fromIntegral (fromIntegral x - x' state) }
MouseMotion x _ _ _ | rightButton state -> Just state { j = j' state + 0.01 * fromIntegral (fromIntegral x - x' state) }
MouseButtonDown x y ButtonMiddle -> Just state
{ leftButton = False
, middleButton = True
, rightButton = False
, x' = fromIntegral x
, y' = fromIntegral y
, phi' = phi state
, theta' = theta state
}
MouseButtonDown x y ButtonLeft -> Just state
{ leftButton = True
, middleButton = False
, rightButton = False
, x' = fromIntegral x
, y' = fromIntegral y
, i' = i state
, j' = j state
}
MouseButtonDown x y ButtonRight -> Just state
{ leftButton = False
, middleButton = False
, rightButton = True
, x' = fromIntegral x
, y' = fromIntegral y
, i' = i state
, j' = j state
}
MouseButtonUp _ _ ButtonLeft -> Just state { leftButton = False }
MouseButtonUp _ _ ButtonMiddle -> Just state { middleButton = False }
MouseButtonUp _ _ ButtonRight -> Just state { rightButton = False }
MouseButtonDown _ _ ButtonWheelUp -> Just state { scale' = scale' state * 1.2 }
MouseButtonDown _ _ ButtonWheelDown -> Just state { scale' = scale' state / 1.2 }
_ -> Nothing
darkGray = color3 0.4 0.4 0.4
--lightGray = color3 0.7 0.7 0.7
orign :: IO ()
orign = do
lineWidth $= 1
renderPrimitive Lines $ do
color3 0.7 0 0
vertex3 0 0 0
vertex3 inf 0 0
color3 0 0.7 0
vertex3 0 0 0
vertex3 0 inf 0
color3 0 0 0.7
vertex3 0 0 0
vertex3 0 0 inf
darkGray
vertex3 0 0 0
vertex3 (-inf) 0 0
vertex3 0 0 0
vertex3 0 (-inf) 0
vertex3 0 0 0
vertex3 0 0 (-inf)
where
inf = 1e6
{-
plane :: Float -> Int -> Int -> IO ()
plane delta linesPerMajor totalMajors = do
lineWidth $= 1
renderPrimitive Lines $ line 1 (linesPerMajor - 1) delta
where
y = delta * fromIntegral (linesPerMajor * totalMajors)
line :: Int -> Int -> Float -> IO ()
line majorCount _ _ | majorCount > totalMajors = return ()
line majorCount minorCount x | minorCount == 0 = do
darkGray
line' x
line (majorCount + 1) (linesPerMajor - 1) (x + delta)
line majorCount minorCount x = do
lightGray
line' x
line majorCount (minorCount - 1) (x + delta)
line' x = do
vertex3 x y 0
vertex3 x (-y) 0
vertex3 (-x) y 0
vertex3 (-x) (-y) 0
vertex3 y x 0
vertex3 (-y) x 0
vertex3 y (-x) 0
vertex3 (-y) (-x) 0
-}
{-
posZ0 :: Int -> Int -> IO (Maybe (Float,Float))
posZ0 x y = do
((x1,y1,z1),(x2,y2,z2)) <- unProject x y
if z1 > 0 && z2 > 0 || z1 < 0 && z2 < 0 then return Nothing else do
let r = abs z1 / abs (z2 - z1)
x' = r * (x2 - x1) + x1
y' = r * (y2 - y1) + y1
return $ Just (x',y')
-}
| tomahawkins/mecha | attic/Viewer.hs | bsd-3-clause | 6,282 | 0 | 16 | 1,759 | 2,028 | 997 | 1,031 | 172 | 12 |
module Timer (
Timer
, startTimer
, stopTimer
, getPOSIXTime
) where
import Control.Concurrent
import Control.Monad (when)
import Data.Time.Clock.POSIX
newtype Timer = Timer (MVar Bool)
-- | Start a timer.
--
-- Call the given action repeatedly with the time passed since the timer was
-- started, adjusted by a given offset. The action is called every second;
-- whenever (passed + offset) is close to (truncate $ passed + offset).
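--
-- A usage sketch (illustrative only; 'threadDelay' comes from
-- "Control.Concurrent"):
--
-- > do t0    <- getPOSIXTime
-- >    timer <- startTimer t0 0 print
-- >    threadDelay 5000000   -- let the timer tick for about five seconds
-- >    stopTimer timer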
startTimer :: POSIXTime -- ^ start time
-> Double -- ^ offset in seconds
-> (Double -> IO ())
-> IO Timer
startTimer t0 s0 action = do
m <- newMVar True
_ <- forkIO $ run t0 (realToFrac s0) (action . realToFrac) m
return (Timer m)
-- | Run until False is put into the MVar.
run :: POSIXTime -> POSIXTime -> (POSIXTime -> IO ()) -> MVar Bool -> IO ()
run t0 s0 action m = go
where
go = do
t1 <- getPOSIXTime
let s_current = s0 + (t1 - t0)
-- Increase s_next to the next full second.
s_next = fromIntegral (ceiling (s_current + 0.001) :: Int)
sleep (s_next - s_current)
isRunning <- takeMVar m
when (isRunning) $ do
-- add 0.001 as a tiebreaker to make sure that `truncate` will work as
-- expected
action (s_next + 0.001)
putMVar m isRunning
go
-- | Like `threadDelay`, but takes the delay in seconds.
sleep :: POSIXTime -> IO ()
sleep s = threadDelay $ round (s * 1000000)
-- | Stop timer; block until the timer is stopped.
stopTimer :: Timer -> IO ()
stopTimer (Timer m) = takeMVar m >> putMVar m False
| haasn/vimus | src/Timer.hs | mit | 1,629 | 0 | 16 | 461 | 415 | 214 | 201 | 33 | 1 |
{-# LANGUAGE PatternGuards, TemplateHaskell, QuasiQuotes #-}
-- |
-- Module : Language.C.Inline.C.Marshal
-- Copyright : [2013] Manuel M T Chakravarty
-- License : BSD3
--
-- Maintainer : Manuel M T Chakravarty <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- C-specific marshalling functions.
--
-- FIXME: Some of the code can go into a module for general marshalling, as only some of it is C-specific.
module Language.C.Inline.C.Marshal (
-- * Determine corresponding foreign types of Haskell types
haskellTypeToCType,
-- * Marshaller types
HaskellMarshaller, CMarshaller,
-- * Compute bridging types and marshallers
generateHaskellToCMarshaller, generateCToHaskellMarshaller
) where
-- common libraries
import Data.Map as Map
import Data.Word
import Foreign.C as C
import Foreign.Marshal as C
import Foreign.Ptr as C
import Foreign.ForeignPtr as C
import Foreign.StablePtr as C
import Language.Haskell.TH as TH
-- quasi-quotation libraries
import Language.C.Quote as QC
import Language.C.Quote.C as QC
-- friends
import Language.C.Inline.Error
import Language.C.Inline.State
import Language.C.Inline.TH
-- Determine foreign types
-- -----------------------
-- |Determine the C type that we map a given Haskell type to.
--
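-- For illustration, with the default table below and no custom marshaller
-- registered for the type, the mapping behaves roughly as in this sketch:
--
-- > Map.lookup ''CInt   (haskellToCTypeMap C11)  ==  Just [cty| int |]
-- > Map.lookup ''String (haskellToCTypeMap C11)  ==  Just [cty| const char * |]
--
-- Boxed Haskell types without an explicit entry are mapped to @HsStablePtr@.
--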
haskellTypeToCType :: QC.Extensions -> TH.Type -> Q (Maybe QC.Type)
haskellTypeToCType lang (ForallT _tvs _ctxt ty) -- ignore quantifiers and contexts
= haskellTypeToCType lang ty
haskellTypeToCType lang ty
= do
{ maybe_marshaller <- lookupMarshaller ty
; case maybe_marshaller of
Just (_, _, cTy, _, _) -> return $ Just cTy -- use a custom marshaller if one is available for this type
Nothing -> haskellTypeToCType' lang ty -- otherwise, continue below...
}
where
haskellTypeToCType' lang' (ListT `AppT` (ConT ch)) -- marshal '[Char]' as 'String'
| ch == ''Char
= haskellTypeNameToCType lang' ''String
haskellTypeToCType' lang' ty'@(ConT maybeC `AppT` argTy) -- encode a 'Maybe' around a pointer type in the pointer
| maybeC == ''Maybe
= do
{ cargTy <- haskellTypeToCType lang' argTy
; if fmap isCPtrType cargTy == Just True
then
return cargTy
else
unknownType lang' ty'
}
haskellTypeToCType' lang' (ConT tc) -- nullary type constructors are delegated
= haskellTypeNameToCType lang' tc
haskellTypeToCType' lang' ty'@(VarT _) -- can't marshal an unknown type
= unknownType lang' ty'
haskellTypeToCType' lang' ty'@(UnboxedTupleT _) -- there is nothing like unboxed tuples in C
= unknownType lang' ty'
haskellTypeToCType' _lang _ty -- everything else is marshalled as a stable pointer
= return $ Just [cty| typename HsStablePtr |]
unknownType lang' _ty
= do
{ reportErrorWithLang lang' $ "don't know a foreign type suitable for Haskell type '" ++ TH.pprint ty ++ "'"
; return Nothing
}
-- |Determine the C type that we map a given Haskell type constructor to — i.e., we map all Haskell types
-- whose outermost constructor is the given type constructor to the returned C type.
--
-- All types representing boxed values that are not explicitly mapped to a specific C type are mapped to
-- stable pointers.
--
haskellTypeNameToCType :: QC.Extensions -> TH.Name -> Q (Maybe QC.Type)
haskellTypeNameToCType ext tyname
= case Map.lookup tyname (haskellToCTypeMap ext) of
Just cty' -> return $ Just cty'
Nothing -> do
{ info <- reify tyname
; case info of
PrimTyConI _ _ True -> unknownUnboxedType
_ -> return $ Just [cty| typename HsStablePtr |]
}
where
unknownUnboxedType = do
{ reportErrorWithLang ext $
"don't know a foreign type suitable for the unboxed Haskell type '" ++ show tyname ++ "'"
; return Nothing
}
haskellToCTypeMap :: QC.Extensions -> Map TH.Name QC.Type
haskellToCTypeMap C11
= Map.fromList
[ (''CChar, [cty| char |])
, (''CSChar, [cty| signed char |])
, (''CUChar, [cty| unsigned char |])
, (''CShort, [cty| short |])
, (''CUShort, [cty| unsigned short |])
, (''Int, [cty| int |])
, (''CInt, [cty| int |])
, (''Word, [cty| unsigned int |])
, (''CUInt, [cty| unsigned int |])
, (''CLong, [cty| long |])
, (''CULong, [cty| unsigned long |])
, (''CLLong, [cty| long long |])
, (''CULLong, [cty| unsigned long long |])
--
, (''Float, [cty| float |])
, (''CFloat, [cty| float |])
, (''Double, [cty| double |])
, (''CDouble, [cty| double |])
--
, (''Bool, [cty| typename BOOL |])
, (''String, [cty| const char * |])
, (''(), [cty| void |])
]
haskellToCTypeMap _lang
= Map.empty
-- Check whether the given C type is an overt pointer.
--
isCPtrType :: QC.Type -> Bool
isCPtrType (Type _ (Ptr {}) _) = True
isCPtrType (Type _ (BlockPtr {}) _) = True
isCPtrType (Type _ (Array {}) _) = True
isCPtrType ty
| ty == [cty| typename HsStablePtr |] = True
| otherwise = False
-- Determine marshallers and their bridging types
-- ----------------------------------------------
-- |Constructs Haskell code to marshal a value (used to marshal arguments and results).
--
-- * The first argument is the code referring to the value to be marshalled.
-- * The second argument is the continuation that gets the marshalled value as an argument.
--
type HaskellMarshaller = TH.ExpQ -> TH.ExpQ -> TH.ExpQ
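-- For example, the 'String' case further below uses a marshaller of exactly
-- this shape (the binding name here is only illustrative):
--
-- > withCStringMarshaller :: HaskellMarshaller
-- > withCStringMarshaller = \val cont -> [| C.withCString $val $cont |]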
-- |Constructs C code to marshal an argument (used to marshal arguments and results).
--
-- * The argument is the identifier of the value to be marshalled.
-- * The result of the generated expression is the marshalled value.
--
type CMarshaller = TH.Name -> QC.Exp
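-- For example, the pass-through marshaller used repeatedly below (again, the
-- binding name is only illustrative):
--
-- > passThrough :: CMarshaller
-- > passThrough = \argName -> [cexp| $id:(show argName) |]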
-- |Generate the type-specific marshalling code for Haskell to C land marshalling for a Haskell-C type pair.
--
-- The result has the following components:
--
-- * Haskell type after Haskell-side marshalling.
-- * C type before C-side marshalling.
-- * Generator for the Haskell-side marshalling code.
-- * Generator for the C-side marshalling code.
--
generateHaskellToCMarshaller :: TH.Type -> QC.Type -> Q (TH.TypeQ, QC.Type, HaskellMarshaller, CMarshaller)
generateHaskellToCMarshaller hsTy cTy@(Type (DeclSpec _ _ (Tnamed (Id name _) _ _) _) (Ptr _ (DeclRoot _) _) _)
| Just name == maybeHeadName -- wrapped ForeignPtr mapped to an Objective-C class
= return ( ptrOfForeignPtrWrapper hsTy
, cTy
, \val cont -> [| C.withForeignPtr ($(unwrapForeignPtrWrapper hsTy) $val) $cont |]
, \argName -> [cexp| $id:(show argName) |]
)
| otherwise
= do
{ maybe_marshaller <- lookupMarshaller hsTy
; case maybe_marshaller of
Just (_, classTy, cTy', haskellToC, _cToHaskell)
| cTy' == cTy -- custom marshaller mapping to an Objective-C class
-> return ( ptrOfForeignPtrWrapper classTy
, cTy
, \val cont -> [| do
{ nsClass <- $(varE haskellToC) $val
; C.withForeignPtr ($(unwrapForeignPtrWrapper classTy) nsClass) $cont
} |]
, \argName -> [cexp| $id:(show argName) |]
)
Nothing -- other => continue below
-> generateHaskellToCMarshaller' hsTy cTy
}
where
maybeHeadName = fmap nameBase $ headTyConName hsTy
generateHaskellToCMarshaller hsTy cTy = generateHaskellToCMarshaller' hsTy cTy
generateHaskellToCMarshaller' :: TH.Type -> QC.Type -> Q (TH.TypeQ, QC.Type, HaskellMarshaller, CMarshaller)
generateHaskellToCMarshaller' hsTy@(ConT mbe `AppT` argTy) cTy
| mbe == ''Maybe && isCPtrType cTy
= do
{ (argTy', cTy', hsMarsh, cMarsh) <- generateHaskellToCMarshaller argTy cTy
; ty <- argTy'
; resolve ty argTy' cTy' hsMarsh cMarsh
}
where
resolve ty argTy' cTy' hsMarsh cMarsh
= case ty of
ConT ptr `AppT` _
| ptr == ''C.Ptr -> return ( argTy'
, cTy'
, \val cont -> [| case $val of
Nothing -> $cont C.nullPtr
Just val' -> $(hsMarsh [|val'|] cont) |]
, cMarsh
)
| ptr == ''C.StablePtr -> return ( argTy'
, cTy'
, \val cont -> [| case $val of
Nothing -> $cont (C.castPtrToStablePtr C.nullPtr)
Just val' -> $(hsMarsh [|val'|] cont) |]
-- NB: the above cast works for GHC, but is in the grey area
-- of the FFI spec
, cMarsh
)
ConT con
-> do
{ info <- reify con
; case info of
TyConI (TySynD _name [] tysyn) -> resolve tysyn argTy' cTy' hsMarsh cMarsh
-- chase type synonyms (only nullary ones at the moment)
_ -> missingErr
}
_ -> missingErr
missingErr = reportErrorAndFail C11 $
"missing 'Maybe' marshalling for '" ++ prettyQC cTy ++ "' to '" ++ TH.pprint hsTy ++ "'"
generateHaskellToCMarshaller' hsTy cTy
| Just hsMarshalTy <- Map.lookup cTy cIntegralMap -- checking whether it is an integral type
= return ( hsMarshalTy
, cTy
, \val cont -> [| $cont (fromIntegral $val) |]
, \argName -> [cexp| $id:(show argName) |]
)
| Just hsMarshalTy <- Map.lookup cTy cFloatingMap -- checking whether it is a floating type
= return ( hsMarshalTy
, cTy
, \val cont -> [| $cont (realToFrac $val) |]
, \argName -> [cexp| $id:(show argName) |]
)
| cTy == [cty| typename BOOL |]
= return ( [t| C.CSChar |]
, cTy
, \val cont -> [| $cont (C.fromBool $val) |]
, \argName -> [cexp| ($id:(show argName)) |]
)
| cTy == [cty| const char * |]
= return ( [t| C.CString |]
, [cty| const char * |]
, \val cont -> [| C.withCString $val $cont |]
, \argName -> [cexp| ($id:(show argName)) |]
)
| cTy == [cty| typename HsStablePtr |]
= return ( [t| C.StablePtr $(return hsTy) |]
, cTy
, \val cont -> [| do { C.newStablePtr $val >>= $cont } |]
, \argName -> [cexp| $id:(show argName) |]
)
| otherwise
= reportErrorAndFail C11 $ "cannot marshal '" ++ TH.pprint hsTy ++ "' to '" ++ prettyQC cTy ++ "'"
-- |Generate the type-specific marshalling code for Haskell to C land marshalling for a C-Haskell type pair.
--
-- The result has the following components:
--
-- * Haskell type after Haskell-side marshalling.
-- * C type before C-side marshalling.
-- * Generator for the Haskell-side marshalling code.
-- * Generator for the C-side marshalling code.
--
generateCToHaskellMarshaller :: TH.Type -> QC.Type -> Q (TH.TypeQ, QC.Type, HaskellMarshaller, CMarshaller)
generateCToHaskellMarshaller hsTy cTy@(Type (DeclSpec _ _ (Tnamed (Id name _) _ _) _) (Ptr _ (DeclRoot _) _) _)
| Just name == maybeHeadName -- ForeignPtr mapped to an Objective-C class
= return ( ptrOfForeignPtrWrapper hsTy
, cTy
, \val cont -> do { let datacon = foreignWrapperDatacon hsTy
; [| do { fptr <- newForeignPtr_ $val; $cont ($datacon fptr) } |]
}
, \argName -> [cexp| $id:(show argName) |]
)
| otherwise
= do
{ maybe_marshaller <- lookupMarshaller hsTy
; case maybe_marshaller of
Just (_, classTy, cTy', _haskellToC, cToHaskell)
| cTy' == cTy -- custom marshaller mapping to an Objective-C class
-> return ( ptrOfForeignPtrWrapper classTy
, cTy
, \val cont -> do { let datacon = foreignWrapperDatacon classTy
; [| do
{ fptr <- newForeignPtr_ $val
; hsVal <- $(varE cToHaskell) ($datacon fptr)
; $cont hsVal
} |]
}
, \argName -> [cexp| $id:(show argName) |]
)
Nothing -- other => continue below
-> generateCToHaskellMarshaller' hsTy cTy
}
where
maybeHeadName = fmap nameBase $ headTyConName hsTy
generateCToHaskellMarshaller hsTy cTy = generateCToHaskellMarshaller' hsTy cTy
generateCToHaskellMarshaller' :: TH.Type -> QC.Type -> Q (TH.TypeQ, QC.Type, HaskellMarshaller, CMarshaller)
generateCToHaskellMarshaller' hsTy@(ConT mbe `AppT` argTy) cTy
| mbe == ''Maybe && isCPtrType cTy
= do
{ (argTy', cTy', hsMarsh, cMarsh) <- generateCToHaskellMarshaller argTy cTy
; ty <- argTy'
; resolve ty argTy' cTy' hsMarsh cMarsh
}
where
resolve ty argTy' cTy' hsMarsh cMarsh
= case ty of
ConT ptr `AppT` _
| ptr == ''C.Ptr -> return ( argTy'
, cTy'
, \val cont -> [| if $val == C.nullPtr
then $cont Nothing
else $(hsMarsh val [| $cont . Just |]) |]
, cMarsh
)
| ptr == ''C.StablePtr -> return ( argTy'
, cTy'
, \val cont -> [| if (C.castStablePtrToPtr $val) == C.nullPtr
then $cont Nothing
else $(hsMarsh val [| $cont . Just |]) |]
-- NB: the above cast works for GHC, but is in the grey area
-- of the FFI spec
, cMarsh
)
ConT con
-> do
{ info <- reify con
; case info of
TyConI (TySynD _name [] tysyn) -> resolve tysyn argTy' cTy' hsMarsh cMarsh
-- chase type synonyms (only nullary ones at the moment)
_ -> missingErr
}
_ -> missingErr
missingErr = reportErrorAndFail C11 $
"missing 'Maybe' marshalling for '" ++ prettyQC cTy ++ "' to '" ++ TH.pprint hsTy ++ "'"
generateCToHaskellMarshaller' hsTy cTy
| Just hsMarshalTy <- Map.lookup cTy cIntegralMap -- checking whether it is an integral type
= return ( hsMarshalTy
, cTy
, \val cont -> [| $cont (fromIntegral $val) |]
, \argName -> [cexp| $id:(show argName) |]
)
| Just hsMarshalTy <- Map.lookup cTy cFloatingMap -- checking whether it is a floating type
= return ( hsMarshalTy
, cTy
, \val cont -> [| $cont (realToFrac $val) |]
, \argName -> [cexp| $id:(show argName) |]
)
| cTy == [cty| typename BOOL |]
= return ( [t| C.CSChar |]
, cTy
, \val cont -> [| $cont (C.toBool $val) |]
, \argName -> [cexp| $id:(show argName) |]
)
{-
| cTy == [cty| typename CString * |]
= return ( [t| C.CString |]
, [cty| char * |]
, \val cont -> [| do { str <- C.peekCString $val; C.free $val; $cont str } |]
, \argName ->
let arg = show argName
in
[cexp|
( $id:arg )
? ({ typename NSUInteger maxLen = [$id:arg maximumLengthOfBytesUsingEncoding:NSUTF8StringEncoding] + 1;
char *buffer = malloc (maxLen);
if (![$id:arg getCString:buffer maxLength:maxLen encoding:NSUTF8StringEncoding])
*buffer = '\0';
buffer;
})
: nil
|]
)
-}
| cTy == [cty| const char * |]
= return ( [t| C.CString |]
, [cty| const char * |]
-- , \val cont -> [| C.withCString $val $cont |]
, \val cont -> [| C.peekCString $val >>= $cont |]
, \argName -> [cexp| ($id:(show argName)) |]
)
| cTy == [cty| typename HsStablePtr |]
= return ( [t| C.StablePtr $(return hsTy) |]
, cTy
, \val cont -> [| do { C.deRefStablePtr $val >>= $cont } |]
, \argName -> [cexp| $id:(show argName) |]
)
| cTy == [cty| void |]
= return ( [t| () |]
, [cty| void |]
, \val cont -> [| $cont $val |]
, \argName -> [cexp| $id:(show argName) |]
)
| otherwise
  = reportErrorAndFail C11 $ "cannot marshal '" ++ prettyQC cTy ++ "' to '" ++ TH.pprint hsTy ++ "'"
cIntegralMap :: Map QC.Type TypeQ
cIntegralMap = Map.fromList
[ ([cty| char |], [t| C.CChar |])
, ([cty| signed char |], [t| C.CChar |])
, ([cty| unsigned char |], [t| C.CUChar |])
, ([cty| short |], [t| C.CShort |])
, ([cty| unsigned short |], [t| C.CUShort |])
, ([cty| int |], [t| C.CInt |])
, ([cty| unsigned int |], [t| C.CUInt |])
, ([cty| long |], [t| C.CLong |])
, ([cty| unsigned long |], [t| C.CULong |])
, ([cty| long long |], [t| C.CLLong |])
, ([cty| unsigned long long |], [t| C.CULLong |])
]
cFloatingMap :: Map QC.Type TypeQ
cFloatingMap = Map.fromList
[ ([cty| float |] , [t| C.CFloat |])
, ([cty| double |], [t| C.CDouble |])
]
| beni55/language-c-inline | Language/C/Inline/C/Marshal.hs | bsd-3-clause | 19,319 | 14 | 20 | 7,462 | 3,617 | 2,142 | 1,475 | 285 | 8 |
{-# LANGUAGE TemplateHaskell, TypeOperators, MultiParamTypeClasses,
FlexibleInstances, FlexibleContexts, UndecidableInstances, GADTs,
ConstraintKinds #-}
--------------------------------------------------------------------------------
-- |
-- Module : Examples.Multi.Desugar
-- Copyright : (c) 2011 Patrick Bahr, Tom Hvitved
-- License : BSD3
-- Maintainer : Tom Hvitved <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC Extensions)
--
-- Desugaring
--
-- The example illustrates how to compose a term homomorphism and an algebra,
-- exemplified via a desugaring term homomorphism and an evaluation algebra.
-- The example also illustrates how to lift a term homomorphism to products,
-- exemplified via a desugaring term homomorphism lifted to terms annotated with
-- source position information.
--
--------------------------------------------------------------------------------
module Examples.Multi.Desugar where
import Data.Comp.Multi
import Data.Comp.Multi.Derive
import Data.Comp.Multi.Desugar
import Examples.Multi.Common
import Examples.Multi.Eval
-- Signature for syntactic sugar
data Sugar a i where
Neg :: a Int -> Sugar a Int
Swap :: a (i,j) -> Sugar a (j,i)
-- Source position information (line number, column number)
data Pos = Pos Int Int
deriving (Eq, Show)
-- Signature for the simple expression language
type SigP = Op :&: Pos :+: Value :&: Pos
-- Signature for the simple expression language, extended with syntactic sugar
type Sig' = Sugar :+: Op :+: Value
type SigP' = Sugar :&: Pos :+: Op :&: Pos :+: Value :&: Pos
-- Derive boilerplate code using Template Haskell (GHC 7 needed)
$(derive [makeHFunctor, makeHTraversable, makeHFoldable, makeEqHF, makeShowHF,
makeOrdHF, smartConstructors, smartAConstructors]
[''Sugar])
instance (Op :<: v, Value :<: v, HFunctor v) => Desugar Sugar v where
desugHom' (Neg x) = iConst (-1) `iMult` x
desugHom' (Swap x) = iSnd x `iPair` iFst x
-- Compose the evaluation algebra and the desugaring homomorphism to an
-- algebra
evalDesug :: Term Sig' :-> Term Value
evalDesug = cata (evalAlg `compAlg` (desugHom :: Hom Sig' Sig))
-- Example: evalEx = iPair (iConst 2) (iConst 1)
evalEx :: Term Value (Int,Int)
evalEx = evalDesug $ iSwap $ iPair (iConst 1) (iConst 2)
-- Example: desugPEx = iAPair (Pos 1 0)
-- (iASnd (Pos 1 0) (iAPair (Pos 1 1)
-- (iAConst (Pos 1 2) 1)
-- (iAConst (Pos 1 3) 2)))
-- (iAFst (Pos 1 0) (iAPair (Pos 1 1)
-- (iAConst (Pos 1 2) 1)
-- (iAConst (Pos 1 3) 2)))
desugPEx :: Term SigP (Int,Int)
desugPEx = desugarA (iASwap (Pos 1 0) (iAPair (Pos 1 1) (iAConst (Pos 1 2) 1)
(iAConst (Pos 1 3) 2))
:: Term SigP' (Int,Int))
| spacekitteh/compdata | examples/Examples/Multi/Desugar.hs | bsd-3-clause | 3,046 | 0 | 13 | 814 | 528 | 305 | 223 | 31 | 1 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
\section[WwLib]{A library for the ``worker\/wrapper'' back-end to the strictness analyser}
-}
{-# LANGUAGE CPP #-}
module ETA.StrAnal.WwLib ( mkWwBodies, mkWWstr, mkWorkerArgs
, deepSplitProductType_maybe, findTypeShape
) where
#include "HsVersions.h"
import ETA.Core.CoreSyn
import ETA.Core.CoreUtils ( exprType, mkCast )
import ETA.BasicTypes.Id ( Id, idType, mkSysLocal, idDemandInfo, setIdDemandInfo,
setIdUnfolding,
setIdInfo, idOneShotInfo, setIdOneShotInfo
)
import ETA.BasicTypes.IdInfo ( vanillaIdInfo )
import ETA.BasicTypes.DataCon
import ETA.BasicTypes.Demand
import ETA.Core.MkCore ( mkRuntimeErrorApp, aBSENT_ERROR_ID )
import ETA.BasicTypes.MkId ( voidArgId, voidPrimId )
import ETA.Prelude.TysPrim ( voidPrimTy )
import ETA.Prelude.TysWiredIn ( tupleCon )
import ETA.Types.Type
import ETA.Types.Coercion hiding ( substTy, substTyVarBndr )
import ETA.Types.FamInstEnv
import ETA.BasicTypes.BasicTypes ( TupleSort(..), OneShotInfo(..), worstOneShot )
import ETA.BasicTypes.Literal ( absentLiteralOf )
import ETA.Types.TyCon
import ETA.BasicTypes.UniqSupply
import ETA.BasicTypes.Unique
import ETA.Utils.Maybes
import ETA.Utils.Util
import ETA.Utils.Outputable
import ETA.Main.DynFlags
import ETA.Utils.FastString
{-
************************************************************************
* *
\subsection[mkWrapperAndWorker]{@mkWrapperAndWorker@}
* *
************************************************************************
Here's an example. The original function is:
\begin{verbatim}
g :: forall a . Int -> [a] -> a
g = \/\ a -> \ x ys ->
case x of
0 -> head ys
_ -> head (tail ys)
\end{verbatim}
From this, we want to produce:
\begin{verbatim}
-- wrapper (an unfolding)
g :: forall a . Int -> [a] -> a
g = \/\ a -> \ x ys ->
case x of
I# x# -> $wg a x# ys
-- call the worker; don't forget the type args!
-- worker
$wg :: forall a . Int# -> [a] -> a
$wg = \/\ a -> \ x# ys ->
let
x = I# x#
in
case x of -- note: body of g moved intact
0 -> head ys
_ -> head (tail ys)
\end{verbatim}
Something we have to be careful about: Here's an example:
\begin{verbatim}
-- "f" strictness: U(P)U(P)
f (I# a) (I# b) = a +# b
g = f -- "g" strictness same as "f"
\end{verbatim}
\tr{f} will get a worker all nice and friendly-like; that's good.
{\em But we don't want a worker for \tr{g}}, even though it has the
same strictness as \tr{f}. Doing so could break laziness, at best.
Consequently, we insist that the number of strictness-info items is
exactly the same as the number of lambda-bound arguments. (This is
probably slightly paranoid, but OK in practice.) If it isn't the
same, we ``revise'' the strictness info, so that we won't propagate
the unusable strictness-info into the interfaces.
************************************************************************
* *
\subsection{The worker wrapper core}
* *
************************************************************************
@mkWwBodies@ is called when doing the worker\/wrapper split inside a module.
-}
mkWwBodies :: DynFlags
-> FamInstEnvs
-> Type -- Type of original function
-> [Demand] -- Strictness of original function
-> DmdResult -- Info about function result
-> [OneShotInfo] -- One-shot-ness of the function, value args only
-> UniqSM (Maybe ([Demand], -- Demands for worker (value) args
Id -> CoreExpr, -- Wrapper body, lacking only the worker Id
CoreExpr -> CoreExpr)) -- Worker body, lacking the original function rhs
-- wrap_fn_args E = \x y -> E
-- work_fn_args E = E x y
-- wrap_fn_str E = case x of { (a,b) ->
-- case a of { (a1,a2) ->
-- E a1 a2 b y }}
-- work_fn_str E = \a2 a2 b y ->
-- let a = (a1,a2) in
-- let x = (a,b) in
-- E
mkWwBodies dflags fam_envs fun_ty demands res_info one_shots
= do { let arg_info = demands `zip` (one_shots ++ repeat NoOneShotInfo)
all_one_shots = foldr (worstOneShot . snd) OneShotLam arg_info
; (wrap_args, wrap_fn_args, work_fn_args, res_ty) <- mkWWargs emptyTvSubst fun_ty arg_info
; (useful1, work_args, wrap_fn_str, work_fn_str) <- mkWWstr dflags fam_envs wrap_args
-- Do CPR w/w. See Note [Always do CPR w/w]
; (useful2, wrap_fn_cpr, work_fn_cpr, cpr_res_ty) <- mkWWcpr fam_envs res_ty res_info
; let (work_lam_args, work_call_args) = mkWorkerArgs dflags work_args all_one_shots cpr_res_ty
worker_args_dmds = [idDemandInfo v | v <- work_call_args, isId v]
wrapper_body = wrap_fn_args . wrap_fn_cpr . wrap_fn_str . applyToVars work_call_args . Var
worker_body = mkLams work_lam_args. work_fn_str . work_fn_cpr . work_fn_args
; if useful1 && not (only_one_void_argument) || useful2
then return (Just (worker_args_dmds, wrapper_body, worker_body))
else return Nothing
}
-- We use an INLINE unconditionally, even if the wrapper turns out to be
-- something trivial like
-- fw = ...
-- f = __inline__ (coerce T fw)
-- The point is to propagate the coerce to f's call sites, so even though
-- f's RHS is now trivial (size 1) we still want the __inline__ to prevent
-- fw from being inlined into f's RHS
where
-- Note [Do not split void functions]
only_one_void_argument
| [d] <- demands
, Just (arg_ty1, _) <- splitFunTy_maybe fun_ty
, isAbsDmd d && isVoidTy arg_ty1
= True
| otherwise
= False
{-
Note [Always do CPR w/w]
~~~~~~~~~~~~~~~~~~~~~~~~
At one time we refrained from doing CPR w/w for thunks, on the grounds that
we might duplicate work. But that is already handled by the demand analyser,
which doesn't give the CPR property if w/w might waste work: see
Note [CPR for thunks] in DmdAnal.
And if something *has* been given the CPR property and we don't w/w, it's
a disaster, because then the enclosing function might say it has the CPR
property, but now doesn't, and there's a cascade of disaster. A good example
is Trac #5920.
************************************************************************
* *
\subsection{Making wrapper args}
* *
************************************************************************
During worker-wrapper stuff we may end up with an unlifted thing
which we want to let-bind without losing laziness. So we
add a void argument. E.g.
f = /\a -> \x y z -> E::Int# -- E does not mention x,y,z
==>
fw = /\ a -> \void -> E
f = /\ a -> \x y z -> fw realworld
We use the state-token type which generates no code.
-}
mkWorkerArgs :: DynFlags -> [Var]
-> OneShotInfo -- Whether all arguments are one-shot
-> Type -- Type of body
-> ([Var], -- Lambda bound args
[Var]) -- Args at call site
mkWorkerArgs dflags args all_one_shot res_ty
| any isId args || not needsAValueLambda
= (args, args)
| otherwise
= (args ++ [newArg], args ++ [voidPrimId])
where
needsAValueLambda =
isUnLiftedType res_ty
|| not (gopt Opt_FunToThunk dflags)
-- see Note [Protecting the last value argument]
-- see Note [All One-Shot Arguments of a Worker]
newArg = setIdOneShotInfo voidArgId all_one_shot
{-
Note [Protecting the last value argument]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the user writes (\_ -> E), they might be intentionally disallowing
the sharing of E. Since absence analysis and worker-wrapper are keen
to remove such unused arguments, we add in a void argument to prevent
the function from becoming a thunk.
The user can avoid adding the void argument with the -ffun-to-thunk
flag. However, this can create sharing, which may be bad in two ways. 1) It can
create a space leak. 2) It can prevent inlining *under a lambda*. If w/w
removes the last argument from a function f, then f now looks like a thunk, and
so f can't be inlined *under a lambda*.
Note [All One-Shot Arguments of a Worker]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Sometimes, derived join-points are just lambda-lifted thunks, whose
only argument is of the unit type and is never used. This might
interfere with the absence analysis, based on whose results these
never-used arguments are eliminated in the worker. The additional
argument `all_one_shot` of `mkWorkerArgs` is to prevent this.
Example. Suppose we have
foo = \p(one-shot) q(one-shot). y + 3
Then we drop the unused args to give
foo = \pq. $wfoo void#
$wfoo = \void(one-shot). y + 3
But suppose foo didn't have all one-shot args:
foo = \p(not-one-shot) q(one-shot). expensive y + 3
Then we drop the unused args to give
foo = \pq. $wfoo void#
$wfoo = \void(not-one-shot). y + 3
If we made the void-arg one-shot we might inline an expensive
computation for y, which would be terrible!
************************************************************************
* *
\subsection{Coercion stuff}
* *
************************************************************************
We really want to "look through" coerces.
Reason: I've seen this situation:
let f = coerce T (\s -> E)
in \x -> case x of
p -> coerce T' f
q -> \s -> E2
r -> coerce T' f
If only we w/w'd f, we'd get
let f = coerce T (\s -> fw s)
fw = \s -> E
in ...
Now we'll inline f to get
let fw = \s -> E
in \x -> case x of
p -> fw
q -> \s -> E2
r -> fw
Now we'll see that fw has arity 1, and will arity expand
the \x to get what we want.
-}
-- mkWWargs just does eta expansion
-- is driven off the function type and arity.
-- It chomps bites off foralls, arrows, newtypes
-- and keeps repeating that until it's satisfied the supplied arity
mkWWargs :: TvSubst -- Freshening substitution to apply to the type
-- See Note [Freshen type variables]
-> Type -- The type of the function
-> [(Demand,OneShotInfo)] -- Demands and one-shot info for value arguments
-> UniqSM ([Var], -- Wrapper args
CoreExpr -> CoreExpr, -- Wrapper fn
CoreExpr -> CoreExpr, -- Worker fn
Type) -- Type of wrapper body
mkWWargs subst fun_ty arg_info
| null arg_info
= return ([], id, id, substTy subst fun_ty)
| ((dmd,one_shot):arg_info') <- arg_info
, Just (arg_ty, fun_ty') <- splitFunTy_maybe fun_ty
= do { uniq <- getUniqueM
; let arg_ty' = substTy subst arg_ty
id = mk_wrap_arg uniq arg_ty' dmd one_shot
; (wrap_args, wrap_fn_args, work_fn_args, res_ty)
<- mkWWargs subst fun_ty' arg_info'
; return (id : wrap_args,
Lam id . wrap_fn_args,
work_fn_args . (`App` varToCoreExpr id),
res_ty) }
| Just (tv, fun_ty') <- splitForAllTy_maybe fun_ty
= do { let (subst', tv') = substTyVarBndr subst tv
                 -- This substTyVarBndr clones the type variable when necessary
-- See Note [Freshen type variables]
; (wrap_args, wrap_fn_args, work_fn_args, res_ty)
<- mkWWargs subst' fun_ty' arg_info
; return (tv' : wrap_args,
Lam tv' . wrap_fn_args,
work_fn_args . (`App` Type (mkTyVarTy tv')),
res_ty) }
| Just (co, rep_ty) <- topNormaliseNewType_maybe fun_ty
-- The newtype case is for when the function has
-- a newtype after the arrow (rare)
--
-- It's also important when we have a function returning (say) a pair
-- wrapped in a newtype, at least if CPR analysis can look
-- through such newtypes, which it probably can since they are
-- simply coerces.
= do { (wrap_args, wrap_fn_args, work_fn_args, res_ty)
<- mkWWargs subst rep_ty arg_info
; return (wrap_args,
\e -> Cast (wrap_fn_args e) (mkSymCo co),
\e -> work_fn_args (Cast e co),
res_ty) }
| otherwise
= WARN( True, ppr fun_ty ) -- Should not happen: if there is a demand
return ([], id, id, substTy subst fun_ty) -- then there should be a function arrow
applyToVars :: [Var] -> CoreExpr -> CoreExpr
applyToVars vars fn = mkVarApps fn vars
mk_wrap_arg :: Unique -> Type -> Demand -> OneShotInfo -> Id
mk_wrap_arg uniq ty dmd one_shot
= mkSysLocal (fsLit "w") uniq ty
`setIdDemandInfo` dmd
`setIdOneShotInfo` one_shot
{-
Note [Freshen type variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we do a worker/wrapper split, we must not use shadowed names,
else we'll get
f = /\ a /\a. fw a a
which is obviously wrong. Type variables can in principle shadow,
within a type (e.g. forall a. a -> forall a. a->a). But type
variables *are* mentioned in <blah>, so we must substitute.
That's why we carry the TvSubst through mkWWargs
************************************************************************
* *
\subsection{Strictness stuff}
* *
************************************************************************
-}
mkWWstr :: DynFlags
-> FamInstEnvs
-> [Var] -- Wrapper args; have their demand info on them
-- *Includes type variables*
-> UniqSM (Bool, -- Is this useful
[Var], -- Worker args
CoreExpr -> CoreExpr, -- Wrapper body, lacking the worker call
-- and without its lambdas
-- This fn adds the unboxing
CoreExpr -> CoreExpr) -- Worker body, lacking the original body of the function,
-- and lacking its lambdas.
-- This fn does the reboxing
mkWWstr _ _ []
= return (False, [], nop_fn, nop_fn)
mkWWstr dflags fam_envs (arg : args) = do
(useful1, args1, wrap_fn1, work_fn1) <- mkWWstr_one dflags fam_envs arg
(useful2, args2, wrap_fn2, work_fn2) <- mkWWstr dflags fam_envs args
return (useful1 || useful2, args1 ++ args2, wrap_fn1 . wrap_fn2, work_fn1 . work_fn2)
{-
Note [Unpacking arguments with product and polymorphic demands]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The argument is unpacked in a case if it has a product type and has a
strict *and* used demand put on it. I.e., arguments, with demands such
as the following ones:
<S,U(U, L)>
<S(L,S),U>
will be unpacked, but
<S,U> or <B,U>
will not, because the pieces aren't used. This is quite important otherwise
we end up unpacking massive tuples passed to the bottoming function. Example:
f :: ((Int,Int) -> String) -> (Int,Int) -> a
f g pr = error (g pr)
main = print (f fst (1, error "no"))
Does 'main' print "error 1" or "error no"? We don't really want 'f'
to unbox its second argument. This actually happened in GHC's own
source code, in Packages.applyPackageFlag, which ended up un-boxing
the enormous DynFlags tuple, and being strict in the
as-yet-un-filled-in pkgState fields.
-}
----------------------
-- mkWWstr_one wrap_arg = (useful, work_args, wrap_fn, work_fn)
-- * wrap_fn assumes wrap_arg is in scope,
-- brings into scope work_args (via cases)
-- * work_fn assumes work_args are in scope, a
-- brings into scope wrap_arg (via lets)
mkWWstr_one :: DynFlags -> FamInstEnvs -> Var
-> UniqSM (Bool, [Var], CoreExpr -> CoreExpr, CoreExpr -> CoreExpr)
mkWWstr_one dflags fam_envs arg
| isTyVar arg
= return (False, [arg], nop_fn, nop_fn)
-- See Note [Worker-wrapper for bottoming functions]
| isAbsDmd dmd
, Just work_fn <- mk_absent_let dflags arg
-- Absent case. We can't always handle absence for arbitrary
-- unlifted types, so we need to choose just the cases we can
--- (that's what mk_absent_let does)
= return (True, [], nop_fn, work_fn)
-- See Note [Worthy functions for Worker-Wrapper split]
| isSeqDmd dmd -- `seq` demand; evaluate in wrapper in the hope
-- of dropping seqs in the worker
= let arg_w_unf = arg `setIdUnfolding` evaldUnfolding
-- Tell the worker arg that it's sure to be evaluated
-- so that internal seqs can be dropped
in return (True, [arg_w_unf], mk_seq_case arg, nop_fn)
-- Pass the arg, anyway, even if it is in theory discarded
-- Consider
-- f x y = x `seq` y
-- x gets a (Eval (Poly Abs)) demand, but if we fail to pass it to the worker
-- we ABSOLUTELY MUST record that x is evaluated in the wrapper.
-- Something like:
-- f x y = x `seq` fw y
-- fw y = let x{Evald} = error "oops" in (x `seq` y)
-- If we don't pin on the "Evald" flag, the seq doesn't disappear, and
-- we end up evaluating the absent thunk.
-- But the Evald flag is pretty weird, and I worry that it might disappear
-- during simplification, so for now I've just nuked this whole case
| isStrictDmd dmd
, Just cs <- splitProdDmd_maybe dmd
-- See Note [Unpacking arguments with product and polymorphic demands]
, Just (data_con, inst_tys, inst_con_arg_tys, co)
<- deepSplitProductType_maybe fam_envs (idType arg)
, cs `equalLength` inst_con_arg_tys
-- See Note [mkWWstr and unsafeCoerce]
= do { (uniq1:uniqs) <- getUniquesM
; let unpk_args = zipWith mk_ww_local uniqs inst_con_arg_tys
unpk_args_w_ds = zipWithEqual "mkWWstr" set_worker_arg_info unpk_args cs
unbox_fn = mkUnpackCase (Var arg) co uniq1
data_con unpk_args
rebox_fn = Let (NonRec arg con_app)
con_app = mkConApp2 data_con inst_tys unpk_args `mkCast` mkSymCo co
; (_, worker_args, wrap_fn, work_fn) <- mkWWstr dflags fam_envs unpk_args_w_ds
; return (True, worker_args, unbox_fn . wrap_fn, work_fn . rebox_fn) }
-- Don't pass the arg, rebox instead
| otherwise -- Other cases
= return (False, [arg], nop_fn, nop_fn)
where
dmd = idDemandInfo arg
one_shot = idOneShotInfo arg
-- If the wrapper argument is a one-shot lambda, then
-- so should (all) the corresponding worker arguments be
-- This bites when we do w/w on a case join point
set_worker_arg_info worker_arg demand
= worker_arg `setIdDemandInfo` demand
`setIdOneShotInfo` one_shot
----------------------
nop_fn :: CoreExpr -> CoreExpr
nop_fn body = body
{-
Note [mkWWstr and unsafeCoerce]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
By using unsafeCoerce, it is possible to make the number of demands fail to
match the number of constructor arguments; this happened in Trac #8037.
If so, the worker/wrapper split doesn't work right and we get a Core Lint
bug. The fix here is simply to decline to do w/w if that happens.
************************************************************************
* *
Type scrutiny that is specific to demand analysis
* *
************************************************************************
Note [Do not unpack class dictionaries]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have
f :: Ord a => [a] -> Int -> a
{-# INLINABLE f #-}
and we worker/wrapper f, we'll get a worker with an INLINABLE pragma
(see Note [Worker-wrapper for INLINABLE functions] in WorkWrap), which
can still be specialised by the type-class specialiser, something like
fw :: Ord a => [a] -> Int# -> a
BUT if f is strict in the Ord dictionary, we might unpack it, to get
fw :: (a->a->Bool) -> [a] -> Int# -> a
and the type-class specialiser can't specialise that. An example is
Trac #6056.
Moreover, dictionaries can have a lot of fields, so unpacking them can
increase closure sizes.
Conclusion: don't unpack dictionaries.
-}
deepSplitProductType_maybe :: FamInstEnvs -> Type -> Maybe (DataCon, [Type], [Type], Coercion)
-- If deepSplitProductType_maybe ty = Just (dc, tys, arg_tys, co)
-- then dc @ tys (args::arg_tys) :: rep_ty
-- co :: ty ~ rep_ty
deepSplitProductType_maybe fam_envs ty
| let (co, ty1) = topNormaliseType_maybe fam_envs ty
`orElse` (mkReflCo Representational ty, ty)
, Just (tc, tc_args) <- splitTyConApp_maybe ty1
, Just con <- isDataProductTyCon_maybe tc
, not (isClassTyCon tc) -- See Note [Do not unpack class dictionaries]
= Just (con, tc_args, dataConInstArgTys con tc_args, co)
deepSplitProductType_maybe _ _ = Nothing
deepSplitCprType_maybe :: FamInstEnvs -> ConTag -> Type -> Maybe (DataCon, [Type], [Type], Coercion)
-- If deepSplitCprType_maybe n ty = Just (dc, tys, arg_tys, co)
-- then dc @ tys (args::arg_tys) :: rep_ty
-- co :: ty ~ rep_ty
deepSplitCprType_maybe fam_envs con_tag ty
| let (co, ty1) = topNormaliseType_maybe fam_envs ty
`orElse` (mkReflCo Representational ty, ty)
, Just (tc, tc_args) <- splitTyConApp_maybe ty1
, isDataTyCon tc
, let cons = tyConDataCons tc
, cons `lengthAtLeast` con_tag -- This might not be true if we import the
                                   -- type constructor via a .hs-boot file (#8743)
, let con = cons !! (con_tag - fIRST_TAG)
= Just (con, tc_args, dataConInstArgTys con tc_args, co)
deepSplitCprType_maybe _ _ _ = Nothing
findTypeShape :: FamInstEnvs -> Type -> TypeShape
-- Uncover the arrow and product shape of a type
-- The data type TypeShape is defined in Demand
-- See Note [Trimming a demand to a type] in Demand
findTypeShape fam_envs ty
| Just (_, ty') <- splitForAllTy_maybe ty
= findTypeShape fam_envs ty'
| Just (tc, tc_args) <- splitTyConApp_maybe ty
, Just con <- isDataProductTyCon_maybe tc
= TsProd (map (findTypeShape fam_envs) $ dataConInstArgTys con tc_args)
| Just (_, res) <- splitFunTy_maybe ty
= TsFun (findTypeShape fam_envs res)
| Just (_, ty') <- topNormaliseType_maybe fam_envs ty
= findTypeShape fam_envs ty'
| otherwise
= TsUnk
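-- Illustrative example (not exhaustive): for a type such as
--     Int -> (a, Bool)
-- findTypeShape yields, roughly, TsFun (TsProd [TsUnk, TsUnk]): an arrow whose
-- result is a two-field product about whose components nothing more is known.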
{-
************************************************************************
* *
\subsection{CPR stuff}
* *
************************************************************************
@mkWWcpr@ takes the worker/wrapper pair produced from the strictness
info and adds in the CPR transformation. The worker returns an
unboxed tuple containing non-CPR components. The wrapper takes this
tuple and re-produces the correct structured output.
The non-CPR results appear ordered in the unboxed tuple as if by a
left-to-right traversal of the result structure.
-}
mkWWcpr :: FamInstEnvs
-> Type -- function body type
-> DmdResult -- CPR analysis results
-> UniqSM (Bool, -- Is w/w'ing useful?
CoreExpr -> CoreExpr, -- New wrapper
CoreExpr -> CoreExpr, -- New worker
Type) -- Type of worker's body
mkWWcpr fam_envs body_ty res
= case returnsCPR_maybe res of
Nothing -> return (False, id, id, body_ty) -- No CPR info
Just con_tag | Just stuff <- deepSplitCprType_maybe fam_envs con_tag body_ty
-> mkWWcpr_help stuff
| otherwise
-- See Note [non-algebraic or open body type warning]
-> WARN( True, text "mkWWcpr: non-algebraic or open body type" <+> ppr body_ty )
return (False, id, id, body_ty)
mkWWcpr_help :: (DataCon, [Type], [Type], Coercion)
-> UniqSM (Bool, CoreExpr -> CoreExpr, CoreExpr -> CoreExpr, Type)
mkWWcpr_help (data_con, inst_tys, arg_tys, co)
| [arg_ty1] <- arg_tys
, isUnLiftedType arg_ty1
-- Special case when there is a single result of unlifted type
--
-- Wrapper: case (..call worker..) of x -> C x
-- Worker: case ( ..body.. ) of C x -> x
= do { (work_uniq : arg_uniq : _) <- getUniquesM
; let arg = mk_ww_local arg_uniq arg_ty1
con_app = mkConApp2 data_con inst_tys [arg] `mkCast` mkSymCo co
; return ( True
, \ wkr_call -> Case wkr_call arg (exprType con_app) [(DEFAULT, [], con_app)]
, \ body -> mkUnpackCase body co work_uniq data_con [arg] (Var arg)
, arg_ty1 ) }
| otherwise -- The general case
-- Wrapper: case (..call worker..) of (# a, b #) -> C a b
-- Worker: case ( ...body... ) of C a b -> (# a, b #)
= do { (work_uniq : uniqs) <- getUniquesM
; let (wrap_wild : args) = zipWith mk_ww_local uniqs (ubx_tup_ty : arg_tys)
ubx_tup_con = tupleCon UnboxedTuple (length arg_tys)
ubx_tup_ty = exprType ubx_tup_app
ubx_tup_app = mkConApp2 ubx_tup_con arg_tys args
con_app = mkConApp2 data_con inst_tys args `mkCast` mkSymCo co
; return (True
, \ wkr_call -> Case wkr_call wrap_wild (exprType con_app) [(DataAlt ubx_tup_con, args, con_app)]
, \ body -> mkUnpackCase body co work_uniq data_con args ubx_tup_app
, ubx_tup_ty ) }
mkUnpackCase :: CoreExpr -> Coercion -> Unique -> DataCon -> [Id] -> CoreExpr -> CoreExpr
-- (mkUnpackCase e co uniq Con args body)
-- returns
-- case e |> co of bndr { Con args -> body }
mkUnpackCase (Tick tickish e) co uniq con args body -- See Note [Profiling and unpacking]
= Tick tickish (mkUnpackCase e co uniq con args body)
mkUnpackCase scrut co uniq boxing_con unpk_args body
= Case casted_scrut bndr (exprType body)
[(DataAlt boxing_con, unpk_args, body)]
where
casted_scrut = scrut `mkCast` co
bndr = mk_ww_local uniq (exprType casted_scrut)
{-
Note [non-algebraic or open body type warning]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There are a few cases where the W/W transformation is told that something
returns a constructor, but the type at hand doesn't really match this. One
real-world example involves unsafeCoerce:
  foo :: IO a
foo = unsafeCoerce c_exit
foreign import ccall "c_exit" c_exit :: IO ()
Here CPR will tell you that `foo` returns a () constructor for sure, but trying
to create a worker/wrapper for type `a` obviously fails.
(This was a real example until ee8e792 in libraries/base.)
It does not seem feasible to avoid all such cases already in the analyser (and
after all, the analysis is not really wrong), so we simply do nothing here in
mkWWcpr. But we still want to emit warning with -DDEBUG, to hopefully catch
other cases where something went avoidably wrong.
Note [Profiling and unpacking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the original function looked like
f = \ x -> {-# SCC "foo" #-} E
then we want the CPR'd worker to look like
\ x -> {-# SCC "foo" #-} (case E of I# x -> x)
and definitely not
\ x -> case ({-# SCC "foo" #-} E) of I# x -> x)
This transform doesn't move work or allocation
from one cost centre to another.
Later [SDM]: presumably this is because we want the simplifier to
eliminate the case, and the scc would get in the way? I'm ok with
including the case itself in the cost centre, since it is morally
part of the function (post transformation) anyway.
************************************************************************
* *
\subsection{Utilities}
* *
************************************************************************
Note [Absent errors]
~~~~~~~~~~~~~~~~~~~~
We make a new binding for Ids that are marked absent, thus
let x = absentError "x :: Int"
The idea is that this binding will never be used; but if it
buggily is used we'll get a runtime error message.
Coping with absence for *unlifted* types is important; see, for
example, Trac #4306. For these we find a suitable literal,
using Literal.absentLiteralOf. We don't have literals for
every primitive type, so the function is partial.
[I did try the experiment of using an error thunk for unlifted
things too, relying on the simplifier to drop it as dead code,
by making absentError
(a) *not* be a bottoming Id,
(b) be "ok for speculation"
But that relies on the simplifier finding that it really
is dead code, which is fragile, and indeed failed when
profiling is on, which disables various optimisations. So
using a literal will do.]
-}
mk_absent_let :: DynFlags -> Id -> Maybe (CoreExpr -> CoreExpr)
mk_absent_let dflags arg
| not (isUnLiftedType arg_ty)
= Just (Let (NonRec arg abs_rhs))
| Just tc <- tyConAppTyCon_maybe arg_ty
, Just lit <- absentLiteralOf tc
= Just (Let (NonRec arg (Lit lit)))
| arg_ty `eqType` voidPrimTy
= Just (Let (NonRec arg (Var voidPrimId)))
| otherwise
= WARN( True, ptext (sLit "No absent value for") <+> ppr arg_ty )
Nothing
where
arg_ty = idType arg
abs_rhs = mkRuntimeErrorApp aBSENT_ERROR_ID arg_ty msg
msg = showSDoc dflags (ppr arg <+> ppr (idType arg))
mk_seq_case :: Id -> CoreExpr -> CoreExpr
mk_seq_case arg body = Case (Var arg) (sanitiseCaseBndr arg) (exprType body) [(DEFAULT, [], body)]
sanitiseCaseBndr :: Id -> Id
-- The argument we are scrutinising has the right type to be
-- a case binder, so it's convenient to re-use it for that purpose.
-- But we *must* throw away all its IdInfo. In particular, the argument
-- will have demand info on it, and that demand info may be incorrect for
-- the case binder. e.g. case ww_arg of ww_arg { I# x -> ... }
-- Quite likely ww_arg isn't used in '...'. The case may get discarded
-- if the case binder says "I'm demanded". This happened in a situation
-- like (x+y) `seq` ....
sanitiseCaseBndr id = id `setIdInfo` vanillaIdInfo
mk_ww_local :: Unique -> Type -> Id
mk_ww_local uniq ty = mkSysLocal (fsLit "ww") uniq ty
| alexander-at-github/eta | compiler/ETA/StrAnal/WwLib.hs | bsd-3-clause | 32,006 | 0 | 15 | 9,473 | 3,898 | 2,135 | 1,763 | 264 | 2 |
{-# LANGUAGE OverloadedStrings #-}
import qualified Data.ByteString.Lazy.Char8 as B
import System.IO
import Data.Aeson
import System.Environment
import Examples
assignment :: ToJSON a => B.ByteString -> a -> B.ByteString
assignment varname value = B.concat [ varname, " = ", encode value, ";"]
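-- Illustrative example: with OverloadedStrings in scope,
--
-- > assignment "examples" ([1,2,3] :: [Int]) == "examples = [1,2,3];"
--
-- which is the JavaScript assignment that 'main' writes to stdout.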
main = do
args <- getArgs
case args of
[dir] -> do
readExamples dir >>= B.putStr . assignment "examples"
_ -> do
hPutStrLn stderr "Usage: bundle-examples <dir> > examples.js"
| psibi/incredible | logic/examples/bundle-examples.hs | mit | 524 | 0 | 14 | 124 | 145 | 76 | 69 | 15 | 2 |
{-# OPTIONS_GHC -Wall #-}
module Canonicalize.Environment where
import qualified Data.List as List
import qualified Data.Map as Map
import qualified Data.Maybe as Maybe
import qualified Data.Set as Set
import AST.Expression.General (saveEnvName)
import qualified AST.Module.Name as ModuleName
import qualified AST.Pattern as P
import qualified AST.Type as Type
import qualified AST.Variable as Var
import Elm.Utils ((|>))
-- ENVIRONMENT
data Environment = Env
{ _home :: ModuleName.Canonical
, _values :: Dict Var.Canonical
, _adts :: Dict Var.Canonical
, _aliases :: Dict (Var.Canonical, [String], Type.Canonical)
, _patterns :: Dict (Var.Canonical, Int)
}
type Dict a =
Map.Map String (Set.Set a)
fromPatches :: ModuleName.Canonical -> [Patch] -> Environment
fromPatches moduleName patches =
addPatches
patches
(Env moduleName Map.empty Map.empty Map.empty Map.empty)
addPattern :: P.Pattern ann var -> Environment -> Environment
addPattern pattern env =
let patches =
map (\x -> Value x (Var.local x)) (P.boundVarList pattern)
in
addPatches patches env
-- PATCHES
data Patch
= Value String Var.Canonical
| Union String Var.Canonical
| Alias String (Var.Canonical, [String], Type.Canonical)
| Pattern String (Var.Canonical, Int)
-- ADD PATCH TO ENVIRONMENT
addPatches :: [Patch] -> Environment -> Environment
addPatches patches env =
List.foldl' (flip addPatch) env patches
addPatch :: Patch -> Environment -> Environment
addPatch patch env =
case patch of
Value name var ->
env { _values = insert name var (_values env) }
Union name var ->
env { _adts = insert name var (_adts env) }
Alias name var ->
env { _aliases = insert name var (_aliases env) }
Pattern name var ->
env { _patterns = insert name var (_patterns env) }
insert :: (Ord a) => String -> a -> Dict a -> Dict a
insert key value =
Map.insertWith Set.union key (Set.singleton value)
-- PATCH HELPERS
builtinPatches :: [Patch]
builtinPatches =
concat
[ map (patch Value) (tupleNames ++ [saveEnvName])
, map (patch Union) (tupleNames ++ ["List","Int","Float","Char","Bool","String"])
, map (patternPatch) (tuples ++ [ ("::", 2), ("[]", 0) ])
]
where
patch mkPatch name =
mkPatch name (Var.builtin name)
patternPatch (name, args) =
Pattern name (Var.builtin name, args)
tupleNames =
map fst tuples
tuples =
map toTuple [0..9]
toTuple :: Int -> (String, Int)
toTuple n =
("_Tuple" ++ show n, n)
-- TO TYPE DEALIASER
toDealiaser :: Environment -> Map.Map String String
toDealiaser (Env _ _ adts aliases _) =
let
dealiasAdt (localName, canonicalSet) =
case Set.toList canonicalSet of
[canonicalName] ->
Just (Var.toString canonicalName, localName)
_ ->
Nothing
dealiasAlias (localName, canonicalSet) =
case Set.toList canonicalSet of
[(canonicalName,_,_)] ->
Just (Var.toString canonicalName, localName)
_ ->
Nothing
adtPairs =
Maybe.mapMaybe dealiasAdt (Map.toList adts)
aliasPairs =
Maybe.mapMaybe dealiasAlias (Map.toList aliases)
add (key,value) dict =
Map.insertWith (\v v' -> if length v < length v' then v else v') key value dict
in
adtPairs ++ aliasPairs
|> foldr add Map.empty
| laszlopandy/elm-compiler | src/Canonicalize/Environment.hs | bsd-3-clause | 3,454 | 0 | 15 | 859 | 1,153 | 629 | 524 | 92 | 4 |
-- Copyright : Daan Leijen (c) 1999, [email protected]
-- HWT Group (c) 2003, [email protected]
-- License : BSD-style
module Opaleye.SQLite.Internal.HaskellDB.Sql.Generate (SqlGenerator(..)) where
import Opaleye.SQLite.Internal.HaskellDB.PrimQuery
import Opaleye.SQLite.Internal.HaskellDB.Sql
import qualified Data.List.NonEmpty as NEL
data SqlGenerator = SqlGenerator
{
sqlUpdate :: TableName -> [PrimExpr] -> Assoc -> SqlUpdate,
sqlDelete :: TableName -> [PrimExpr] -> SqlDelete,
sqlInsert :: TableName -> [Attribute] -> NEL.NonEmpty [PrimExpr] -> SqlInsert,
sqlExpr :: PrimExpr -> SqlExpr,
sqlLiteral :: Literal -> String,
-- | Turn a string into a quoted string. Quote characters
-- and any escaping are handled by this function.
sqlQuote :: String -> String
}
| bergmark/haskell-opaleye | opaleye-sqlite/src/Opaleye/SQLite/Internal/HaskellDB/Sql/Generate.hs | bsd-3-clause | 889 | 0 | 13 | 205 | 155 | 99 | 56 | 11 | 0 |
import System.IO (hFlush, stdout)
import Control.Monad (mapM)
import Control.Monad.Except (runExceptT)
import Control.Monad.Trans (liftIO)
import qualified Data.Map as Map
import qualified Data.Traversable as DT
import Readline (readline, load_history)
import Types
import Reader (read_str)
import Printer (_pr_str)
import Env (Env, env_new, env_get, env_set)
-- read
mal_read :: String -> IOThrows MalVal
mal_read str = read_str str
-- eval
eval_ast :: MalVal -> Env -> IOThrows MalVal
eval_ast sym@(MalSymbol _) env = env_get env sym
eval_ast ast@(MalList lst m) env = do
new_lst <- mapM (\x -> (eval x env)) lst
return $ MalList new_lst m
eval_ast ast@(MalVector lst m) env = do
new_lst <- mapM (\x -> (eval x env)) lst
return $ MalVector new_lst m
eval_ast ast@(MalHashMap lst m) env = do
new_hm <- DT.mapM (\x -> (eval x env)) lst
return $ MalHashMap new_hm m
eval_ast ast env = return ast
let_bind :: Env -> [MalVal] -> IOThrows Env
let_bind env [] = return env
let_bind env (b:e:xs) = do
evaled <- eval e env
x <- liftIO $ env_set env b evaled
let_bind env xs
apply_ast :: MalVal -> Env -> IOThrows MalVal
apply_ast ast@(MalList [] _) env = do
return ast
apply_ast ast@(MalList (MalSymbol "def!" : args) _) env = do
case args of
(a1@(MalSymbol _): a2 : []) -> do
evaled <- eval a2 env
liftIO $ env_set env a1 evaled
_ -> throwStr "invalid def!"
apply_ast ast@(MalList (MalSymbol "let*" : args) _) env = do
case args of
(a1 : a2 : []) -> do
params <- (_to_list a1)
let_env <- liftIO $ env_new $ Just env
let_bind let_env params
eval a2 let_env
_ -> throwStr "invalid let*"
apply_ast ast@(MalList _ _) env = do
el <- eval_ast ast env
case el of
(MalList ((Func (Fn f) _) : rest) _) ->
f $ rest
el ->
throwStr $ "invalid apply: " ++ (show el)
eval :: MalVal -> Env -> IOThrows MalVal
eval ast env = do
case ast of
(MalList _ _) -> apply_ast ast env
_ -> eval_ast ast env
-- print
mal_print :: MalVal -> String
mal_print exp = show exp
-- repl
add [MalNumber a, MalNumber b] = return $ MalNumber $ a + b
add _ = throwStr $ "illegal arguments to +"
sub [MalNumber a, MalNumber b] = return $ MalNumber $ a - b
sub _ = throwStr $ "illegal arguments to -"
mult [MalNumber a, MalNumber b] = return $ MalNumber $ a * b
mult _ = throwStr $ "illegal arguments to *"
divd [MalNumber a, MalNumber b] = return $ MalNumber $ a `div` b
divd _ = throwStr $ "illegal arguments to /"
rep :: Env -> String -> IOThrows String
rep env line = do
ast <- mal_read line
exp <- eval ast env
return $ mal_print exp
repl_loop :: Env -> IO ()
repl_loop env = do
line <- readline "user> "
case line of
Nothing -> return ()
Just "" -> repl_loop env
Just str -> do
res <- runExceptT $ rep env str
out <- case res of
Left (StringError str) -> return $ "Error: " ++ str
Left (MalValError mv) -> return $ "Error: " ++ (show mv)
Right val -> return val
putStrLn out
hFlush stdout
repl_loop env
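-- Illustrative session (assuming the usual MAL semantics of def! and let*):
--
--   user> (def! a 6)
--   6
--   user> (let* (b 2) (* a b))
--   12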
main = do
load_history
repl_env <- env_new Nothing
env_set repl_env (MalSymbol "+") $ _func add
env_set repl_env (MalSymbol "-") $ _func sub
env_set repl_env (MalSymbol "*") $ _func mult
env_set repl_env (MalSymbol "/") $ _func divd
repl_loop repl_env
| 0gajun/mal | haskell/step3_env.hs | mpl-2.0 | 3,548 | 0 | 18 | 1,011 | 1,432 | 701 | 731 | 98 | 5 |
{-# LANGUAGE CPP, DeriveGeneric #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Program.Find
-- Copyright : Duncan Coutts 2013
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- A somewhat extended notion of the normal program search path concept.
--
-- Usually when finding executables we just want to look in the usual places
-- using the OS's usual method for doing so. In Haskell the normal OS-specific
-- method is captured by 'findExecutable'. On all common OSs this makes use of
-- a @PATH@ environment variable (though on Windows it is not just the @PATH@).
--
-- However it is sometimes useful to be able to look in additional locations
-- without having to change the process-global @PATH@ environment variable.
-- So we need an extension of the usual 'findExecutable' that can look in
-- additional locations, either before, after or instead of the normal OS
-- locations.
--
module Distribution.Simple.Program.Find (
-- * Program search path
ProgramSearchPath,
ProgramSearchPathEntry(..),
defaultProgramSearchPath,
findProgramOnSearchPath,
programSearchPathAsPATHVar,
getSystemSearchPath,
) where
import Distribution.Verbosity
import Distribution.Simple.Utils
import Distribution.System
import Distribution.Compat.Environment
import Distribution.Compat.Binary
import qualified System.Directory as Directory
( findExecutable )
import System.FilePath as FilePath
( (</>), (<.>), splitSearchPath, searchPathSeparator, getSearchPath
, takeDirectory )
import Data.List
( nub )
import GHC.Generics
#if defined(mingw32_HOST_OS)
import qualified System.Win32 as Win32
#endif
-- | A search path to use when locating executables. This is analogous
-- to the unix @$PATH@ or win32 @%PATH%@ but with the ability to use
-- the system default method for finding executables ('findExecutable' which
-- on unix is simply looking on the @$PATH@ but on win32 is a bit more
-- complicated).
--
-- The default to use is @[ProgSearchPathDefault]@ but you can add extra dirs
-- either before, after or instead of the default, e.g. here we add an extra
-- dir to search after the usual ones.
--
-- > ['ProgramSearchPathDefault', 'ProgramSearchPathDir' dir]
--
type ProgramSearchPath = [ProgramSearchPathEntry]
data ProgramSearchPathEntry =
ProgramSearchPathDir FilePath -- ^ A specific dir
| ProgramSearchPathDefault -- ^ The system default
deriving (Eq, Generic)
instance Binary ProgramSearchPathEntry
defaultProgramSearchPath :: ProgramSearchPath
defaultProgramSearchPath = [ProgramSearchPathDefault]
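-- An illustrative use of the extended search path (directory name assumed):
-- look for @hsc2hs@ in an extra tools directory, but only after the usual
-- system locations:
--
-- > findProgramOnSearchPath verbosity
-- >   (defaultProgramSearchPath ++ [ProgramSearchPathDir "/opt/tools/bin"])
-- >   "hsc2hs"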
findProgramOnSearchPath :: Verbosity -> ProgramSearchPath
-> FilePath -> IO (Maybe (FilePath, [FilePath]))
findProgramOnSearchPath verbosity searchpath prog = do
debug verbosity $ "Searching for " ++ prog ++ " in path."
res <- tryPathElems [] searchpath
case res of
Nothing -> debug verbosity ("Cannot find " ++ prog ++ " on the path")
Just (path, _) -> debug verbosity ("Found " ++ prog ++ " at "++ path)
return res
where
tryPathElems :: [[FilePath]] -> [ProgramSearchPathEntry]
-> IO (Maybe (FilePath, [FilePath]))
tryPathElems _ [] = return Nothing
tryPathElems tried (pe:pes) = do
res <- tryPathElem pe
case res of
(Nothing, notfoundat) -> tryPathElems (notfoundat : tried) pes
(Just foundat, notfoundat) -> return (Just (foundat, alltried))
where
alltried = concat (reverse (notfoundat : tried))
tryPathElem :: ProgramSearchPathEntry -> IO (Maybe FilePath, [FilePath])
tryPathElem (ProgramSearchPathDir dir) =
findFirstExe [ dir </> prog <.> ext | ext <- exeExtensions ]
-- On windows, getSystemSearchPath is not guaranteed 100% correct so we
-- use findExecutable and then approximate the not-found-at locations.
tryPathElem ProgramSearchPathDefault | buildOS == Windows = do
mExe <- findExecutable prog
syspath <- getSystemSearchPath
case mExe of
Nothing ->
let notfoundat = [ dir </> prog | dir <- syspath ] in
return (Nothing, notfoundat)
Just foundat -> do
let founddir = takeDirectory foundat
notfoundat = [ dir </> prog
| dir <- takeWhile (/= founddir) syspath ]
return (Just foundat, notfoundat)
-- On other OSs we can just do the simple thing
tryPathElem ProgramSearchPathDefault = do
dirs <- getSystemSearchPath
findFirstExe [ dir </> prog <.> ext | dir <- dirs, ext <- exeExtensions ]
findFirstExe :: [FilePath] -> IO (Maybe FilePath, [FilePath])
findFirstExe = go []
where
go fs' [] = return (Nothing, reverse fs')
go fs' (f:fs) = do
isExe <- doesExecutableExist f
if isExe
then return (Just f, reverse fs')
else go (f:fs') fs
-- | Interpret a 'ProgramSearchPath' to construct a new @$PATH@ env var.
-- Note that this is close but not perfect because on Windows the search
-- algorithm looks at more than just the @%PATH%@.
programSearchPathAsPATHVar :: ProgramSearchPath -> IO String
programSearchPathAsPATHVar searchpath = do
ess <- mapM getEntries searchpath
return (intercalate [searchPathSeparator] (concat ess))
where
getEntries (ProgramSearchPathDir dir) = return [dir]
getEntries ProgramSearchPathDefault = do
env <- getEnvironment
return (maybe [] splitSearchPath (lookup "PATH" env))
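-- Illustrative example: on a Unix system
--
-- > programSearchPathAsPATHVar [ProgramSearchPathDir "/a", ProgramSearchPathDir "/b"]
--
-- yields @"/a:/b"@, while any 'ProgramSearchPathDefault' entry splices the
-- current @$PATH@ components in at that position.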
-- | Get the system search path. On Unix systems this is just the @$PATH@ env
-- var, but on windows it's a bit more complicated.
--
getSystemSearchPath :: IO [FilePath]
getSystemSearchPath = fmap nub $ do
#if defined(mingw32_HOST_OS)
processdir <- takeDirectory `fmap` Win32.getModuleFileName Win32.nullHANDLE
currentdir <- Win32.getCurrentDirectory
systemdir <- Win32.getSystemDirectory
windowsdir <- Win32.getWindowsDirectory
pathdirs <- FilePath.getSearchPath
let path = processdir : currentdir
: systemdir : windowsdir
: pathdirs
return path
#else
FilePath.getSearchPath
#endif
#ifdef MIN_VERSION_directory
#if MIN_VERSION_directory(1,2,1)
#define HAVE_directory_121
#endif
#endif
findExecutable :: FilePath -> IO (Maybe FilePath)
#ifdef HAVE_directory_121
findExecutable = Directory.findExecutable
#else
findExecutable prog = do
-- With directory < 1.2.1 'findExecutable' doesn't check that the path
-- really refers to an executable.
mExe <- Directory.findExecutable prog
case mExe of
Just exe -> do
exeExists <- doesExecutableExist exe
if exeExists
then return mExe
else return Nothing
_ -> return mExe
#endif
| tolysz/prepare-ghcjs | spec-lts8/cabal/Cabal/Distribution/Simple/Program/Find.hs | bsd-3-clause | 6,915 | 0 | 22 | 1,567 | 1,193 | 648 | 545 | 94 | 9 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
module Data.Extensible.Embeddable where
import Control.Monad (mplus)
import Data.Extensible.Sum
import Data.Extensible.Sum1
import qualified Data.Extensible.Sum2 as S
import Data.Extensible.Sum2 ((:>+:), (:+:))
class Embeddable a b where
embed :: a -> b
instance {-# INCOHERENT #-} (b :>|: a) => Embeddable a b where
embed = lft
instance {-# INCOHERENT #-} (Embeddable a c, Embeddable b c) => Embeddable (a :|: b) c where
embed (DataL x) = embed x
embed (DataR x) = embed x
instance {-# INCOHERENT #-} (b :>||: a) => Embeddable (a x) (b x) where
embed = lft1
instance {-# INCOHERENT #-} (Embeddable (a x) (c x), Embeddable (b x) (c x)) => Embeddable ((a :||: b) x) (c x) where
embed (InL x) = embed x
embed (InR x) = embed x
instance {-# INCOHERENT #-} (b :>+: a) => Embeddable (a x y) (b x y) where
embed = S.lft2
instance {-# INCOHERENT #-} (Embeddable (a x y) (c x y), Embeddable (b x y) (c x y)) => Embeddable ((a :+: b) x y) (c x y) where
embed (S.InL x) = embed x
embed (S.InR x) = embed x
class Projectable a b where
project :: a -> Maybe b
instance {-# INCOHERENT #-} (b :>|: a) => Projectable b a where
project = peek
instance {-# INCOHERENT #-} (Projectable c a, Projectable c b) => Projectable c (a :|: b) where
project y = (DataL <$> project y) `mplus` (DataR <$> project y)
instance {-# INCOHERENT #-} (b :>||: a) => Projectable (b x) (a x) where
project = peek1
instance {-# INCOHERENT #-} (Projectable (c x) (a x), Projectable (c x) (b x))
=> Projectable (c x) ((a :||: b) x) where
project y = (InL <$> project y) `mplus` (InR <$> project y)
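-- The intended (but unstated) relationship between the two classes is the
-- round-trip law
--
-- > project (embed x :: b) == Just x
--
-- with 'project' returning 'Nothing' when asked for a summand other than the
-- one that was embedded.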
-- higher kinded projectable class
class Projectable1 a b where
project1 :: a x -> Maybe (b x)
instance {-# INCOHERENT #-} (b :>||: a) => Projectable1 b a where
project1 = peek1
instance {-# INCOHERENT #-} (Projectable1 c a, Projectable1 c b)
=> Projectable1 c (a :||: b) where
project1 y = (InL <$> project1 y) `mplus` (InR <$> project1 y)
| jadaska/extensible-sp | src/Data/Extensible/Embeddable.hs | mit | 2,290 | 0 | 10 | 494 | 894 | 479 | 415 | 49 | 0 |
module Handlers.LegacyPlayoffs
( getLegacyPlayoffsR
) where
import Hockey.Database
import Hockey.Environment
import Hockey.Types (Season(..))
import Models.LegacyJson
import Yesod
getLegacyPlayoffsR :: HandlerT site IO Value
getLegacyPlayoffsR =
liftIO $ do
e <- env
p <- selectPeriods (database e) (year e) Playoffs
s <- selectSeeds (database e) (year e)
g <- selectGamesForSeason (database e) (year e) Playoffs
e <- selectEvents (database e) (year e) Playoffs
returnJson $ PlayoffsResponse p s g e
| petester42/haskell-hockey | web/Handlers/LegacyPlayoffs.hs | mit | 532 | 0 | 11 | 101 | 189 | 95 | 94 | 16 | 1 |
import Language
import Heap
import Parser
import Compiler
import Data.List (partition)
import Data.Either (partitionEithers)
import System.Environment
import System.Exit
import System.FilePath.Posix
options = [("tco", TCO)
,("O0", OZ)
]
parseArg :: String -> Either String Option
parseArg ('-':op) = maybe err Right lu
where
lu = lookup op options
err = Left $ "CL option " ++ op ++ " is not supported"
parseArg f = Right $ File f
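-- Illustrative examples: parseArg "-tco" == Right TCO, parseArg "prog.src"
-- == Right (File "prog.src"), and an unrecognised flag such as "-foo"
-- yields a Left error message.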
dealWithBadCLAs :: [String] -> IO ()
dealWithBadCLAs [] = return ()
dealWithBadCLAs xs = putStr (unlines xs) >> exitFailure
splitFiles :: [Option] -> ([String], [Option])
splitFiles xs = ([s | File s <- ys], zs)
where
(ys, zs) = partition isFile xs
main = do
-- Parse args and exit if a bad argument is given
xs <- getArgs
let (es, as) = partitionEithers $ fmap parseArg xs
(fs, os) = splitFiles as
dealWithBadCLAs es
-- read source files
sources <- sequence $ fmap readFile fs
let outputFiles = fmap (flip replaceExtension "gcode") fs
sequence $ zipWith (writeGCodeFile os) outputFiles sources
| jmct/IterativeCompiler | frontend/frontend.hs | mit | 1,112 | 0 | 12 | 255 | 394 | 207 | 187 | 30 | 1 |
module Geometry.Sphere
( volume
, area
) where
volume :: Floating a => a -> a
volume r = 4/3 * pi * r^3
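-- Illustrative values for a unit sphere: volume 1 is roughly 4.18879, and
-- area 1 (below) is roughly 12.56637.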
area :: Floating a => a -> a
area r = 4 * pi * r^2 | RAFIRAF/HASKELL | Geometry/Sphere.hs | mit | 158 | 0 | 8 | 43 | 90 | 47 | 43 | 7 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-------------------------------------------------------------------------------
-- |
-- Module      : Correct.Core
-- Note :
--
-- A simple spelling corrector, based off of
-- http://norvig.com/spell-correct.html
-------------------------------------------------------------------------------
module Correct.Core
( correct
, edits1
, knownEdits2
, lowercase
, parse
, train
) where
import Control.Arrow ((&&&))
import qualified Data.Array.Unboxed as AU
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import qualified Data.ByteString.Internal as BI
import Data.Char (toLower)
import Data.List (foldl')
import qualified Data.Set as Set
import qualified Data.Trie as Trie
import Data.Trie.Convenience (insertWith, lookupWithDefault)
import Data.Word (Word8)
import qualified Data.Word as W
import Text.Regex.Posix ((=~))
-------------------------------------------------------------------------------
-- Helper functions for ByteString
toLowerW8 :: AU.UArray W.Word8 W.Word8
toLowerW8 = AU.listArray (0,255) (map (BI.c2w . toLower) ['\0'..'\255'])
lowercase :: ByteString -> ByteString
lowercase = B.map (\x -> toLowerW8 AU.! x)
-------------------------------------------------------------------------------
-- Functions to load the training data
type Words = Trie.Trie Int
parse :: ByteString -> [ByteString]
parse = map lowercase . concat . flip (=~) ("[a-zA-Z]+" :: ByteString)
train :: [ByteString] -> Words
train = foldl' (\t w -> insertWith (+) w 1 t) Trie.empty
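-- Illustrative usage (corpus file name assumed): parse and train once, then
-- query the resulting trie repeatedly.
--
-- > main :: IO ()
-- > main = do
-- >   ws <- fmap (train . parse) (B.readFile "big.txt")
-- >   print (correct ws "speling")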
-------------------------------------------------------------------------------
-- Functions for collecting possible edits
letters :: [Word8]
letters = map (BI.c2w . toLower) (['\65'..'\90'] ++ ['\97'..'\122'])
splits :: ByteString -> [(ByteString, ByteString)]
splits w = map (`B.splitAt` w) [0..B.length w]
-- Don't include whole word
splits' :: ByteString -> [(ByteString, ByteString)]
splits' w = map (`B.splitAt` w) [1..B.length w - 1]
deletes :: ByteString -> [ByteString]
deletes w = if B.null w
then []
else splits' w >>= \(first, second) ->
return $ B.concat [first, B.tail second]
inserts :: ByteString -> [ByteString]
inserts w = splits w >>= \(first, second) ->
letters >>= \letter ->
return $ B.concat [first, B.cons letter second]
replaces :: ByteString -> [ByteString]
replaces w = if B.null w
then []
else let strs = if B.length w == 1
then return $ B.pack letters
else splits' w >>= \(first, second) ->
letters >>= \letter ->
let second' = B.cons letter (B.tail second) in
return $ B.concat [first, second']
in filter (w /=) strs
transposes :: ByteString -> [ByteString]
transposes w = if B.length w <= 1
then []
else splits' w >>= \(first, second) ->
return $ B.concat [ B.init first
, B.pack [ B.head second
, B.last first
]
, B.tail second
]
known :: Words -> Set.Set ByteString -> Set.Set ByteString
known ws = Set.filter (`Trie.member` ws)
edits1 :: ByteString -> Set.Set ByteString
edits1 w = foldl' (\acc f -> Set.union (foldl' (flip Set.insert) Set.empty
(f w)) acc)
Set.empty [deletes, inserts, replaces, transposes]
knownEdits2 :: Words -> ByteString -> Set.Set ByteString
knownEdits2 ws w = Set.fold (\x acc -> Set.union acc (known ws . edits1 $ x))
Set.empty (edits1 w)
-------------------------------------------------------------------------------
-- Main spelling suggestion function
correct :: Words -> ByteString -> ByteString
correct ws w = snd .
Set.findMax .
Set.map (flip (lookupWithDefault 1) ws &&& id) .
head .
filter (not . Set.null) .
map ($ w) $
[ known ws . Set.singleton
, known ws . edits1
, knownEdits2 ws
, Set.singleton
]
| danielcnorris/haskell-spelling-corrector | src/Correct/Core.hs | mit | 4,624 | 0 | 22 | 1,525 | 1,226 | 682 | 544 | 83 | 3 |