code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE OverloadedStrings, TemplateHaskell #-}
-- | Invitation API.
module Web.Mackerel.Api.Invitation (createInvitation, revokeInvitation) where
import Data.Aeson (Value)
import qualified Data.HashMap.Lazy as HM
import Network.HTTP.Types (StdMethod(..))
import Web.Mackerel.Client
import Web.Mackerel.Internal.Api
import Web.Mackerel.Types.Invitation
-- | Create a new invitation via @POST \/api\/v0\/invitations@.
-- On success returns the created 'Invitation'; otherwise an 'ApiError'.
createInvitation :: Client -> InvitationCreate -> IO (Either ApiError Invitation)
createInvitation client invitation
  = request client POST "/api/v0/invitations" [] (Just invitation) (createHandler id)
-- | Revoke an invitation by email address via @POST \/api\/v0\/invitations\/revoke@.
-- The request body is the JSON object @{"email": ...}@; the response body is
-- discarded (the handler maps any JSON 'Value' to @()@).
revokeInvitation :: Client -> String -> IO (Either ApiError ())
revokeInvitation client email
  = request client POST "/api/v0/invitations/revoke" [] (Just (HM.singleton "email" email :: HM.HashMap String String)) (createHandler ((\_ -> ()) :: Value -> ()))
| itchyny/mackerel-client-hs | src/Web/Mackerel/Api/Invitation.hs | mit | 822 | 0 | 11 | 97 | 238 | 134 | 104 | 14 | 1 |
module Anagram (anagramsFor) where
import Data.Char (toLower)
import Data.List (sort)
-- | Select the candidates that are anagrams of the subject word.
-- Comparison is case-insensitive, and a candidate that is merely the
-- subject itself (up to letter case) does not count as an anagram.
anagramsFor :: String -> [String] -> [String]
anagramsFor subject = filter isAnagram
  where
    isAnagram candidate =
      lowered candidate /= lowered subject
        && signature candidate == signature subject
    lowered = map toLower
    signature = sort . lowered
| genos/online_problems | exercism/haskell/anagram/src/Anagram.hs | mit | 237 | 0 | 11 | 51 | 110 | 59 | 51 | 7 | 1 |
-- Power of two. Primitive Recursion.
module PowerOfTwo where
-- | Compute 2^n for a non-negative exponent by primitive recursion.
-- A negative exponent aborts with an error.
powerOfTwo :: Integer -> Integer
powerOfTwo n
  | n < 0     = error "Negative exponent."
  | otherwise = go n
  where
    -- Primitive-recursive worker: go 0 = 1, go k = 2 * go (k - 1).
    go :: Integer -> Integer
    go 0 = 1
    go k = 2 * go (k - 1)
{- GHCi>
powerOfTwo 0
powerOfTwo 1
powerOfTwo 2
-}
-- 1
-- 2
-- 4
| pascal-knodel/haskell-craft | Examples/· Recursion/· Primitive Recursion/Calculations/PowerOfTwo.hs | mit | 745 | 0 | 10 | 188 | 96 | 52 | 44 | 9 | 2 |
module Graph (Node(..), qnode, Weight, Index, Edge(..), qedge, Graph(..),
egraph, addNode, addNodeByName, addEdgeByNames,
removeEdgeByNames, removeNodeByName, nodeAtIndex, getIndex) where
import qualified Data.Map as M
import Data.List
------------------
---- Node ----
------------------
-- | A graph vertex: a name plus arbitrary string attributes.
data Node = Node {name :: String, nattributes :: M.Map String String}
-- XML-ish rendering; the attribute map is not shown.
instance Show Node where
  show (Node name attrs) = " <Node name=\"" ++ name ++ "\">"
-- nodes are equal if name is equal
-- (attributes are deliberately ignored in comparisons)
instance Eq Node where
  x == y = (name x) == (name y)
-- returns Node with empty attribute map
qnode :: String -> Node
qnode s = Node s M.empty
--------------------------------------------
------------------
---- Edge ----
------------------
-- | Edge weight.
type Weight = Double
-- | Position of a node within the graph's node list.
type Index = Int
-- | A weighted edge between two nodes, identified by their indices in
-- the graph's node list, plus arbitrary string attributes.
data Edge = Edge {fromIndex :: Index,
                  toIndex :: Index,
                  weight :: Weight,
                  eattributes :: M.Map String String}
-- XML-ish rendering showing raw endpoint indices; attributes omitted.
instance Show Edge where
  show (Edge from to weight attr) = edgeTemplate from to weight
-- | Shared layout for rendering an edge; polymorphic in the endpoint
-- representation so it works for both raw indices and resolved names.
edgeTemplate :: Show a => a -> a -> Weight -> String
edgeTemplate f t w = " <Edge from=" ++ (show f) ++
                     " to=" ++ (show t) ++ " weight=" ++
                     (show w) ++ ">"
-- | Render an edge with its endpoint indices resolved to node names.
showEdgeWithNames :: Graph -> Edge -> String
showEdgeWithNames g edge@(Edge f t w _) = edgeTemplate (nameAtIndex g f)
                                                       (nameAtIndex g t) w
-- edges are equal if endpoints are equal
-- (weight and attributes are deliberately ignored in comparisons)
instance Eq Edge where
  x == y = (fromIndex x) == (fromIndex y) && (toIndex x) == (toIndex y)
-- returns Edge with empty attribute map
qedge :: Index -> Index -> Weight -> Edge
qedge f t w = Edge f t w M.empty
--------------------------------------------
-------------------
---- Graph ----
-------------------
-- | A directed graph: a node list plus edges that reference nodes by
-- their position in that list.
data Graph = Graph {nodes :: [Node], edges :: [Edge]}
-- XML-ish rendering; edge endpoints are resolved to node names.
instance Show Graph where
  show graph@(Graph nodes edges) = "<Graph>\n <Nodes>\n" ++
                                   (unlines $ map show nodes) ++ " </Nodes>\n" ++
                                   " <Edges>\n" ++
                                   (unlines . (map (showEdgeWithNames graph))) edges ++
                                   " </Edges>\n</Graph>\n"
-- | The empty graph: no nodes, no edges.
-- (Added the missing top-level type signature.)
egraph :: Graph
egraph = Graph [] []
--------------------------------------------
------------------------------
---- Graph Operations ----
------------------------------
-- | Name of the node at the given index.
nameAtIndex :: Graph -> Index -> String
nameAtIndex g i = name $ nodeAtIndex g i
-- | Node at the given index.
-- NOTE(review): partial -- (!!) errors on out-of-range indices, so
-- callers must supply a valid index.
nodeAtIndex :: Graph -> Index -> Node
nodeAtIndex g i = (nodes g) !! i
-- | Index of a node in the graph, if present (nodes compare by name).
findNode :: Node -> Graph -> Maybe Int
findNode node = elemIndex node . nodes
-- | Position of the edge between two named nodes in the edge list, if
-- any. The probe edge's weight (1) is arbitrary: Eq on Edge compares
-- endpoints only.
findEdgeFromNames :: String -> String -> Graph -> Maybe Int
findEdgeFromNames f t g = elemIndex (qedge (whereIs f) (whereIs t) 1) (edges g)
  where whereIs s = getIndex s g
-- | Index of the node with the given name; errors if no such node.
getIndex :: String -> Graph -> Index
getIndex name graph = case findNode (qnode name) graph of
  Just i -> i
  Nothing -> error $ "no such node: " ++ name
-- | Insert a node, rejecting duplicates (nodes compare equal by name).
-- Raises an error if a node with the same name already exists.
addNode :: Node -> Graph -> Graph
addNode node graph
  | node `elem` nodes graph = error "node already exists"
  | otherwise = Graph (node : nodes graph) (edges graph)
-- | Insert a fresh, attribute-less node by name.
-- Routed through 'addNode' so duplicate names are rejected here too,
-- keeping the name -> index lookups ('findNode', 'getIndex')
-- unambiguous (previously this bypassed the duplicate check).
addNodeByName :: String -> Graph -> Graph
addNodeByName n = addNode (qnode n)
-- | Insert an edge between two named nodes, rejecting duplicates.
-- Edges compare equal by endpoints only, so the weight plays no part
-- in the duplicate check. Errors if either name is unknown or the
-- edge already exists.
addEdgeByNames :: String -> String -> Weight -> Graph -> Graph
addEdgeByNames f t w g = case findEdgeFromNames f t g of
    Nothing -> Graph (nodes g) (newEdge : edges g)
    _ -> error "edge already exists"
  where newEdge = qedge (getIndex f g) (getIndex t g) w
-- doesn't warn if edge doesn't exist
-- (the probe edge's weight 1 is arbitrary: Eq on Edge ignores weight)
removeEdgeByNames :: String -> String -> Graph -> Graph
removeEdgeByNames f t g = Graph (nodes g)
                                (filter (/= (qedge (whereIs f) (whereIs t) 1)) (edges g))
  where whereIs s = getIndex s g
-- | Remove a node by name along with every edge touching it. The
-- surviving edges' endpoint indices are shifted down to stay aligned
-- with the shrunken node list. Errors if the node does not exist.
removeNodeByName :: String -> Graph -> Graph
removeNodeByName n g = Graph (filter (/= (qnode n)) (nodes g))
                             (map (decrementIfHigherIndex loc)
                                  (removeConnectedEdges loc (edges g)))
  where loc = getIndex n g
-- | Drop all edges incident to the node at the given index.
removeConnectedEdges :: Index -> [Edge] -> [Edge]
removeConnectedEdges i es = filter (not . (connectedTo i)) es
-- | Whether an edge starts or ends at the given index.
connectedTo :: Index -> Edge -> Bool
connectedTo index edge = (fromIndex edge == index) || (toIndex edge == index)
-- | Shift an edge's endpoint indices down by one where they point past
-- the removed node's position.
decrementIfHigherIndex :: Index -> Edge -> Edge
decrementIfHigherIndex i edge@(Edge from to weight attrs) =
    Edge (decIfHigher from) (decIfHigher to) weight attrs
  where decIfHigher j = if j > i then j-1 else j
--------------------------------------------
| tonyfischetti/graph | Graph.hs | mit | 4,574 | 0 | 14 | 1,216 | 1,513 | 795 | 718 | 80 | 2 |
-- |
-- Module : $Header$
-- Description : Transform an algorithm language term into single static assignment form
-- Copyright : (c) Justus Adam 2017. All Rights Reserved.
-- License : EPL-1.0
-- Maintainer : [email protected], [email protected]
-- Stability : experimental
-- Portability : portable
-- This source code is licensed under the terms described in the associated LICENSE.TXT file
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TupleSections #-}
module Ohua.ALang.Passes.SSA where
import Ohua.Prelude
import Data.Functor.Foldable
import qualified Data.HashMap.Strict as HM
import Control.Category ((>>>))
import Control.Lens (non, at)
import Ohua.ALang.Lang
import Ohua.ALang.Util
type LocalScope = HM.HashMap Binding Binding
-- | Look up the current SSA name for a binding. A binding with no
-- renaming in scope resolves to itself ('non' supplies the original
-- binding as the default).
ssaResolve :: MonadReader LocalScope m => Binding -> m Binding
ssaResolve bnd = view $ at bnd . non bnd
-- | Generate a new name for the provided binding and run the inner
-- computation with that name in scope to replace the provided binding
-- Returns both the generated binding and the result of the inner
-- computation
--
-- Passing in the computation which is to be executed in the modified
-- environment makes this function a bit harder to use (or rather the
-- code using it less readable) because it does a lot of passing
-- functions as arguments, however it very nicely encapsulates the
-- scope changes which means they will never leak from where they are
-- supposed to be applied
ssaRename ::
       (MonadGenBnd m, MonadReader LocalScope m)
    => Binding
    -> (Binding -> m a)
    -> m a
ssaRename oldBnd cont = do
    -- Fresh binding derived from the old name ...
    newBnd <- generateBindingWith oldBnd
    -- ... made visible to the continuation via the extended scope.
    local (HM.insert oldBnd newBnd) $ cont newBnd
-- | Transform an expression into SSA form, starting from an empty
-- renaming scope.
performSSA :: MonadOhua m => Expression -> m Expression
performSSA = flip runReaderT mempty . ssa
-- | The SSA transformation proper: a fold ('cata') over the
-- expression tree.
ssa :: (MonadOhua m, MonadReader LocalScope m)
    => Expression
    -> m Expression
ssa =
    cata $ \case
        -- A use site resolves to the innermost renaming in scope.
        VarF bnd -> Var <$> ssaResolve bnd
        -- Binding sites get fresh names; the subterms are rebuilt
        -- inside the extended scope.
        LambdaF v body -> ssaRename v $ \v' -> Lambda v' <$> body
        LetF v val body -> ssaRename v $ \v' -> Let v' <$> val <*> body
        -- Everything else: rebuild the node from transformed children.
        e -> embed <$> sequence e
-- Check if an expression is in SSA form. Returns the list of bindings
-- that are defined more than once; an empty list means the expression
-- is in SSA form.
isSSA :: Expression -> [Binding]
isSSA e = [b | (b, count) <- HM.toList counts, count > 1]
  where
    counts = HM.fromListWith (+) [(b, 1 :: Word) | b <- definedBindings e]
-- | Fail (with a debug error listing the offending bindings) if the
-- expression is not in SSA form; succeed silently otherwise.
checkSSA :: MonadOhua m => Expression -> m ()
checkSSA = isSSA >>> \case
    [] -> return ()
    other -> throwErrorDebugS $ mkMsg other
  where
    mkMsg bnd = "Redefinition of bindings " <> show bnd
| ohua-dev/ohua-core | core/src/Ohua/ALang/Passes/SSA.hs | epl-1.0 | 2,674 | 0 | 13 | 550 | 564 | 303 | 261 | -1 | -1 |
{-# OPTIONS -w -O0 #-}
{- |
Module : Fpl/ATC_Fpl.der.hs
Description : generated Typeable, ShATermConvertible instances
Copyright : (c) DFKI Bremen 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable(overlapping Typeable instances)
Automatic derivation of instances via DrIFT-rule Typeable, ShATermConvertible
for the type(s):
'Fpl.As.FplExt'
'Fpl.As.FplSortItem'
'Fpl.As.FplOpItem'
'Fpl.As.FunDef'
'Fpl.As.TermExt'
'Fpl.Sign.SignExt'
-}
{-
Generated by 'genRules' (automatic rule generation for DrIFT). Don't touch!!
dependency files:
Fpl/As.hs
Fpl/Sign.hs
-}
module Fpl.ATC_Fpl () where
import ATerm.Lib
import CASL.AS_Basic_CASL
import CASL.ATC_CASL
import CASL.Formula
import CASL.OpItem
import CASL.Sign
import CASL.SortItem
import CASL.ToDoc
import Common.AS_Annotation
import Common.AnnoState
import Common.Doc
import Common.Doc as Doc
import Common.DocUtils
import Common.Id
import Common.Keywords
import Common.Lexer
import Common.Parsec
import Common.Token hiding (innerList)
import Data.List
import Data.List (delete)
import Data.Maybe (isNothing)
import Data.Ord
import Data.Typeable
import Fpl.As
import Fpl.Sign
import Text.ParserCombinators.Parsec
import qualified Common.Lib.MapSet as MapSet
import qualified Common.Lib.Rel as Rel
{-! for Fpl.As.FplExt derive : Typeable !-}
{-! for Fpl.As.FplSortItem derive : Typeable !-}
{-! for Fpl.As.FplOpItem derive : Typeable !-}
{-! for Fpl.As.FunDef derive : Typeable !-}
{-! for Fpl.As.TermExt derive : Typeable !-}
{-! for Fpl.Sign.SignExt derive : Typeable !-}
{-! for Fpl.As.FplExt derive : ShATermConvertible !-}
{-! for Fpl.As.FplSortItem derive : ShATermConvertible !-}
{-! for Fpl.As.FplOpItem derive : ShATermConvertible !-}
{-! for Fpl.As.FunDef derive : ShATermConvertible !-}
{-! for Fpl.As.TermExt derive : ShATermConvertible !-}
{-! for Fpl.Sign.SignExt derive : ShATermConvertible !-}
-- Generated by DrIFT, look but don't touch!
instance ShATermConvertible TermExt where
toShATermAux att0 xv = case xv of
FixDef a -> do
(att1, a') <- toShATerm' att0 a
return $ addATerm (ShAAppl "FixDef" [a'] []) att1
Case a b c -> do
(att1, a') <- toShATerm' att0 a
(att2, b') <- toShATerm' att1 b
(att3, c') <- toShATerm' att2 c
return $ addATerm (ShAAppl "Case" [a', b', c'] []) att3
Let a b c -> do
(att1, a') <- toShATerm' att0 a
(att2, b') <- toShATerm' att1 b
(att3, c') <- toShATerm' att2 c
return $ addATerm (ShAAppl "Let" [a', b', c'] []) att3
IfThenElse a b c d -> do
(att1, a') <- toShATerm' att0 a
(att2, b') <- toShATerm' att1 b
(att3, c') <- toShATerm' att2 c
(att4, d') <- toShATerm' att3 d
return $ addATerm (ShAAppl "IfThenElse" [a', b', c', d'] []) att4
EqTerm a b c -> do
(att1, a') <- toShATerm' att0 a
(att2, b') <- toShATerm' att1 b
(att3, c') <- toShATerm' att2 c
return $ addATerm (ShAAppl "EqTerm" [a', b', c'] []) att3
BoolTerm a -> do
(att1, a') <- toShATerm' att0 a
return $ addATerm (ShAAppl "BoolTerm" [a'] []) att1
fromShATermAux ix att0 = case getShATerm ix att0 of
ShAAppl "FixDef" [a] _ ->
case fromShATerm' a att0 of
{ (att1, a') ->
(att1, FixDef a') }
ShAAppl "Case" [a, b, c] _ ->
case fromShATerm' a att0 of
{ (att1, a') ->
case fromShATerm' b att1 of
{ (att2, b') ->
case fromShATerm' c att2 of
{ (att3, c') ->
(att3, Case a' b' c') }}}
ShAAppl "Let" [a, b, c] _ ->
case fromShATerm' a att0 of
{ (att1, a') ->
case fromShATerm' b att1 of
{ (att2, b') ->
case fromShATerm' c att2 of
{ (att3, c') ->
(att3, Let a' b' c') }}}
ShAAppl "IfThenElse" [a, b, c, d] _ ->
case fromShATerm' a att0 of
{ (att1, a') ->
case fromShATerm' b att1 of
{ (att2, b') ->
case fromShATerm' c att2 of
{ (att3, c') ->
case fromShATerm' d att3 of
{ (att4, d') ->
(att4, IfThenElse a' b' c' d') }}}}
ShAAppl "EqTerm" [a, b, c] _ ->
case fromShATerm' a att0 of
{ (att1, a') ->
case fromShATerm' b att1 of
{ (att2, b') ->
case fromShATerm' c att2 of
{ (att3, c') ->
(att3, EqTerm a' b' c') }}}
ShAAppl "BoolTerm" [a] _ ->
case fromShATerm' a att0 of
{ (att1, a') ->
(att1, BoolTerm a') }
u -> fromShATermError "TermExt" u
instance ShATermConvertible FunDef where
toShATermAux att0 xv = case xv of
FunDef a b c d -> do
(att1, a') <- toShATerm' att0 a
(att2, b') <- toShATerm' att1 b
(att3, c') <- toShATerm' att2 c
(att4, d') <- toShATerm' att3 d
return $ addATerm (ShAAppl "FunDef" [a', b', c', d'] []) att4
fromShATermAux ix att0 = case getShATerm ix att0 of
ShAAppl "FunDef" [a, b, c, d] _ ->
case fromShATerm' a att0 of
{ (att1, a') ->
case fromShATerm' b att1 of
{ (att2, b') ->
case fromShATerm' c att2 of
{ (att3, c') ->
case fromShATerm' d att3 of
{ (att4, d') ->
(att4, FunDef a' b' c' d') }}}}
u -> fromShATermError "FunDef" u
instance ShATermConvertible FplOpItem where
toShATermAux att0 xv = case xv of
FunOp a -> do
(att1, a') <- toShATerm' att0 a
return $ addATerm (ShAAppl "FunOp" [a'] []) att1
CaslOpItem a -> do
(att1, a') <- toShATerm' att0 a
return $ addATerm (ShAAppl "CaslOpItem" [a'] []) att1
fromShATermAux ix att0 = case getShATerm ix att0 of
ShAAppl "FunOp" [a] _ ->
case fromShATerm' a att0 of
{ (att1, a') ->
(att1, FunOp a') }
ShAAppl "CaslOpItem" [a] _ ->
case fromShATerm' a att0 of
{ (att1, a') ->
(att1, CaslOpItem a') }
u -> fromShATermError "FplOpItem" u
instance ShATermConvertible FplSortItem where
toShATermAux att0 xv = case xv of
FreeType a -> do
(att1, a') <- toShATerm' att0 a
return $ addATerm (ShAAppl "FreeType" [a'] []) att1
CaslSortItem a -> do
(att1, a') <- toShATerm' att0 a
return $ addATerm (ShAAppl "CaslSortItem" [a'] []) att1
fromShATermAux ix att0 = case getShATerm ix att0 of
ShAAppl "FreeType" [a] _ ->
case fromShATerm' a att0 of
{ (att1, a') ->
(att1, FreeType a') }
ShAAppl "CaslSortItem" [a] _ ->
case fromShATerm' a att0 of
{ (att1, a') ->
(att1, CaslSortItem a') }
u -> fromShATermError "FplSortItem" u
instance ShATermConvertible FplExt where
toShATermAux att0 xv = case xv of
FplSortItems a b -> do
(att1, a') <- toShATerm' att0 a
(att2, b') <- toShATerm' att1 b
return $ addATerm (ShAAppl "FplSortItems" [a', b'] []) att2
FplOpItems a b -> do
(att1, a') <- toShATerm' att0 a
(att2, b') <- toShATerm' att1 b
return $ addATerm (ShAAppl "FplOpItems" [a', b'] []) att2
fromShATermAux ix att0 = case getShATerm ix att0 of
ShAAppl "FplSortItems" [a, b] _ ->
case fromShATerm' a att0 of
{ (att1, a') ->
case fromShATerm' b att1 of
{ (att2, b') ->
(att2, FplSortItems a' b') }}
ShAAppl "FplOpItems" [a, b] _ ->
case fromShATerm' a att0 of
{ (att1, a') ->
case fromShATerm' b att1 of
{ (att2, b') ->
(att2, FplOpItems a' b') }}
u -> fromShATermError "FplExt" u
_tcTermExtTc :: TyCon
_tcTermExtTc = mkTyCon "Fpl.As.TermExt"
instance Typeable TermExt where
typeOf _ = mkTyConApp _tcTermExtTc []
_tcFunDefTc :: TyCon
_tcFunDefTc = mkTyCon "Fpl.As.FunDef"
instance Typeable FunDef where
typeOf _ = mkTyConApp _tcFunDefTc []
_tcFplOpItemTc :: TyCon
_tcFplOpItemTc = mkTyCon "Fpl.As.FplOpItem"
instance Typeable FplOpItem where
typeOf _ = mkTyConApp _tcFplOpItemTc []
_tcFplSortItemTc :: TyCon
_tcFplSortItemTc = mkTyCon "Fpl.As.FplSortItem"
instance Typeable FplSortItem where
typeOf _ = mkTyConApp _tcFplSortItemTc []
_tcFplExtTc :: TyCon
_tcFplExtTc = mkTyCon "Fpl.As.FplExt"
instance Typeable FplExt where
typeOf _ = mkTyConApp _tcFplExtTc []
_tcSignExtTc :: TyCon
_tcSignExtTc = mkTyCon "Fpl.Sign.SignExt"
instance Typeable SignExt where
typeOf _ = mkTyConApp _tcSignExtTc []
instance ShATermConvertible SignExt where
toShATermAux att0 xv = case xv of
SignExt a -> do
(att1, a') <- toShATerm' att0 a
return $ addATerm (ShAAppl "SignExt" [a'] []) att1
fromShATermAux ix att0 = case getShATerm ix att0 of
ShAAppl "SignExt" [a] _ ->
case fromShATerm' a att0 of
{ (att1, a') ->
(att1, SignExt a') }
u -> fromShATermError "SignExt" u
| nevrenato/Hets_Fork | Fpl/ATC_Fpl.hs | gpl-2.0 | 8,641 | 0 | 22 | 2,163 | 2,862 | 1,505 | 1,357 | 218 | 1 |
module Model.Collision where
import Model.Types
-- | Things that carry a bounding-box tree and can take part in
-- collision queries. Only 'getBBT' has no default; the query methods
-- default to always-'False' stubs (no collision detection implemented
-- yet), so instances are expected to override them.
class Collidable a where
    -- | The bounding-box hierarchy of the object.
    getBBT :: a -> BoundingBoxTree
    -- | Whether a ray hits the object. Stub: always 'False'.
    rayIntersection :: a -> Bool
    rayIntersection _ = False
    -- | Whether the object lies within the given distance of a
    -- position. Stub: always 'False'.
    -- (Fixed the misspelt parameter and silenced unused-binding
    -- warnings with wildcards.)
    withinDistance :: Translation -> GLfloat -> a -> Bool
    withinDistance _ _ _ = False
    -- | Whether two collidable objects overlap. Stub: always 'False'.
    collidesWith :: Collidable b => a -> b -> Bool
    collidesWith _ _ = False
-- | Axis-aligned bounding box given by its extreme coordinates along
-- each axis.
data BoundingBox =
    BoundingBox { bbMaxX :: GLfloat
                , bbMinX :: GLfloat
                , bbMaxY :: GLfloat
                , bbMinY :: GLfloat
                , bbMaxZ :: GLfloat
                , bbMinZ :: GLfloat
                } -- corners instead?
-- | Binary hierarchy of bounding boxes: an interior node holds a box
-- together with its two subtrees; 'BBTLeaf' marks an empty subtree.
data BoundingBoxTree =
    BoundingBoxTree BoundingBox BoundingBoxTree BoundingBoxTree
  | BBTLeaf
| halvorgb/AO2D | src/Model/Collision.hs | gpl-3.0 | 757 | 0 | 9 | 248 | 167 | 94 | 73 | 20 | 0 |
-- file: ch02/roundToEven.hs
{- The authors use this as a example of a nonrecursive function to examine it's
definition -}
isOdd n = mod n 2 == 1
| craigem/RealWorldHaskell | ch02/RoundtoEven.hs | gpl-3.0 | 147 | 0 | 6 | 29 | 20 | 10 | 10 | 1 | 1 |
{-# LANGUAGE OverloadedLists, QuasiQuotes #-}
module Nirum.Constructs.Module ( Module (Module, docs, types)
, coreModule
, coreModulePath
, coreTypes
, imports
) where
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import qualified Data.Text as T
import Text.InterpolatedString.Perl6 (q)
import Nirum.Constructs (Construct (toCode))
import Nirum.Constructs.Annotation (empty)
import Nirum.Constructs.Declaration (Documented (docs))
import qualified Nirum.Constructs.DeclarationSet as DS
import Nirum.Constructs.Docs (Docs)
import Nirum.Constructs.Identifier (Identifier)
import Nirum.Constructs.ModulePath (ModulePath)
import Nirum.Constructs.TypeDeclaration ( JsonType (Boolean, Number, String)
, PrimitiveTypeIdentifier ( Bigint
, Binary
, Bool
, Date
, Datetime
, Decimal
, Float32
, Float64
, Int32
, Int64
, Text
, Url
, Uuid
)
, Type (PrimitiveType)
, TypeDeclaration ( Import
, TypeDeclaration
, type'
, typeAnnotations
, typename
)
)
data Module = Module { types :: DS.DeclarationSet TypeDeclaration
, docs :: Maybe Docs
} deriving (Eq, Ord, Show)
-- | Render a module: docs (if any), then the import section, then the
-- type declarations, separated by blank lines.
instance Construct Module where
    toCode m@(Module types' docs') =
        T.concat [ maybe "" ((`T.snoc` '\n') . toCode) docs'
                 , T.intercalate "\n" importCodes
                 , if null importCodes then "\n" else "\n\n"
                 , T.intercalate "\n\n" typeCodes
                 , "\n"
                 ]
      where
        typeList :: [TypeDeclaration]
        typeList = DS.toList types'
        -- Render one imported name, adding an "as" alias only when the
        -- local identifier differs from the source identifier.
        importIdentifiersToCode :: (Identifier, Identifier) -> T.Text
        importIdentifiersToCode (i, s) = if i == s
                                         then toCode i
                                         else T.concat [ toCode s
                                                       , " as "
                                                       , toCode i
                                                       ]
        -- One "import <path> (...);" line per imported module path,
        -- names listed in ascending order.
        importCodes :: [T.Text]
        importCodes =
            [ T.concat [ "import ", toCode p, " ("
                       , T.intercalate ", " $
                             map importIdentifiersToCode $ S.toAscList i
                       , ");"
                       ]
            | (p, i) <- M.toAscList (imports m)
            ]
        -- All non-import declarations, in declaration order.
        typeCodes :: [T.Text]
        typeCodes = [ toCode t
                    | t <- typeList
                    , case t of
                          Import {} -> False
                          _ -> True
                    ]
-- | A module's documentation is its own docs field.
instance Documented Module where
    docs (Module _ docs') = docs'
-- | Collect the module's imports, grouped by source module path. Each
-- path maps to the set of (local alias, original name) pairs imported
-- from it; non-import declarations are skipped by the pattern match.
imports :: Module -> M.Map ModulePath (S.Set (Identifier, Identifier))
imports (Module decls _) =
    M.fromListWith S.union [(p, [(i, s)]) | Import p i s _ <- DS.toList decls]
-- | The implicit path of the built-in core module.
coreModulePath :: ModulePath
coreModulePath = ["core"]
-- | The built-in core module: the primitive types plus their docs.
coreModule :: Module
coreModule = Module coreTypes $ Just coreDocs
-- | The built-in primitive types, each paired with the JSON
-- representation it serializes to.
coreTypes :: DS.DeclarationSet TypeDeclaration
coreTypes =
    -- number types
    [ decl' "bigint" Bigint String
    , decl' "decimal" Decimal String
    , decl' "int32" Int32 Number
    , decl' "int64" Int64 Number
    , decl' "float32" Float32 Number
    , decl' "float64" Float64 Number
    -- string types
    , decl' "text" Text String
    , decl' "binary" Binary String
    -- time types
    , decl' "date" Date String
    , decl' "datetime" Datetime String
    -- et cetera
    , decl' "bool" Bool Boolean
    , decl' "uuid" Uuid String
    , decl' "url" Url String
    -- FIXME: deprecated
    , decl' "uri" Url String
    ]
  where
    -- Build an annotation-free primitive type declaration.
    decl' name prim json =
        TypeDeclaration { typename = name
                        , type' = PrimitiveType prim json
                        , typeAnnotations = empty
                        }
coreDocs :: Docs
coreDocs = [q|
Built-in types
==============
The core module is implicitly imported by every module so that built-in types
are available everywhere.
TBD.
|]
| spoqa/nirum | src/Nirum/Constructs/Module.hs | gpl-3.0 | 5,416 | 0 | 14 | 2,831 | 953 | 556 | 397 | 102 | 1 |
import Data.List
-- | The Collatz trajectory starting at n, down to (and including) 1.
collatzR :: Int -> [Int]
collatzR n
  | n == 1    = [1]
  | even n    = n : collatzR (n `div` 2)
  | otherwise = n : collatzR (3 * n + 1)
-- | Fold over a list of pairs, keeping the pair with the largest first
-- component; the second argument is the running best (the seed). Ties
-- keep the current best. (Added the missing top-level type signature.)
biggestTuple :: Ord a => [(a, b)] -> (a, b) -> (a, b)
biggestTuple [] a = a
biggestTuple (x:xs) a
  | fst x > fst a = biggestTuple xs x
  | otherwise     = biggestTuple xs a
-- | Project Euler 14: among starting numbers below one million, print
-- (chain length, starting number) for the longest Collatz chain.
main :: IO ()
main =
  -- Fused the two map passes (map f . map g == map (f . g)) and
  -- dropped the redundant trailing 'return ()'. Every chain starts
  -- with its seed, so 'head' is safe here.
  print $ biggestTuple
    (map ((\xs -> (length xs, head xs)) . collatzR) [999999, 999998 .. 1])
    (0, 0)
| rafaelcgs10/project-euler | p14.hs | gpl-3.0 | 423 | 0 | 15 | 123 | 235 | 118 | 117 | 13 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.TargetHTTPProxies.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves the list of TargetHttpProxy resources available to the
-- specified project.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.targetHttpProxies.list@.
module Network.Google.Resource.Compute.TargetHTTPProxies.List
(
-- * REST Resource
TargetHTTPProxiesListResource
-- * Creating a Request
, targetHTTPProxiesList
, TargetHTTPProxiesList
-- * Request Lenses
, thttpplReturnPartialSuccess
, thttpplOrderBy
, thttpplProject
, thttpplFilter
, thttpplPageToken
, thttpplMaxResults
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.targetHttpProxies.list@ method which the
-- 'TargetHTTPProxiesList' request conforms to.
type TargetHTTPProxiesListResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"global" :>
"targetHttpProxies" :>
QueryParam "returnPartialSuccess" Bool :>
QueryParam "orderBy" Text :>
QueryParam "filter" Text :>
QueryParam "pageToken" Text :>
QueryParam "maxResults" (Textual Word32) :>
QueryParam "alt" AltJSON :>
Get '[JSON] TargetHTTPProxyList
-- | Retrieves the list of TargetHttpProxy resources available to the
-- specified project.
--
-- /See:/ 'targetHTTPProxiesList' smart constructor.
data TargetHTTPProxiesList =
TargetHTTPProxiesList'
{ _thttpplReturnPartialSuccess :: !(Maybe Bool)
, _thttpplOrderBy :: !(Maybe Text)
, _thttpplProject :: !Text
, _thttpplFilter :: !(Maybe Text)
, _thttpplPageToken :: !(Maybe Text)
, _thttpplMaxResults :: !(Textual Word32)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TargetHTTPProxiesList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'thttpplReturnPartialSuccess'
--
-- * 'thttpplOrderBy'
--
-- * 'thttpplProject'
--
-- * 'thttpplFilter'
--
-- * 'thttpplPageToken'
--
-- * 'thttpplMaxResults'
targetHTTPProxiesList
    :: Text -- ^ 'thttpplProject'
    -> TargetHTTPProxiesList
targetHTTPProxiesList pThttpplProject_ =
  -- Only the project is required; all filtering/paging options start
  -- unset, and the page size starts at the documented default of 500.
  TargetHTTPProxiesList'
    { _thttpplReturnPartialSuccess = Nothing
    , _thttpplOrderBy = Nothing
    , _thttpplProject = pThttpplProject_
    , _thttpplFilter = Nothing
    , _thttpplPageToken = Nothing
    , _thttpplMaxResults = 500
    }
-- | Opt-in for partial success behavior which provides partial results in
-- case of failure. The default value is false.
thttpplReturnPartialSuccess :: Lens' TargetHTTPProxiesList (Maybe Bool)
thttpplReturnPartialSuccess
= lens _thttpplReturnPartialSuccess
(\ s a -> s{_thttpplReturnPartialSuccess = a})
-- | Sorts list results by a certain order. By default, results are returned
-- in alphanumerical order based on the resource name. You can also sort
-- results in descending order based on the creation timestamp using
-- \`orderBy=\"creationTimestamp desc\"\`. This sorts results based on the
-- \`creationTimestamp\` field in reverse chronological order (newest
-- result first). Use this to sort resources like operations so that the
-- newest operation is returned first. Currently, only sorting by \`name\`
-- or \`creationTimestamp desc\` is supported.
thttpplOrderBy :: Lens' TargetHTTPProxiesList (Maybe Text)
thttpplOrderBy
= lens _thttpplOrderBy
(\ s a -> s{_thttpplOrderBy = a})
-- | Project ID for this request.
thttpplProject :: Lens' TargetHTTPProxiesList Text
thttpplProject
= lens _thttpplProject
(\ s a -> s{_thttpplProject = a})
-- | A filter expression that filters resources listed in the response. The
-- expression must specify the field name, a comparison operator, and the
-- value that you want to use for filtering. The value must be a string, a
-- number, or a boolean. The comparison operator must be either \`=\`,
-- \`!=\`, \`>\`, or \`\<\`. For example, if you are filtering Compute
-- Engine instances, you can exclude instances named \`example-instance\`
-- by specifying \`name != example-instance\`. You can also filter nested
-- fields. For example, you could specify \`scheduling.automaticRestart =
-- false\` to include instances only if they are not scheduled for
-- automatic restarts. You can use filtering on nested fields to filter
-- based on resource labels. To filter on multiple expressions, provide
-- each separate expression within parentheses. For example: \`\`\`
-- (scheduling.automaticRestart = true) (cpuPlatform = \"Intel Skylake\")
-- \`\`\` By default, each expression is an \`AND\` expression. However,
-- you can include \`AND\` and \`OR\` expressions explicitly. For example:
-- \`\`\` (cpuPlatform = \"Intel Skylake\") OR (cpuPlatform = \"Intel
-- Broadwell\") AND (scheduling.automaticRestart = true) \`\`\`
thttpplFilter :: Lens' TargetHTTPProxiesList (Maybe Text)
thttpplFilter
= lens _thttpplFilter
(\ s a -> s{_thttpplFilter = a})
-- | Specifies a page token to use. Set \`pageToken\` to the
-- \`nextPageToken\` returned by a previous list request to get the next
-- page of results.
thttpplPageToken :: Lens' TargetHTTPProxiesList (Maybe Text)
thttpplPageToken
= lens _thttpplPageToken
(\ s a -> s{_thttpplPageToken = a})
-- | The maximum number of results per page that should be returned. If the
-- number of available results is larger than \`maxResults\`, Compute
-- Engine returns a \`nextPageToken\` that can be used to get the next page
-- of results in subsequent list requests. Acceptable values are \`0\` to
-- \`500\`, inclusive. (Default: \`500\`)
thttpplMaxResults :: Lens' TargetHTTPProxiesList Word32
thttpplMaxResults
= lens _thttpplMaxResults
(\ s a -> s{_thttpplMaxResults = a})
. _Coerce
instance GoogleRequest TargetHTTPProxiesList where
type Rs TargetHTTPProxiesList = TargetHTTPProxyList
type Scopes TargetHTTPProxiesList =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/compute.readonly"]
requestClient TargetHTTPProxiesList'{..}
= go _thttpplProject _thttpplReturnPartialSuccess
_thttpplOrderBy
_thttpplFilter
_thttpplPageToken
(Just _thttpplMaxResults)
(Just AltJSON)
computeService
where go
= buildClient
(Proxy :: Proxy TargetHTTPProxiesListResource)
mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/TargetHTTPProxies/List.hs | mpl-2.0 | 7,533 | 0 | 19 | 1,588 | 758 | 454 | 304 | 114 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CloudResourceManager.TagBindings.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes a TagBinding.
--
-- /See:/ <https://cloud.google.com/resource-manager Cloud Resource Manager API Reference> for @cloudresourcemanager.tagBindings.delete@.
module Network.Google.Resource.CloudResourceManager.TagBindings.Delete
(
-- * REST Resource
TagBindingsDeleteResource
-- * Creating a Request
, tagBindingsDelete
, TagBindingsDelete
-- * Request Lenses
, tbdXgafv
, tbdUploadProtocol
, tbdAccessToken
, tbdUploadType
, tbdName
, tbdCallback
) where
import Network.Google.Prelude
import Network.Google.ResourceManager.Types
-- | A resource alias for @cloudresourcemanager.tagBindings.delete@ method which the
-- 'TagBindingsDelete' request conforms to.
type TagBindingsDeleteResource =
"v3" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] Operation
-- | Deletes a TagBinding.
--
-- /See:/ 'tagBindingsDelete' smart constructor.
data TagBindingsDelete =
TagBindingsDelete'
{ _tbdXgafv :: !(Maybe Xgafv)
, _tbdUploadProtocol :: !(Maybe Text)
, _tbdAccessToken :: !(Maybe Text)
, _tbdUploadType :: !(Maybe Text)
, _tbdName :: !Text
, _tbdCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TagBindingsDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tbdXgafv'
--
-- * 'tbdUploadProtocol'
--
-- * 'tbdAccessToken'
--
-- * 'tbdUploadType'
--
-- * 'tbdName'
--
-- * 'tbdCallback'
tagBindingsDelete
    :: Text -- ^ 'tbdName'
    -> TagBindingsDelete
tagBindingsDelete pTbdName_ =
  -- Only the TagBinding resource name is required; every
  -- transport/auth option starts unset.
  TagBindingsDelete'
    { _tbdXgafv = Nothing
    , _tbdUploadProtocol = Nothing
    , _tbdAccessToken = Nothing
    , _tbdUploadType = Nothing
    , _tbdName = pTbdName_
    , _tbdCallback = Nothing
    }
-- | V1 error format.
tbdXgafv :: Lens' TagBindingsDelete (Maybe Xgafv)
tbdXgafv = lens _tbdXgafv (\ s a -> s{_tbdXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
tbdUploadProtocol :: Lens' TagBindingsDelete (Maybe Text)
tbdUploadProtocol
= lens _tbdUploadProtocol
(\ s a -> s{_tbdUploadProtocol = a})
-- | OAuth access token.
tbdAccessToken :: Lens' TagBindingsDelete (Maybe Text)
tbdAccessToken
= lens _tbdAccessToken
(\ s a -> s{_tbdAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
tbdUploadType :: Lens' TagBindingsDelete (Maybe Text)
tbdUploadType
= lens _tbdUploadType
(\ s a -> s{_tbdUploadType = a})
-- | Required. The name of the TagBinding. This is a String of the form:
-- \`tagBindings\/{id}\` (e.g.
-- \`tagBindings\/%2F%2Fcloudresourcemanager.googleapis.com%2Fprojects%2F123\/tagValues\/456\`).
tbdName :: Lens' TagBindingsDelete Text
tbdName = lens _tbdName (\ s a -> s{_tbdName = a})
-- | JSONP
tbdCallback :: Lens' TagBindingsDelete (Maybe Text)
tbdCallback
= lens _tbdCallback (\ s a -> s{_tbdCallback = a})
-- Wires the request record to the 'TagBindingsDeleteResource' servant-style
-- route; the argument order of @go@ must match the order of path captures
-- and query parameters declared in that type.
instance GoogleRequest TagBindingsDelete where
        type Rs TagBindingsDelete = Operation
        type Scopes TagBindingsDelete =
             '["https://www.googleapis.com/auth/cloud-platform"]
        requestClient TagBindingsDelete'{..}
          = go _tbdName _tbdXgafv _tbdUploadProtocol
              _tbdAccessToken
              _tbdUploadType
              _tbdCallback
              (Just AltJSON)
              resourceManagerService
          where go
                  = buildClient
                      (Proxy :: Proxy TagBindingsDeleteResource)
                      mempty
| brendanhay/gogol | gogol-resourcemanager/gen/Network/Google/Resource/CloudResourceManager/TagBindings/Delete.hs | mpl-2.0 | 4,614 | 0 | 15 | 1,031 | 696 | 407 | 289 | 100 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.PacketMirrorings.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a PacketMirroring resource in the specified project and region
-- using the data included in the request.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.packetMirrorings.insert@.
module Network.Google.Resource.Compute.PacketMirrorings.Insert
(
-- * REST Resource
PacketMirroringsInsertResource
-- * Creating a Request
, packetMirroringsInsert
, PacketMirroringsInsert
-- * Request Lenses
, pmiRequestId
, pmiProject
, pmiPayload
, pmiRegion
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.packetMirrorings.insert@ method which the
-- 'PacketMirroringsInsert' request conforms to.
--
-- Encodes: POST \/compute\/v1\/projects\/{project}\/regions\/{region}\/packetMirrorings
-- with an optional @requestId@ query parameter and a JSON 'PacketMirroring' body.
type PacketMirroringsInsertResource =
     "compute" :>
       "v1" :>
         "projects" :>
           Capture "project" Text :>
             "regions" :>
               Capture "region" Text :>
                 "packetMirrorings" :>
                   QueryParam "requestId" Text :>
                     QueryParam "alt" AltJSON :>
                       ReqBody '[JSON] PacketMirroring :>
                         Post '[JSON] Operation
-- | Creates a PacketMirroring resource in the specified project and region
-- using the data included in the request.
--
-- /See:/ 'packetMirroringsInsert' smart constructor.
data PacketMirroringsInsert =
  PacketMirroringsInsert'
    { _pmiRequestId :: !(Maybe Text) -- ^ Optional idempotency token.
    , _pmiProject :: !Text -- ^ Project ID (path capture).
    , _pmiPayload :: !PacketMirroring -- ^ Request body.
    , _pmiRegion :: !Text -- ^ Region name (path capture).
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Builds a 'PacketMirroringsInsert' request from its three required
-- fields; the optional request ID starts out unset.
--
-- Fields can be adjusted afterwards through these lenses:
--
-- * 'pmiRequestId'
--
-- * 'pmiProject'
--
-- * 'pmiPayload'
--
-- * 'pmiRegion'
packetMirroringsInsert
    :: Text -- ^ 'pmiProject'
    -> PacketMirroring -- ^ 'pmiPayload'
    -> Text -- ^ 'pmiRegion'
    -> PacketMirroringsInsert
packetMirroringsInsert project payload region =
  PacketMirroringsInsert'
    { _pmiProject = project
    , _pmiPayload = payload
    , _pmiRegion = region
    , _pmiRequestId = Nothing
    }
-- | An optional request ID used to make retries safe: give each logical
-- request a unique ID and the server will ignore a retransmission of a
-- request it has already completed, preventing duplicate commitments. The
-- ID must be a valid UUID, except that the zero UUID
-- (00000000-0000-0000-0000-000000000000) is not supported.
pmiRequestId :: Lens' PacketMirroringsInsert (Maybe Text)
pmiRequestId = lens _pmiRequestId (\st v -> st {_pmiRequestId = v})

-- | Project ID for this request.
pmiProject :: Lens' PacketMirroringsInsert Text
pmiProject = lens _pmiProject (\st v -> st {_pmiProject = v})

-- | Multipart request metadata.
pmiPayload :: Lens' PacketMirroringsInsert PacketMirroring
pmiPayload = lens _pmiPayload (\st v -> st {_pmiPayload = v})

-- | Name of the region for this request.
pmiRegion :: Lens' PacketMirroringsInsert Text
pmiRegion = lens _pmiRegion (\st v -> st {_pmiRegion = v})
-- Wires the request record to 'PacketMirroringsInsertResource'; the
-- argument order of @go@ mirrors the path captures, query parameters and
-- body of that route type.
instance GoogleRequest PacketMirroringsInsert where
        type Rs PacketMirroringsInsert = Operation
        type Scopes PacketMirroringsInsert =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/compute"]
        requestClient PacketMirroringsInsert'{..}
          = go _pmiProject _pmiRegion _pmiRequestId
              (Just AltJSON)
              _pmiPayload
              computeService
          where go
                  = buildClient
                      (Proxy :: Proxy PacketMirroringsInsertResource)
                      mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/PacketMirrorings/Insert.hs | mpl-2.0 | 4,927 | 0 | 17 | 1,112 | 559 | 335 | 224 | 87 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.OrderDocuments.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets one order document by ID.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ Campaign Manager 360 API Reference> for @dfareporting.orderDocuments.get@.
module Network.Google.Resource.DFAReporting.OrderDocuments.Get
(
-- * REST Resource
OrderDocumentsGetResource
-- * Creating a Request
, orderDocumentsGet
, OrderDocumentsGet
-- * Request Lenses
, odgXgafv
, odgUploadProtocol
, odgAccessToken
, odgUploadType
, odgProFileId
, odgId
, odgProjectId
, odgCallback
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | A resource alias for @dfareporting.orderDocuments.get@ method which the
-- 'OrderDocumentsGet' request conforms to.
--
-- Encodes: GET \/dfareporting\/v3.5\/userprofiles\/{profileId}\/projects\/{projectId}\/orderDocuments\/{id}
-- (numeric path captures are transported as strings via 'Textual').
type OrderDocumentsGetResource =
     "dfareporting" :>
       "v3.5" :>
         "userprofiles" :>
           Capture "profileId" (Textual Int64) :>
             "projects" :>
               Capture "projectId" (Textual Int64) :>
                 "orderDocuments" :>
                   Capture "id" (Textual Int64) :>
                     QueryParam "$.xgafv" Xgafv :>
                       QueryParam "upload_protocol" Text :>
                         QueryParam "access_token" Text :>
                           QueryParam "uploadType" Text :>
                             QueryParam "callback" Text :>
                               QueryParam "alt" AltJSON :>
                                 Get '[JSON] OrderDocument
-- | Gets one order document by ID.
--
-- /See:/ 'orderDocumentsGet' smart constructor.
data OrderDocumentsGet =
  OrderDocumentsGet'
    { _odgXgafv :: !(Maybe Xgafv) -- ^ V1 error format.
    , _odgUploadProtocol :: !(Maybe Text) -- ^ Upload protocol for media.
    , _odgAccessToken :: !(Maybe Text) -- ^ OAuth access token.
    , _odgUploadType :: !(Maybe Text) -- ^ Legacy upload protocol for media.
    , _odgProFileId :: !(Textual Int64) -- ^ User profile ID (path capture).
    , _odgId :: !(Textual Int64) -- ^ Order document ID (path capture).
    , _odgProjectId :: !(Textual Int64) -- ^ Project ID (path capture).
    , _odgCallback :: !(Maybe Text) -- ^ JSONP callback.
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Builds an 'OrderDocumentsGet' request from the three required numeric
-- identifiers; every optional field starts out unset.
--
-- Fields can be adjusted afterwards through these lenses:
--
-- * 'odgXgafv'
--
-- * 'odgUploadProtocol'
--
-- * 'odgAccessToken'
--
-- * 'odgUploadType'
--
-- * 'odgProFileId'
--
-- * 'odgId'
--
-- * 'odgProjectId'
--
-- * 'odgCallback'
orderDocumentsGet
    :: Int64 -- ^ 'odgProFileId'
    -> Int64 -- ^ 'odgId'
    -> Int64 -- ^ 'odgProjectId'
    -> OrderDocumentsGet
orderDocumentsGet profileId docId projectId =
  OrderDocumentsGet'
    { _odgProFileId = _Coerce # profileId
    , _odgId = _Coerce # docId
    , _odgProjectId = _Coerce # projectId
    , _odgXgafv = Nothing
    , _odgUploadProtocol = Nothing
    , _odgAccessToken = Nothing
    , _odgUploadType = Nothing
    , _odgCallback = Nothing
    }
-- | V1 error format.
odgXgafv :: Lens' OrderDocumentsGet (Maybe Xgafv)
odgXgafv = lens _odgXgafv (\st v -> st {_odgXgafv = v})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
odgUploadProtocol :: Lens' OrderDocumentsGet (Maybe Text)
odgUploadProtocol = lens _odgUploadProtocol (\st v -> st {_odgUploadProtocol = v})

-- | OAuth access token.
odgAccessToken :: Lens' OrderDocumentsGet (Maybe Text)
odgAccessToken = lens _odgAccessToken (\st v -> st {_odgAccessToken = v})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
odgUploadType :: Lens' OrderDocumentsGet (Maybe Text)
odgUploadType = lens _odgUploadType (\st v -> st {_odgUploadType = v})

-- | User profile ID associated with this request.
odgProFileId :: Lens' OrderDocumentsGet Int64
odgProFileId = lens _odgProFileId (\st v -> st {_odgProFileId = v}) . _Coerce

-- | Order document ID.
odgId :: Lens' OrderDocumentsGet Int64
odgId = lens _odgId (\st v -> st {_odgId = v}) . _Coerce

-- | Project ID for order documents.
odgProjectId :: Lens' OrderDocumentsGet Int64
odgProjectId = lens _odgProjectId (\st v -> st {_odgProjectId = v}) . _Coerce

-- | JSONP
odgCallback :: Lens' OrderDocumentsGet (Maybe Text)
odgCallback = lens _odgCallback (\st v -> st {_odgCallback = v})
-- Wires the request record to 'OrderDocumentsGetResource'; @go@'s argument
-- order must match the captures and query parameters declared in that type.
instance GoogleRequest OrderDocumentsGet where
        type Rs OrderDocumentsGet = OrderDocument
        type Scopes OrderDocumentsGet =
             '["https://www.googleapis.com/auth/dfatrafficking"]
        requestClient OrderDocumentsGet'{..}
          = go _odgProFileId _odgProjectId _odgId _odgXgafv
              _odgUploadProtocol
              _odgAccessToken
              _odgUploadType
              _odgCallback
              (Just AltJSON)
              dFAReportingService
          where go
                  = buildClient
                      (Proxy :: Proxy OrderDocumentsGetResource)
                      mempty
| brendanhay/gogol | gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/OrderDocuments/Get.hs | mpl-2.0 | 5,538 | 0 | 21 | 1,401 | 920 | 529 | 391 | 129 | 1 |
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses, TypeSynonymInstances #-}
{-# LANGUAGE ParallelListComp, PatternGuards #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Layout.LayoutHints
-- Copyright : (c) David Roundy <[email protected]>
-- License : BSD
--
-- Maintainer : none
-- Stability : unstable
-- Portability : unportable
--
-- Make layouts respect size hints.
-----------------------------------------------------------------------------
module XMonad.Layout.LayoutHints
( -- * usage
-- $usage
layoutHints
, layoutHintsWithPlacement
, layoutHintsToCenter
, LayoutHints
, LayoutHintsToCenter
, hintsEventHook
) where
import XMonad(LayoutClass(runLayout), mkAdjust, Window,
Dimension, Position, Rectangle(Rectangle), D,
X, refresh, Event(..), propertyNotify, wM_NORMAL_HINTS,
(<&&>), io, applySizeHints, whenX, isClient, withDisplay,
getWindowAttributes, getWMNormalHints, WindowAttributes(..))
import qualified XMonad.StackSet as W
import XMonad.Layout.Decoration(isInStack)
import XMonad.Layout.LayoutModifier(ModifiedLayout(..),
LayoutModifier(modifyLayout, redoLayout, modifierDescription))
import XMonad.Util.Types(Direction2D(..))
import Control.Applicative((<$>))
import Control.Arrow(Arrow((***), first, second))
import Control.Monad(join)
import Data.Function(on)
import Data.List(sortBy)
import Data.Monoid(All(..))
import Data.Set (Set)
import qualified Data.Set as Set
-- $usage
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Layout.LayoutHints
--
-- Then edit your @layoutHook@ by adding the 'layoutHints' layout modifier
-- to some layout:
--
-- > myLayout = layoutHints (Tall 1 (3/100) (1/2)) ||| Full ||| etc..
-- > main = xmonad defaultConfig { layoutHook = myLayout }
--
-- Or, to center the adapted window in its available area:
--
-- > myLayout = layoutHintsWithPlacement (0.5, 0.5) (Tall 1 (3/100) (1/2))
-- > ||| Full ||| etc..
--
-- Or, to make a reasonable attempt to eliminate gaps between windows:
--
-- > myLayout = layoutHintsToCenter (Tall 1 (3/100) (1/2))
--
-- For more detailed instructions on editing the layoutHook see:
--
-- "XMonad.Doc.Extending#Editing_the_layout_hook"
--
-- To make XMonad reflect changes in window hints immediately, add
-- 'hintsEventHook' to your 'handleEventHook'.
--
-- > myHandleEventHook = hintsEventHook <+> ...
-- >
-- > main = xmonad defaultConfig { handleEventHook = myHandleEventHook
-- > , ... }
-- | Apply size hints to every window of the underlying layout, keeping each
-- adjusted window anchored at the top-left of its assigned area.
layoutHints :: (LayoutClass l a) => l a -> ModifiedLayout LayoutHints l a
layoutHints underlying = ModifiedLayout (LayoutHints (0, 0)) underlying
-- | Like 'layoutHints', but a window shrunk by its size hints is positioned
-- inside its originally assigned area according to @(rx, ry)@:
-- (0, 0) places it at the top left, (1, 0) at the top right,
-- (0.5, 0.5) at the center, and so on.
layoutHintsWithPlacement :: (LayoutClass l a) => (Double, Double)
                         -> l a -> ModifiedLayout LayoutHints l a
layoutHintsWithPlacement placement = ModifiedLayout (LayoutHints placement)
-- | @layoutHintsToCenter layout@ applies hints, sliding each hinted window
-- towards the center of the screen and expanding its neighbors into the
-- resulting gaps. Windows are never expanded in a way that increases
-- overlap.
--
-- Only one resizing pass is made over the neighbors of hinted windows, so
-- some layouts (e.g. two 'Mirror' 'Tall's stacked vertically) may retain
-- gaps; simple layouts like 'Tall' are unaffected.
layoutHintsToCenter :: (LayoutClass l a) => l a -> ModifiedLayout LayoutHintsToCenter l a
layoutHintsToCenter layout = ModifiedLayout LayoutHintsToCenter layout
-- | Modifier state: the @(rx, ry)@ relative placement handed to
-- 'placeRectangle' when a window is shrunk by its hints.
data LayoutHints a = LayoutHints (Double, Double)
    deriving (Read, Show)
instance LayoutModifier LayoutHints Window where
    modifierDescription _ = ""
    -- No stack (empty workspace): nothing to adjust.
    redoLayout _ _ Nothing xs = return (xs, Nothing)
    redoLayout (LayoutHints al) _ (Just s) xs
        = do xs' <- mapM (\x@(_, r) -> second (placeRectangle al r) <$> applyHint x) xs
             return (xs', Nothing)
        where
          -- Shrink a window's assigned rectangle to honor its size hints;
          -- windows not in the stack keep their original rectangle.
          applyHint (w,r@(Rectangle a b c d)) = do
            adj <- mkAdjust w
            let (c',d') = adj (c,d)
            return (w, if isInStack s w then Rectangle a b c' d' else r)
-- | @placeRectangle (rx, ry) outer inner@ keeps @inner@'s dimensions but
-- positions it inside @outer@ as specified by @(rx, ry)@ (see
-- 'layoutHintsWithPlacement').
placeRectangle :: RealFrac r => (r, r) -> Rectangle -> Rectangle -> Rectangle
placeRectangle (rx, ry) (Rectangle ox oy ow oh) (Rectangle _ _ iw ih) =
    Rectangle (slide ox iw ow rx) (slide oy ih oh ry) iw ih
  where
    -- Offset the origin by the requested fraction of the leftover space.
    slide :: RealFrac r => Position -> Dimension -> Dimension -> r -> Position
    slide origin inner outer frac =
        origin + truncate (fromIntegral (outer - inner) * frac)
-- | Score an arrangement by counting the pairs of rectangles whose edges
-- touch (higher means fewer gaps).
fitting :: [Rectangle] -> Int
fitting rects = length [ () | r <- rects, r' <- rects, touching r r' ]
-- | Produce several candidate processing orders for the hinted windows,
-- each sorted by a different aggregate of the corner-to-center distances;
-- the caller picks the ordering whose result 'fitting's best.
applyOrder :: Rectangle -> [((Window, Rectangle),t)] -> [[((Window, Rectangle),t)]]
applyOrder root wrs = do
    -- perhaps it would just be better to take all permutations, or apply the
    -- resizing multiple times
    f <- [maximum, minimum, sum, sum . map sq]
    return $ sortBy (compare `on` (f . distance)) wrs
  where distFC = uncurry ((+) `on` sq) . pairWise (-) (center root)
        -- squared distances from each corner of a window to the root center
        distance = map distFC . corners . snd . fst
        pairWise f (a,b) (c,d) = (f a c, f b d)
        sq = join (*)
-- | Stateless marker for the hints-to-center layout modifier.
data LayoutHintsToCenter a = LayoutHintsToCenter deriving (Read, Show)
instance LayoutModifier LayoutHintsToCenter Window where
    modifyLayout _ ws@(W.Workspace _ _ Nothing) r = runLayout ws r
    modifyLayout _ ws@(W.Workspace _ _ (Just st)) r = do
        (arrs,ol) <- runLayout ws r
        -- Try every ordering from 'applyOrder', apply the hints in each, and
        -- keep the arrangement with the best 'fitting' score.
        flip (,) ol
            . head . reverse . sortBy (compare `on` (fitting . map snd))
            . map (applyHints st r) . applyOrder r
            <$> mapM (\x -> fmap ((,) x) $ mkAdjust (fst x)) arrs
-- apply hints to first, grow adjacent windows
--
-- Processes the windows in order: the head window is shrunk to its hints
-- and re-centered within its old area; every later window is offered the
-- freed space via 'growOther' before being processed itself.
applyHints :: W.Stack Window -> Rectangle -> [((Window, Rectangle),(D -> D))] -> [(Window, Rectangle)]
applyHints _ _ [] = []
applyHints s root (((w,lrect@(Rectangle a b c d)),adj):xs) =
    let (c',d') = adj (c,d)
        -- hinted size, slid towards the root center inside its old area
        redr = placeRectangle (centerPlacement root lrect :: (Double,Double)) lrect
                $ if isInStack s w then Rectangle a b c' d' else lrect

        -- how much width/height the window gave up
        ds = (fromIntegral c - fromIntegral c',fromIntegral d - fromIntegral d')
        growOther' r = growOther ds lrect (freeDirs root lrect) r
        mapSnd f = map (first $ second f)
        next = applyHints s root $ mapSnd growOther' xs
    in (w,redr):next
-- | Grow rectangle @r@ into the space of size @ds@ given up by @lrect@, but
-- only along directions that are both adjacent to @lrect@ and free towards
-- the screen edge, and never along two opposite directions at once.
growOther :: (Position, Position) -> Rectangle -> Set Direction2D -> Rectangle -> Rectangle
growOther ds lrect fds r
    | dirs <- flipDir <$> Set.toList (Set.intersection adj fds)
    , not $ any (uncurry opposite) $ cross dirs =
        foldr (flip grow ds) r dirs
    | otherwise = r
    where
        adj = adjacent lrect r
        cross xs = [ (a,b) | a <- xs, b <- xs ]
-- | The opposite compass direction.
flipDir :: Direction2D -> Direction2D
flipDir L = R
flipDir R = L
flipDir U = D
flipDir D = U

-- | Whether two directions are opposites of each other.
opposite :: Direction2D -> Direction2D -> Bool
opposite a b = b == flipDir a
-- | Leave the opposite edges where they were
grow :: Direction2D -> (Position,Position) -> Rectangle -> Rectangle
grow dir (px, py) (Rectangle x y w h) = case dir of
    L -> Rectangle (x - px) y (w + fromIntegral px) h
    U -> Rectangle x (y - py) w (h + fromIntegral py)
    R -> Rectangle x y (w + fromIntegral px) h
    D -> Rectangle x y w (h + fromIntegral py)
-- | Walk the four edges of both rectangles in lockstep (the second
-- rectangle's corner list rotated by two so matching edges line up) and
-- collect each direction whose edges are collinear and whose spans satisfy
-- the @surrounds@ predicate.
comparingEdges :: ([Position] -> [Position] -> Bool) -> Rectangle -> Rectangle -> Set Direction2D
comparingEdges surrounds r1 r2 = Set.fromList $ map fst $ filter snd [ (\k -> (dir,k)) $
    any and [[dir `elem` [R,L], allEq [a,c,w,y], [b,d] `surrounds` [x,z]]
            ,[dir `elem` [U,D], allEq [b,d,x,z], [a,c] `surrounds` [w,y]]]
    | ((a,b),(c,d)) <- edge $ corners r1
    | ((w,x),(y,z)) <- edge $ delay 2 $ corners r2
    | dir <- [U,R,D,L]]
 where edge (x:xs) = zip (x:xs) (xs ++ [x])
       edge [] = []
       delay n xs = drop n xs ++ take n xs
       allEq = all (uncurry (==)) . edge
-- | In what direction is the second window from the first that can expand
-- if the first is shrunk, assuming that the root window is fully covered:
-- one direction for a common edge, two directions for a common corner.
adjacent :: Rectangle -> Rectangle -> Set Direction2D
adjacent = comparingEdges (\outerEdge innerEdge -> all (onClosedInterval outerEdge) innerEdge)
-- | True whenever two edges touch. not (Set.null $ adjacent x y) ==> touching x y
touching :: Rectangle -> Rectangle -> Bool
touching r1 r2 = not . Set.null $ comparingEdges overlaps r1 r2
  where
    overlaps xs ys = any (onClosedInterval xs) ys || any (onClosedInterval ys) xs
-- | Whether a value lies within the closed interval spanned by the extremes
-- of the (non-empty) list.
onClosedInterval :: Ord a => [a] -> a -> Bool
onClosedInterval bounds v = lo <= v && v <= hi
  where
    lo = minimum bounds
    hi = maximum bounds
-- | The four corners, starting top left and going clockwise.
corners :: Rectangle -> [(Position, Position)]
corners (Rectangle x y w h) =
    [ (x, y)
    , (right, y)
    , (right, bottom)
    , (x, bottom)
    ]
  where
    right = x + fromIntegral w
    bottom = y + fromIntegral h
-- | The midpoint of a rectangle.
center :: Rectangle -> (Position, Position)
center (Rectangle x y w h) = (midpoint x w, midpoint y h)
  where
    midpoint coord len = coord + fromIntegral len `div` 2
-- | Per-axis relative placement that slides a rectangle towards the root's
-- center: 0, 0.5 or 1 depending on which side of the root center it lies.
centerPlacement :: RealFrac r => Rectangle -> Rectangle -> (r, r)
centerPlacement = centerPlacement' clamp
  where
    clamp delta
      | delta == 0 = 0.5
      | delta > 0 = 1
      | otherwise = 0
-- | Directions in which a rectangle can still expand towards the root's
-- edges: per axis, the side(s) of the root center the rectangle does not
-- already occupy ('signum' of the center difference selects them).
freeDirs :: Rectangle -> Rectangle -> Set Direction2D
freeDirs root = Set.fromList . uncurry (++) . (lr *** ud)
              . centerPlacement' signum root
    where
        lr 1 = [L]
        lr (-1) = [R]
        lr _ = [L,R]
        ud 1 = [U]
        ud (-1) = [D]
        ud _ = [U,D]
-- | Classify, per axis, where the assigned rectangle's center sits relative
-- to the root's center, mapping each signed difference through @classify@.
centerPlacement' :: (Position -> r) -> Rectangle -> Rectangle -> (r, r)
centerPlacement' classify root assigned =
    (classify (rootX - winX), classify (rootY - winY))
  where
    (rootX, rootY) = center root
    (winX, winY) = center assigned
-- | Event hook that refreshes the layout whenever a window changes its hints.
hintsEventHook :: Event -> X All
hintsEventHook (PropertyEvent { ev_event_type = t, ev_atom = a, ev_window = w })
    | t == propertyNotify && a == wM_NORMAL_HINTS = do
        -- Only refresh for managed windows whose current geometry no longer
        -- satisfies the new hints; avoids needless layout churn.
        whenX (isClient w <&&> hintsMismatch w) $ refresh
        return (All True)
-- All True: let other event hooks keep running.
hintsEventHook _ = return (All True)
-- | True if the window's current size does not satisfy its size hints.
hintsMismatch :: Window -> X Bool
hintsMismatch w = withDisplay $ \d -> io $ do
    wa <- getWindowAttributes d w
    sh <- getWMNormalHints d w
    let dim = (fromIntegral $ wa_width wa, fromIntegral $ wa_height wa)
    -- applySizeHints with zero border returns the hint-compliant size;
    -- any difference from the actual size means the hints are violated.
    return $ dim /= applySizeHints 0 sh dim
| duckwork/dots | xmonad/lib/XMonad/Layout/LayoutHints.hs | unlicense | 11,241 | 3 | 17 | 2,751 | 3,440 | 1,883 | 1,557 | 159 | 5 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
module Lib
( startApp
) where
import Control.Applicative (liftA2)
import Control.Monad (liftM, replicateM)
import Control.Monad.IO.Class (liftIO)
import Data.Aeson
import Data.Aeson.TH
import qualified Data.ByteString.Lazy as BS
import qualified Data.Map.Strict as M
import Data.Maybe (fromJust)
import Network.Wai
import Network.Wai.Handler.Warp
import Network.Wai.Middleware.Cors (simpleCors)
import Servant
import System.IO.Memoize (eagerlyOnce)
import System.Random (randomRIO)
-- | First, the machinery around generating questions.
type Answer = String
data Question = Question
{ questionText :: String
, answers :: [Answer]
, correctAnswer :: Int
} deriving (Eq, Show)
$(deriveJSON defaultOptions ''Question)
-- | A Markov-chain token: sentence start/stop markers or a word.
data Token = Start | Stop | Word String deriving (Eq, Ord)
-- | As part of our deserialization, we'll need to be able to create Tokens
-- | from Strings, so we need a Read instance.
instance Read Token where
  readsPrec _ "__START__" = [(Start, "")]
  readsPrec _ "__STOP__" = [(Stop, "")]
  readsPrec _ w = [(Word w, "")]
-- | Given a token, finds a next token in an effectful way. (Typically random.)
type GetNextToken = Token -> IO Token
-- | Given a starting token and an effectful step function, return the list
-- of tokens produced by repeatedly stepping until 'Stop' is reached
-- ('Stop' itself is not included).
tokensFrom :: Token -> GetNextToken -> IO [Token]
tokensFrom current getNext = do
  next <- getNext current
  case next of
    Stop -> return []
    t    -> (t :) <$> tokensFrom t getNext
-- | Converts a list of tokens into a string, putting spaces between words
-- | but not before punctuation.
smartJoin :: [Token] -> String
smartJoin = dropWhile (== ' ') . concat . concatMap separated
  where
    separated (Word w)
      | w `elem` ["?", ",", "."] = ["", w]
      | otherwise                = [" ", w]
    separated _ = []
-- | Given an effectful nextToken function, returns a randomly generated sentence.
generate :: GetNextToken -> IO String
generate getNext = smartJoin <$> tokensFrom Start getNext
-- | Generates a random 'Question': @numAnswers@ answers, one token source
-- for question text and another for answer text, with the "correct" index
-- chosen uniformly at random.
randomQuestion :: Int -> GetNextToken -> GetNextToken -> IO Question
randomQuestion numAnswers nextQuestionToken nextAnswerToken = do
  question <- generate nextQuestionToken
  choices <- replicateM numAnswers (generate nextAnswerToken)
  correct <- randomRIO (0, numAnswers - 1)
  return (Question question choices correct)
-- In particular, we will be using our transition map in order to implement
-- getNextToken. This means we need to define a type for it, load it from a
-- file, and create functions for using it.
type Transitions = M.Map Token [Token]
-- | Loads some (JSON-serialized) transitions from a file. 'decode' yields a
-- @Maybe (M.Map String [String])@; a parse failure aborts with an error
-- naming the offending file (instead of 'fromJust''s opaque failure), and
-- on success keys and words are 'read' into 'Token's.
loadTransitions :: String -> IO Transitions
loadTransitions path = do
  contents <- BS.readFile path
  case decode contents of
    Nothing -> error ("loadTransitions: malformed JSON in " ++ path)
    Just m  -> return (textToTokens m)
  where
    -- Convert the raw String map into Tokens via the Read instance above.
    textToTokens :: M.Map String [String] -> Transitions
    textToTokens = M.map (map read) . M.mapKeys read
-- | Transition map for generating question sentences.
questionTransitions :: IO Transitions
questionTransitions = loadTransitions "questions.json"
-- | Transition map for generating answer phrases.
answerTransitions :: IO Transitions
answerTransitions = loadTransitions "answers.json"
-- | Picks a random element of a list, which better not be empty!
pick :: [a] -> IO a
pick xs = fmap (xs !!) (randomRIO (0, length xs - 1))
-- | An implementation of GetNextToken based on a Transitions map:
-- pick uniformly among the recorded successors, or 'Stop' if none exist.
randomNextToken :: Transitions -> GetNextToken
randomNextToken transitions current =
  maybe (return Stop) pick (M.lookup current transitions)
-- | And now we have to implement the servant parts.
type API = "question" :> Get '[JSON] Question
-- | Entry point: warm the transition caches and serve on port 8080 with
-- permissive CORS.
startApp :: IO ()
startApp = do
  -- loading / creating the transitions is expensive, so we do it eagerlyOnce
  cachedQt <- eagerlyOnce questionTransitions
  cachedAt <- eagerlyOnce answerTransitions
  run 8080 $ simpleCors $ app cachedQt cachedAt
-- | The WAI application, built from the cached transition maps.
app :: IO Transitions -> IO Transitions -> Application
app questionCache answerCache = serve api (server questionCache answerCache)
-- | Handler for the single @\/question@ endpoint: fetch the cached
-- transition maps and build a fresh 4-answer random question per request.
server :: IO Transitions -> IO Transitions -> Server API
server cachedQt cachedAt = liftIO $ do
  qt <- cachedQt
  at <- cachedAt
  randomQuestion 4 (randomNextToken qt) (randomNextToken at)
-- | Type-level proxy for the 'API', required by 'serve'.
api :: Proxy API
api = Proxy
| joelgrus/science-questions | haskell-servant/src/Lib.hs | unlicense | 4,810 | 0 | 14 | 978 | 1,048 | 566 | 482 | 85 | 3 |
{-|
Copyright : (C) 2015, University of Twente
License : BSD2 (see the file LICENSE)
Maintainer : Christiaan Baaij <[email protected]>
= SOP: Sum-of-Products, sorta
The arithmetic operation for 'GHC.TypeLits.Nat' are, addition
(@'GHC.TypeLits.+'@), subtraction (@'GHC.TypeLits.-'@), multiplication
(@'GHC.TypeLits.*'@), and exponentiation (@'GHC.TypeLits.^'@). This means we
cannot write expressions in a canonical SOP normal form. We can get rid of
subtraction by working with integers, and translating @a - b@ to @a + (-1)*b@.
Exponentation cannot be getten rid of that way. So we define the following
grammar for our canonical SOP-like normal form of arithmetic expressions:
@
SOP ::= Product \'+\' SOP | Product
Product ::= Symbol \'*\' Product | Symbol
Symbol ::= Integer
| Var
| Var \'^\' Product
| SOP \'^\' ProductE
ProductE ::= SymbolE \'*\' ProductE | SymbolE
SymbolE ::= Var
| Var \'^\' Product
| SOP \'^\' ProductE
@
So a valid SOP terms are:
@
x*y + y^2
(x+y)^(k*z)
@
, but,
@
(x*y)^2
@
is not, and should be:
@
x^2 * y^2
@
Exponents are thus not allowed to have products, so for example, the expression:
@
(x + 2)^(y + 2)
@
in valid SOP form is:
@
4*x*(2 + x)^y + 4*(2 + x)^y + (2 + x)^y*x^2
@
Also, exponents can only be integer values when the base is a variable. Although
not enforced by the grammar, the exponentials are flatted as far as possible in
SOP form. So:
@
(x^y)^z
@
is flattened to:
@
x^(y*z)
@
-}
module GHC.TypeLits.Normalise.SOP
( -- * SOP types
Symbol (..)
, Product (..)
, SOP (..)
-- * Simplification
, reduceExp
, mergeS
, mergeP
, mergeSOPAdd
, mergeSOPMul
, normaliseExp
)
where
-- External
import Data.Either (partitionEithers)
import Data.List (sort)
-- GHC API
import Outputable (Outputable (..), (<+>), text, hcat, integer, punctuate)
data Symbol v c
  = I Integer -- ^ Integer constant
  | C c -- ^ Non-integer constant
  | E (SOP v c) (Product v c) -- ^ Exponentiation
  | V v -- ^ Variable
  deriving (Eq,Ord)
-- | A product of symbols (the "Product" of the SOP grammar).
newtype Product v c = P { unP :: [Symbol v c] }
  deriving (Eq,Ord)
-- | A sum of products; the canonical normal form.
newtype SOP v c = S { unS :: [Product v c] }
  deriving (Ord)
-- Custom equality: the empty sum is identified with the literal zero
-- @S [P [I 0]]@; otherwise compare the product lists structurally.
instance (Eq v, Eq c) => Eq (SOP v c) where
  (S []) == (S [P [I 0]]) = True
  (S [P [I 0]]) == (S []) = True
  (S ps1) == (S ps2) = ps1 == ps2
-- Pretty-printers following the SOP grammar: sums joined with " + ",
-- products with " * ", and exponent bases parenthesised unless atomic.
instance (Outputable v, Outputable c) => Outputable (SOP v c) where
  ppr = hcat . punctuate (text " + ") . map ppr . unS
instance (Outputable v, Outputable c) => Outputable (Product v c) where
  ppr = hcat . punctuate (text " * ") . map ppr . unP
instance (Outputable v, Outputable c) => Outputable (Symbol v c) where
  ppr (I i) = integer i
  ppr (C c) = ppr c
  ppr (V s) = ppr s
  ppr (E b e) = case (pprSimple b, pprSimple (S [e])) of
                  (bS,eS) -> bS <+> text "^" <+> eS
    where
      pprSimple (S [P [I i]]) = integer i
      pprSimple (S [P [V v]]) = ppr v
      pprSimple sop = text "(" <+> ppr sop <+> text ")"
-- | Repeatedly merge list elements pairwise with @op@: each remaining
-- element is offered the current head; 'Left' results replace both
-- participants (the head is absorbed), 'Right' results are kept unchanged.
mergeWith :: (a -> a -> Either a a) -> [a] -> [a]
mergeWith _ [] = []
mergeWith op (x:xs)
  | null changed = x : mergeWith op xs
  | otherwise    = mergeWith op (changed ++ unchanged)
  where
    (changed, unchanged) = partitionEithers [ y `op` x | y <- xs ]
-- | reduce exponentials
--
-- Performs the following rewrites:
--
-- @
-- x^0          ==>  1
-- 0^x          ==>  0
-- 2^3          ==>  8
-- (k ^ i) ^ j  ==>  k ^ (i * j)
-- @
reduceExp :: (Ord v, Ord c) => Symbol v c -> Symbol v c
reduceExp (E _ (P [(I 0)])) = I 1 -- x^0 ==> 1
reduceExp (E (S [P [I 0]]) _ ) = I 0 -- 0^x ==> 0
reduceExp (E (S [P [(I i)]]) (P [(I j)]))
  -- guard: negative exponents are not evaluated numerically
  | j >= 0 = I (i ^ j) -- 2^3 ==> 8
-- (k ^ i) ^ j ==> k ^ (i * j)
reduceExp (E (S [P [(E k i)]]) j) = case normaliseExp k (S [e]) of
    (S [P [s]]) -> s
    _ -> E k e
  where
    -- combined exponent i*j, itself merged and reduced
    e = P . sort . map reduceExp $ mergeWith mergeS (unP i ++ unP j)
reduceExp s = s
-- | Merge two symbols of a Product term
--
-- 'Left' means the pair merged into one symbol; 'Right' keeps the first
-- symbol unchanged.
--
-- Performs the following rewrites:
--
-- @
-- 8 * 7    ==>  56
-- 1 * x    ==>  x
-- x * 1    ==>  x
-- 0 * x    ==>  0
-- x * 0    ==>  0
-- x * x^4  ==>  x^5
-- x^4 * x  ==>  x^5
-- y*y      ==>  y^2
-- @
mergeS :: (Ord v, Ord c) => Symbol v c -> Symbol v c
       -> Either (Symbol v c) (Symbol v c)
mergeS (I i) (I j) = Left (I (i * j)) -- 8 * 7 ==> 56
mergeS (I 1) r = Left r -- 1 * x ==> x
mergeS l (I 1) = Left l -- x * 1 ==> x
mergeS (I 0) _ = Left (I 0) -- 0 * x ==> 0
mergeS _ (I 0) = Left (I 0) -- x * 0 ==> 0
-- x * x^4 ==> x^5
mergeS s (E (S [P [s']]) (P [I i]))
  | s == s'
  = Left (E (S [P [s']]) (P [I (i + 1)]))
-- x^4 * x ==> x^5
mergeS (E (S [P [s']]) (P [I i])) s
  | s == s'
  = Left (E (S [P [s']]) (P [I (i + 1)]))
-- y*y ==> y^2
mergeS l r
  | l == r
  = case normaliseExp (S [P [l]]) (S [P [I 2]]) of
      (S [P [e]]) -> Left e
      _ -> Right l
-- fall-through: nothing merged
mergeS l _ = Right l
-- | Merge two products of a SOP term
--
-- 'Left' means the products combined (like terms added); 'Right' keeps the
-- first product unchanged.
--
-- Performs the following rewrites:
--
-- @
-- 2xy + 3xy  ==>  5xy
-- 2xy + xy   ==>  3xy
-- xy + 2xy   ==>  3xy
-- xy + xy    ==>  2xy
-- @
mergeP :: (Eq v, Eq c) => Product v c -> Product v c
       -> Either (Product v c) (Product v c)
-- 2xy + 3xy ==> 5xy
mergeP (P ((I i):is)) (P ((I j):js))
  | is == js = Left . P $ (I (i + j)) : is
-- 2xy + xy ==> 3xy
mergeP (P ((I i):is)) (P js)
  | is == js = Left . P $ (I (i + 1)) : is
-- xy + 2xy ==> 3xy
mergeP (P is) (P ((I j):js))
  | is == js = Left . P $ (I (j + 1)) : is
-- xy + xy ==> 2xy
mergeP (P is) (P js)
  | is == js = Left . P $ (I 2) : is
  | otherwise = Right $ P is
-- | Expand or Simplify 'complex' exponentials
--
-- Performs the following rewrites:
--
-- @
-- b^1              ==>  b
-- 2^(y^2)          ==>  4^y
-- (x + 2)^2        ==>  x^2 + 4xy + 4
-- (x + 2)^(2x)     ==>  (x^2 + 4xy + 4)^x
-- (x + 2)^(y + 2)  ==>  4x(2 + x)^y + 4(2 + x)^y + (2 + x)^yx^2
-- @
normaliseExp :: (Ord v, Ord c) => SOP v c -> SOP v c -> SOP v c
-- b^1 ==> b
normaliseExp b (S [P [I 1]]) = b
-- x^(2xy) ==> x^(2xy)
normaliseExp b@(S [P [V _]]) (S [e]) = S [P [E b e]]
-- 2^(y^2) ==> 4^y
normaliseExp b@(S [P [_]]) (S [e@(P [_])]) = S [P [reduceExp (E b e)]]
-- (x + 2)^2 ==> x^2 + 4xy + 4  (expand by repeated multiplication)
normaliseExp b (S [P [(I i)]]) =
  foldr1 mergeSOPMul (replicate (fromInteger i) b)
-- (x + 2)^(2x) ==> (x^2 + 4xy + 4)^x  (peel the integer factor first)
normaliseExp b (S [P (e@(I _):es)]) =
  normaliseExp (normaliseExp b (S [P [e]])) (S [P es])
-- (x + 2)^(xy) ==> (x+2)^(xy)
normaliseExp b (S [e]) = S [P [reduceExp (E b e)]]
-- (x + 2)^(y + 2) ==> 4x(2 + x)^y + 4(2 + x)^y + (2 + x)^yx^2
-- (a sum in the exponent becomes a product of exponentials)
normaliseExp b (S e) = foldr1 mergeSOPMul (map (normaliseExp b . S . (:[])) e)
-- | Whether a product is syntactically zero (leads with the constant 0).
zeroP :: Product v c -> Bool
zeroP p = case unP p of
  I 0 : _ -> True
  _       -> False
-- | Simplifies SOP terms using
--
-- * 'mergeS'
-- * 'mergeP'
-- * 'reduceExp'
simplifySOP :: (Ord v, Ord c) => SOP v c -> SOP v c
simplifySOP
  = S
  -- drop zero products and keep the sum in canonical (sorted) order
  . sort . filter (not . zeroP)
  -- combine like products (add coefficients)
  . mergeWith mergeP
  -- within each product: reduce exponentials, combine like symbols, sort
  . map (P . sort . map reduceExp . mergeWith mergeS . unP)
  . unS
-- | Merge two SOP terms by additions
mergeSOPAdd :: (Ord v, Ord c) => SOP v c -> SOP v c -> SOP v c
mergeSOPAdd (S ps1) (S ps2) = simplifySOP (S (ps1 ++ ps2))
-- | Merge two SOP terms by multiplication
--
-- Distributes: every product of the first sum is multiplied (by
-- concatenating symbol lists) with every product of the second, then the
-- result is simplified.
mergeSOPMul :: (Ord v, Ord c) => SOP v c -> SOP v c -> SOP v c
mergeSOPMul (S sop1) (S sop2)
  = simplifySOP
  . S
  $ concatMap (zipWith (\p1 p2 -> P (unP p1 ++ unP p2)) sop1 . repeat) sop2
| mstksg/ghc-typelits-natnormalise | src/GHC/TypeLits/Normalise/SOP.hs | bsd-2-clause | 7,597 | 0 | 15 | 2,292 | 2,834 | 1,483 | 1,351 | 113 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Network.MyTardis.Types where
import qualified Data.Map as M
import System.FilePath.Posix (takeFileName)
-- | An experiment that has been identified on the local filesystem (e.g. a collection
-- of DICOM files).  NB: the hand-written 'Eq' instance in this module
-- deliberately ignores 'ideMetadataMaps'.
data IdentifiedExperiment = IdentifiedExperiment
    { ideDescription        :: String -- ^ Experiment description.
    , ideInstitutionName    :: String -- ^ Institution name.
    , ideTitle              :: String -- ^ Experiment title.
    , ideMetadataMaps       :: [(String, M.Map String String)] -- ^ Metadata attribute maps. The first component of the tuple is the schema name (a URL).
    }
    deriving (Show)
-- | Equality ignores the metadata maps: two experiments match when their
-- description, institution name, and title all coincide.
instance Eq IdentifiedExperiment where
    IdentifiedExperiment desc1 inst1 title1 _ == IdentifiedExperiment desc2 inst2 title2 _ =
        desc1 == desc2 && inst1 == inst2 && title1 == title2
-- | A dataset that has been identified on the local filesystem.
-- NB: the hand-written 'Eq' instance in this module ignores
-- 'iddMetadataMaps'.
data IdentifiedDataset = IdentifiedDataset
    { iddDescription  :: String -- ^ Dataset description.
    , iddExperiments  :: [String] -- ^ List of experiment resource URIs.
    , iddMetadataMaps :: [(String, M.Map String String)] -- ^ Metadata attribute maps. The first component of the tuple is the schema name (a URL).
    }
    deriving (Show)
-- | Equality ignores the metadata maps: only description and experiment
-- URIs are compared.
instance Eq IdentifiedDataset where
    IdentifiedDataset desc1 exprs1 _ == IdentifiedDataset desc2 exprs2 _ =
        desc1 == desc2 && exprs1 == exprs2
-- | A file that has been identified on the local filesystem.
data IdentifiedFile = IdentifiedFile
    { idfDatasetURL   :: String -- ^ Resource URI for the dataset.
    , idfFilePath     :: String -- ^ Full path to the file.
    , idfMd5sum       :: String -- ^ Md5sum of the file.
    , idfSize         :: Integer -- ^ Size of the file in bytes.
    , idfMetadataMaps :: [(String, M.Map String String)] -- ^ Metadata attribute maps. The first component of the tuple is the schema name (a URL).
    }
    deriving (Show)
-- | Two identified files are equal when they live in the same dataset
-- (same 'idfDatasetURL') and share a file name ('takeFileName'); the
-- directory part, checksum, size, and metadata are all ignored.
instance Eq IdentifiedFile where
    IdentifiedFile url path _ _ _ == IdentifiedFile url' path' _ _ _ =
        url == url' && takeFileName path == takeFileName path'
| carlohamalainen/imagetrove-uploader | src/Network/MyTardis/Types.hs | bsd-2-clause | 2,386 | 0 | 12 | 537 | 401 | 236 | 165 | 28 | 0 |
module PowerSeries where
import Data.List(unfoldr)
import Util((...))
-- | Like 'zipWith', but instead of truncating at the shorter list the
-- leftover tail of the longer list is mapped through a default function:
-- @g@ for leftover left elements, @h@ for leftover right elements.
zipWithDefaults :: (a -> b -> c) -> (a -> c) -> (b -> c) -> [a] -> [b] -> [c]
zipWithDefaults f g h = go
  where
    go (x:xs) (y:ys) = f x y : go xs ys
    go xs     []     = map g xs
    go []     ys     = map h ys
-- | All non-empty prefixes of the input, each reversed:
-- @reverseInits [1,2,3] == [[1],[2,1],[3,2,1]]@.
-- Works lazily on infinite lists.
reverseInits :: [a] -> [[a]]
reverseInits = drop 1 . scanl (flip (:)) []
-- only works if second argument is infinite
-- (pairs @xs@ against every reversed prefix of @ys@, one row per prefix)
simpleConvolve :: [a] -> [b] -> [[(a,b)]]
simpleConvolve xs ys = map (zip xs) (reverseInits ys)
-- | Takes an infinite list of infinite lists and gives an infinite list
-- of finite lists: the anti-diagonals of the input grid.
diagonalize :: [[a]] -> [[a]]
diagonalize = foldr step []
  where
    step :: [a] -> [[a]] -> [[a]]
    step []     diags = [] : diags
    step (x:xs) diags = [x] : zipWithDefaults (:) (: []) id xs diags
-- | Cartesian product arranged in rows: one row per element of the second
-- list, pairing it with every element of the first.
grid :: [a] -> [b] -> [[(b,a)]]
grid xs ys = [ [ (y, x) | x <- xs ] | y <- ys ]
-- works on finite and infinite arguments in both places.
-- | Convolution table: the anti-diagonals of the pairing grid.
-- NOTE(review): '...' comes from the local Util module; presumably it is
-- two-argument composition (\f g x y -> f (g x y)) -- confirm in Util.
convolve :: [a] -> [b] -> [[(b,a)]]
convolve = diagonalize ... grid
-- | Cauchy product of two power series: coefficient n of the result is
-- the sum of products along the n-th anti-diagonal of 'convolve'.
-- NOTE(review): relies on Util's '...' (presumably two-argument
-- composition) -- confirm before restructuring.
seriesMult :: (Num a) => [a] -> [a] -> [a]
seriesMult = map (sum . map (uncurry (*))) ... convolve
-- | Coefficient-wise sum of two power series; the tail of the longer
-- series is kept unchanged.
seriesAdd :: (Num a) => [a] -> [a] -> [a]
seriesAdd xs ys = zipWithDefaults (+) id id xs ys
-- | The multiplicative identity series: 1 + 0x + 0x^2 + ...
seriesOne :: [Int]
seriesOne = 1 : seriesZero

-- | The zero series: every coefficient is 0.
seriesZero :: [Int]
seriesZero = 0 : seriesZero
-- | Multiply a power series by x: shift every coefficient up one degree
-- by prepending a zero constant term.  A type signature is added so the
-- binding is polymorphic instead of being silently monomorphised by the
-- monomorphism restriction.
seriesShift :: Num a => [a] -> [a]
seriesShift = (0 :)
-- | A power series over 'Int', represented by its coefficient list
-- (finite or infinite, constant term first).
data PS = PS [Int]
-- | Ring operations: addition coefficient-wise ('seriesAdd'),
-- multiplication by Cauchy product ('seriesMult').  'signum' and 'abs'
-- are deliberately left undefined -- they have no sensible meaning for
-- power series, so calling them is an error.
instance Num PS where
  (+) (PS xs) (PS ys) = PS (seriesAdd xs ys)
  (*) (PS xs) (PS ys) = PS (seriesMult xs ys)
  negate (PS xs) = PS (map negate xs)
  signum = undefined
  abs = undefined
  fromInteger n = PS [fromInteger n]
-- | A power series split by coefficient parity:
-- 'Zero' is the zero series; 'Both' stores a plain coefficient list;
-- 'Even' / 'Odd' store only the coefficients of the corresponding
-- parity, compacted (see 'expand' for the inverse).
data EOPS = Zero
          | Both [Int]
          | Even [Int]
          | Odd [Int]
          deriving Show
-- | Ring structure on parity-split series, delegating to 'eoAdd' and
-- 'eoMult'.  As with 'PS', 'signum' and 'abs' are deliberately
-- undefined.  @fromInteger 0@ maps to the dedicated 'Zero' constructor
-- so the short-cut cases in 'eoAdd' / 'eoMult' can fire.
instance Num EOPS where
  (+) = eoAdd
  (*) = eoMult
  negate = eoMap negate
  signum = undefined
  abs = undefined
  fromInteger 0 = Zero
  fromInteger n = Even [fromInteger n]
-- f(x^2) + x g(x^2)
-- | Interleave two coefficient lists: even positions come from the first
-- list, odd positions from the second.
-- NOTE(review): assumes Util's '...' is two-argument composition, and
-- zipWith truncates at the shorter input -- confirm both.
alternate :: [a] -> [a] -> [a]
alternate = concat ... zipWith f
  where
    f x y = [x,y]
-- | Keep the elements at even indices (0, 2, 4, ...).
evenPart :: [a] -> [a]
evenPart list = case list of
  []         -> []
  (x : rest) -> x : dropFirst rest
  where
    dropFirst []       = []
    dropFirst (_ : ys) = evenPart ys
-- | Keep the elements at odd indices (1, 3, 5, ...).
oddPart :: [a] -> [a]
oddPart list = case list of
  (_ : y : rest) -> y : oddPart rest
  _              -> []
-- | Split a series into its even and odd parts, keeping the compacted
-- parity-tagged representation.
split :: EOPS -> (EOPS, EOPS)
split series = case series of
  Zero    -> (Zero, Zero)
  Even es -> (Even es, Zero)
  Odd os  -> (Zero, Odd os)
  Both cs -> (Even (evenPart cs), Odd (oddPart cs))
-- | Expand the compacted parity representation back into a plain
-- (infinite, for the parity-tagged cases) coefficient list, padding the
-- missing parity with zeros.
expand :: EOPS -> [Int]
expand series = case series of
  Zero    -> repeat 0
  Both cs -> cs
  Even es -> alternate es (repeat 0)
  Odd os  -> alternate (repeat 0) os
-- | Addition on parity-split series.  The result stays compact when both
-- operands share a parity (or one is zero); otherwise both sides are
-- expanded and added coefficient-wise.
eoAdd :: EOPS -> EOPS -> EOPS
eoAdd a b = case (a, b) of
  (Zero, _)          -> b
  (_, Zero)          -> a
  (Even us, Even vs) -> Even (seriesAdd us vs)
  (Odd us, Odd vs)   -> Odd (seriesAdd us vs)
  _                  -> Both (seriesAdd (expand a) (expand b))
-- | Multiplication on parity-split series.  Parities combine like signs:
-- even*even is even, mixed parities are odd, and odd*odd is even but
-- shifted up one degree (hence the prepended 0 coefficient).
eoMult :: EOPS -> EOPS -> EOPS
eoMult a b = case (a, b) of
  (Zero, _)          -> Zero
  (_, Zero)          -> Zero
  (Even us, Even vs) -> Even (seriesMult us vs)
  (Even us, Odd vs)  -> Odd (seriesMult us vs)
  (Odd us, Even vs)  -> Odd (seriesMult us vs)
  (Odd us, Odd vs)   -> Even (0 : seriesMult us vs)
  _                  -> Both (seriesMult (expand a) (expand b))
-- | Map a function over every stored coefficient, preserving the
-- constructor (and thus the parity information).
eoMap :: (Int -> Int) -> EOPS -> EOPS
eoMap f series = case series of
  Zero    -> Zero
  Even es -> Even (map f es)
  Odd os  -> Odd (map f os)
  Both cs -> Both (map f cs)
eoShift Zero = Zero
eoShift (Even xs) = Odd xs
eoShift (Odd xs) = Even $ 0 : xs
eoShift (Both xs) = Both $ 0 : xs | cullina/Extractor | src/PowerSeries.hs | bsd-3-clause | 3,669 | 0 | 11 | 1,088 | 1,905 | 1,017 | 888 | 99 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Aws.Aws
( -- * Logging
LogLevel(..)
, Logger
, defaultLog
-- * Configuration
, Configuration(..)
, baseConfiguration
, dbgConfiguration
-- * Transaction runners
-- ** Safe runners
, aws
, awsRef
, pureAws
, simpleAws
, mgrAws
-- ** Unsafe runners
, unsafeAws
, unsafeAwsRef
-- ** URI runners
, awsUri
-- * Iterated runners
--, awsIteratedAll
, awsIteratedSource
, awsIteratedList
)
where
-- (TimeInfo (Timestamp))
import Aws.Core
import Aws.Core.Credentials (Credentials (..),
loadCredentialsDefault)
import Control.Applicative
import qualified Control.Exception.Lifted as E
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans
import Control.Monad.Trans.Resource
import qualified Data.ByteString as B
import qualified Data.CaseInsensitive as CI
import qualified Data.Conduit as C
import qualified Data.Conduit.List as CL
import Data.IORef
import Data.Monoid
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.IO as T
import qualified Network.HTTP.Conduit as HTTP
import System.IO (stderr)
-- RPR
--import qualified Data.ByteString.UTF8 as BS8 (toString)
-- | The severity of a log message, in rising order.
data LogLevel
= Debug
| Info
| Warning
| Error
deriving (Show, Eq, Ord)
-- | The interface for any logging function. Takes log level and a log message, and can perform an arbitrary
-- IO action.
type Logger = LogLevel -> T.Text -> IO ()
-- | The default logger @defaultLog minLevel@, which prints log messages
-- at or above level @minLevel@ to @stderr@ and silently drops the rest.
defaultLog :: LogLevel -> Logger
defaultLog minLevel level msg
  | level < minLevel = return ()
  | otherwise        = T.hPutStrLn stderr (T.concat [T.pack (show level), ": ", msg])
-- | The configuration for an AWS request. You can use multiple configurations in parallel, even over the same HTTP
-- connection manager.
data Configuration
= Configuration {
-- | Whether to restrict the signature validity with a plain timestamp, or with explicit expiration
-- (absolute or relative).
timeInfo :: TimeInfo
-- | AWS access credentials.
, credentials :: Credentials
-- | The error / message logger.
, logger :: Logger
}
-- | The default configuration, with credentials loaded from environment variable or configuration file
-- (see 'loadCredentialsDefault').
--
-- Throws a descriptive 'IOError' when no credentials can be found
-- (previously this failed with an obscure irrefutable-pattern-match
-- error from @Just cr <- ...@).
baseConfiguration :: MonadIO io => io Configuration
baseConfiguration = liftIO $ do
  mcr <- loadCredentialsDefault
  case mcr of
    Nothing -> ioError $ userError
        "Aws.baseConfiguration: could not load AWS credentials from environment or configuration file"
    Just cr -> return Configuration { timeInfo    = Timestamp
                                    , credentials = cr
                                    , logger      = defaultLog Warning
                                    }
-- | Debug configuration, which logs much more verbosely: identical to
-- 'baseConfiguration' except that the logger threshold is 'Debug'.
dbgConfiguration :: MonadIO io => io Configuration
dbgConfiguration =
  fmap (\cfg -> cfg { logger = defaultLog Debug }) baseConfiguration
-- | Run an AWS transaction, with HTTP manager and metadata wrapped in a 'Response'.
--
-- All errors are caught and wrapped in the 'Response' value.
--
-- Metadata is logged at level 'Info'.
--
-- Usage (with existing 'HTTP.Manager'):
-- @
-- resp <- aws cfg serviceCfg manager request
-- @
aws :: (Transaction r a)
=> Configuration
-> ServiceConfiguration r NormalQuery
-> HTTP.Manager
-> r
-> ResourceT IO (Response (ResponseMetadata a) a)
aws = unsafeAws
-- | Run an AWS transaction, with HTTP manager and metadata returned in an 'IORef'.
--
-- Errors are not caught, and need to be handled with exception handlers.
--
-- Metadata is not logged.
--
-- Usage (with existing 'HTTP.Manager'):
-- @
-- ref <- newIORef mempty;
-- resp <- awsRef cfg serviceCfg manager request
-- @
-- Unfortunately, the ";" above seems necessary, as haddock does not want to split lines for me.
awsRef :: (Transaction r a)
=> Configuration
-> ServiceConfiguration r NormalQuery
-> HTTP.Manager
-> IORef (ResponseMetadata a)
-> r
-> ResourceT IO a
awsRef = unsafeAwsRef
-- | Run an AWS transaction, with HTTP manager and without metadata.
--
-- Metadata is logged at level 'Info'; response errors are re-thrown by
-- 'readResponseIO'.
--
-- Usage (with existing 'HTTP.Manager'):
-- @
-- resp <- pureAws cfg serviceCfg manager request
-- @
pureAws :: (Transaction r a)
        => Configuration
        -> ServiceConfiguration r NormalQuery
        -> HTTP.Manager
        -> r
        -> ResourceT IO a
pureAws cfg scfg mgr req = aws cfg scfg mgr req >>= readResponseIO
-- | Run an AWS transaction, /without/ HTTP manager and without metadata.
--
-- Metadata is logged at level 'Info'.
--
-- Note that this is potentially less efficient than using 'aws', because
-- HTTP connections cannot be re-used.
--
-- Usage:
-- @
-- resp <- simpleAws cfg serviceCfg request
-- @
simpleAws :: (Transaction r a, AsMemoryResponse a, MonadIO io)
          => Configuration
          -> ServiceConfiguration r NormalQuery
          -> r
          -> io (MemoryResponse a)
simpleAws cfg scfg request = liftIO $ HTTP.withManager $ \manager -> do
    fullResponse <- aws cfg scfg manager request
    result       <- readResponseIO fullResponse
    loadToMemory result
-- | Run an AWS transaction, /with/ an existing HTTP manager and without
-- metadata -- the convenient wrapper for the overwhelmingly common case.
-- Metadata is logged at level 'Info'.
-- Usage:
-- @
-- resp <- mgrAws cfg serviceCfg mgr request
-- @
mgrAws :: (Transaction r a, AsMemoryResponse a, MonadIO io)
       => Configuration
       -> ServiceConfiguration r NormalQuery
       -> HTTP.Manager
       -> r
       -> io (MemoryResponse a)
mgrAws cfg scfg mgr request = liftIO $ runResourceT $ do
    fullResponse <- aws cfg scfg mgr request
    result       <- readResponseIO fullResponse
    loadToMemory result
-- | Run an AWS transaction, without enforcing that response and request type form a valid transaction pair.
--
-- This is especially useful for debugging and development, you should not have to use it in production.
--
-- All errors are caught and wrapped in the 'Response' value: any
-- 'E.SomeException' thrown while running the request ends up as a
-- 'Left' inside the returned 'Response'.
--
-- Metadata is wrapped in the Response, and also logged at level 'Info'.
unsafeAws
  :: (ResponseConsumer r a,
      Monoid (ResponseMetadata a),
      Loggable (ResponseMetadata a),
      SignQuery r) =>
     Configuration -> ServiceConfiguration r NormalQuery -> HTTP.Manager -> r -> ResourceT IO (Response (ResponseMetadata a) a)
unsafeAws cfg scfg manager request = do
  -- metadata is accumulated by the response consumer via this IORef
  metadataRef <- liftIO $ newIORef mempty
  let catchAll :: ResourceT IO a -> ResourceT IO (Either E.SomeException a)
      catchAll = E.handle (return . Left) . fmap Right
  resp <- catchAll $
      unsafeAwsRef cfg scfg manager metadataRef request
  -- read whatever metadata was recorded, even if the request failed
  metadata <- liftIO $ readIORef metadataRef
  liftIO $ logger cfg Info $ "Response metadata: " `mappend` toLogText metadata
  return $ Response metadata resp
-- | Run an AWS transaction, without enforcing that response and request type form a valid transaction pair.
--
-- This is especially useful for debugging and development, you should not have to use it in production.
--
-- Errors are not caught, and need to be handled with exception handlers.
--
-- Metadata is put in the 'IORef', but not logged.
--
-- All diagnostic output goes through the configured logger at level
-- 'Debug'; the stray @putStrLn@ debugging calls (marked \"RPR\") that
-- duplicated it on stdout have been removed.
unsafeAwsRef
  :: (ResponseConsumer r a,
      Monoid (ResponseMetadata a),
      SignQuery r) =>
    Configuration -> ServiceConfiguration r NormalQuery -> HTTP.Manager -> IORef (ResponseMetadata a) -> r -> ResourceT IO a
unsafeAwsRef cfg info manager metadataRef request = do
  sd <- liftIO $ signatureData <$> timeInfo <*> credentials $ cfg
  let q = signQuery request info sd
  liftIO $ logger cfg Debug $ T.pack $ "String to sign: " ++ show (sqStringToSign q)
  httpRequest <- liftIO $ queryToHttpRequest q
  liftIO $ logger cfg Debug $ T.pack $ "Host: " ++ show (HTTP.host httpRequest)
  hresp <- HTTP.http httpRequest manager
  forM_ (HTTP.responseHeaders hresp) $ \(hname,hvalue) -> liftIO $ do
      logger cfg Debug $ T.decodeUtf8 $ "Response header '" `mappend` CI.original hname `mappend` "': '" `mappend` hvalue `mappend` "'"
  responseConsumer request metadataRef hresp
-- | Run a URI-only AWS transaction. Returns a URI that can be sent anywhere. Does not work with all requests.
--
-- Usage:
-- @
-- uri <- awsUri cfg request
-- @
awsUri :: (SignQuery request, MonadIO io)
       => Configuration -> ServiceConfiguration request UriOnlyQuery -> request -> io B.ByteString
awsUri cfg info request = liftIO $ do
    sd <- signatureData (timeInfo cfg) (credentials cfg)
    let signedQuery = signQuery request info sd
    logger cfg Debug $ T.pack $ "String to sign: " ++ show (sqStringToSign signedQuery)
    return (queryToUri signedQuery)
{-
-- | Run an iterated AWS transaction. May make multiple HTTP requests.
awsIteratedAll :: (IteratedTransaction r a)
=> Configuration
-> ServiceConfiguration r NormalQuery
-> HTTP.Manager
-> r
-> ResourceT IO (Response [ResponseMetadata a] a)
awsIteratedAll cfg scfg manager req_ = go req_ Nothing
where go request prevResp = do Response meta respAttempt <- aws cfg scfg manager request
case maybeCombineIteratedResponse prevResp <$> respAttempt of
f@(Failure _) -> return (Response [meta] f)
s@(Success resp) ->
case nextIteratedRequest request resp of
Nothing ->
return (Response [meta] s)
Just nextRequest ->
mapMetadata (meta:) `liftM` go nextRequest (Just resp)
-}
-- | Stream the pages of an iterated AWS transaction as a conduit
-- producer.  Each yielded 'Response' is one page; iteration stops after
-- the first failed page, or when 'nextIteratedRequest' reports that the
-- service has no further page.
awsIteratedSource :: (IteratedTransaction r a)
                  => Configuration
                  -> ServiceConfiguration r NormalQuery
                  -> HTTP.Manager
                  -> r
                  -> C.Producer (ResourceT IO) (Response (ResponseMetadata a) a)
awsIteratedSource cfg scfg manager req_ = go req_
  where go request = do resp <- lift $ aws cfg scfg manager request
                        C.yield resp
                        case responseResult resp of
                          Left _ -> return ()   -- failed page: stop iterating
                          Right x ->
                            case nextIteratedRequest request x of
                              Nothing -> return ()   -- no more pages
                              Just nextRequest -> go nextRequest
-- | Flatten an iterated transaction into a stream of its list elements,
-- concatenating the 'listResponse' items of every page produced by
-- 'awsIteratedSource'.  Page-level failures are re-thrown by
-- 'readResponseIO'.
awsIteratedList :: (IteratedTransaction r a, ListResponse a i)
                => Configuration
                -> ServiceConfiguration r NormalQuery
                -> HTTP.Manager
                -> r
                -> C.Producer (ResourceT IO) i
awsIteratedList cfg scfg manager req
  = awsIteratedSource cfg scfg manager req
    C.=$=
    CL.concatMapM (fmap listResponse . readResponseIO)
| RayRacine/aws | Aws/Aws.hs | bsd-3-clause | 11,518 | 0 | 21 | 3,277 | 2,031 | 1,081 | 950 | 174 | 3 |
module Identifiers.Infer.LegacyMove
where
import Identifiers.Infer.Parser
import qualified Data.Text as T
import Base.Types
-- Lots of boilerplate but the disadvantages of Template Haskell made me move over to this
-- | Payload of the first 'Title' entry in the list, if any.
title' :: [MediaType] -> Maybe T.Text
title' []              = Nothing
title' (Title a : _)   = Just a
title' (_ : rest)      = title' rest
-- | Payload of the first 'Year' entry in the list, if any.
year' :: [MediaType] -> Maybe Int
year' []             = Nothing
year' (Year a : _)   = Just a
year' (_ : rest)     = year' rest
-- | Payload of the first 'Resolution' entry in the list, if any.
res' :: [MediaType] -> Maybe ResolutionDer
res' []                   = Nothing
res' (Resolution a : _)   = Just a
res' (_ : rest)           = res' rest
-- | Payload of the first 'Source' entry in the list, if any.
source' :: [MediaType] -> Maybe SourceDer
source' []               = Nothing
source' (Source a : _)   = Just a
source' (_ : rest)       = source' rest
-- | Payload of the first 'Codec' entry in the list, if any.
codec' :: [MediaType] -> Maybe CodecDer
codec' []              = Nothing
codec' (Codec a : _)   = Just a
codec' (_ : rest)      = codec' rest
-- | Payload of the first 'Part' entry in the list, if any.
part' :: [MediaType] -> Maybe Int
part' []             = Nothing
part' (Part a : _)   = Just a
part' (_ : rest)     = part' rest
-- | Payload of the first 'Runtime' entry in the list, if any.
runtime' :: [MediaType] -> Maybe Int
runtime' []                = Nothing
runtime' (Runtime a : _)   = Just a
-- BUG FIX: the fall-through previously recursed via part' (copy-paste
-- slip), so a 'Runtime' entry was only found when it headed the list.
runtime' (_ : rest)        = runtime' rest
-- | Payload of the first 'IMDBid' entry in the list, if any.
imdbID' :: [MediaType] -> Maybe String
imdbID' []               = Nothing
imdbID' (IMDBid a : _)   = Just a
imdbID' (_ : rest)       = imdbID' rest
-- | Payload of the first 'IMDBRating' entry in the list, if any.
imdbRating' :: [MediaType] -> Maybe Float
imdbRating' []                   = Nothing
imdbRating' (IMDBRating a : _)   = Just a
imdbRating' (_ : rest)           = imdbRating' rest
-- | Payload of the first 'Synopsis' entry in the list, if any.
synopsis' :: [MediaType] -> Maybe T.Text
synopsis' []                 = Nothing
synopsis' (Synopsis a : _)   = Just a
synopsis' (_ : rest)         = synopsis' rest
-- | Payload of the first 'Actors' entry in the list, if any.
actors' :: [MediaType] -> Maybe [Actor]
actors' []               = Nothing
actors' (Actors a : _)   = Just a
actors' (_ : rest)       = actors' rest
-- | Payload of the first 'EpisodeNo' entry in the list, if any.
episode' :: [MediaType] -> Maybe Int
episode' []                  = Nothing
episode' (EpisodeNo a : _)   = Just a
episode' (_ : rest)          = episode' rest
-- | Payload of the first 'SeasonNo' entry in the list, if any.
season' :: [MediaType] -> Maybe Int
season' []                 = Nothing
season' (SeasonNo a : _)   = Just a
season' (_ : rest)         = season' rest
-- | Payload of the first 'Poster' entry in the list, if any.
poster' :: [MediaType] -> Maybe (Either String String)
poster' []               = Nothing
poster' (Poster a : _)   = Just a
poster' (_ : rest)       = poster' rest
| Reboare/Duchess | src/Identifiers/Infer/LegacyMove.hs | bsd-3-clause | 2,097 | 0 | 9 | 510 | 994 | 503 | 491 | 74 | 2 |
module Talks.Free.Prelude (
module X
) where
import Prelude as X (IO, Int, Eq, Ord, Show, (.), show, (==), putStrLn, ($), const, read, id, print)
import Control.Applicative as X ((<$>), (<*>), (*>), (<*), pure)
import Control.Monad as X (void, when, unless, liftM, (>>=), return)
import Data.Functor as X (Functor, fmap)
import Data.Traversable as X (mapM)
import Data.Foldable as X (mapM_)
import Data.Maybe as X
| markhibberd/fp-syd-free | src/Talks/Free/Prelude.hs | bsd-3-clause | 417 | 0 | 5 | 67 | 175 | 123 | 52 | 9 | 0 |
{-# LANGUAGE TemplateHaskell #-}
module Render.GLTModeT where
import Control.Lens (makeLenses)
import qualified Data.ByteString as B
import Types
-- | A named rendering mode: associates a textual mode name with an
-- integer mode value.
-- NOTE(review): '_gltmMode' is presumably a GL texture-mode enum value
-- -- confirm at the call sites that populate it.
data GLTModeT =
  GLTModeT { _gltmName :: B.ByteString -- ^ symbolic name of the mode
           , _gltmMode :: Int          -- ^ numeric mode value
           }

-- generates the 'gltmName' / 'gltmMode' lenses
makeLenses ''GLTModeT
| ksaveljev/hake-2 | src/Render/GLTModeT.hs | bsd-3-clause | 270 | 0 | 9 | 60 | 59 | 36 | 23 | 9 | 0 |
{-
(c) The University of Glasgow 2006
(c) The AQUA Project, Glasgow University, 1996-1998
TcHsSyn: Specialisations of the @HsSyn@ syntax for the typechecker
This module is an extension of @HsSyn@ syntax, for use in the type
checker.
-}
{-# LANGUAGE CPP, TupleSections #-}
{-# LANGUAGE CPP, TypeFamilies #-}
module TcHsSyn (
-- * Extracting types from HsSyn
hsLitType, hsLPatType, hsPatType,
-- * Other HsSyn functions
mkHsDictLet, mkHsApp,
mkHsAppTy, mkHsCaseAlt,
shortCutLit, hsOverLitName,
conLikeResTy,
-- * re-exported from TcMonad
TcId, TcIdSet,
-- * Zonking
-- | For a description of "zonking", see Note [What is zonking?]
-- in TcMType
zonkTopDecls, zonkTopExpr, zonkTopLExpr,
zonkTopBndrs, zonkTyBndrsX,
zonkTyVarBindersX, zonkTyVarBinderX,
emptyZonkEnv, mkEmptyZonkEnv,
zonkTcTypeToType, zonkTcTypeToTypes, zonkTyVarOcc,
zonkCoToCo, zonkSigType,
zonkEvBinds, zonkTcEvBinds
) where
#include "HsVersions.h"
import GhcPrelude
import HsSyn
import Id
import IdInfo
import TcRnMonad
import PrelNames
import TcType
import TcMType
import TcEvidence
import TysPrim
import TyCon ( isUnboxedTupleTyCon )
import TysWiredIn
import Type
import Coercion
import ConLike
import DataCon
import HscTypes
import Name
import NameEnv
import Var
import VarEnv
import DynFlags
import Literal
import BasicTypes
import Maybes
import SrcLoc
import Bag
import Outputable
import Util
import UniqFM
import Control.Monad
import Data.List ( partition )
import Control.Arrow ( second )
{-
************************************************************************
* *
Extracting the type from HsSyn
* *
************************************************************************
-}
-- | Type of a located (typechecked) pattern: strip the location and
-- defer to 'hsPatType'.
hsLPatType :: OutPat GhcTc -> Type
hsLPatType = hsPatType . unLoc
-- | Compute the type of a typechecked pattern.  Most cases read the type
-- directly off the pattern or its annotation; wrapper patterns recurse,
-- and 'ConPatOut' rebuilds the result type from the constructor and its
-- universal argument types.  Any remaining pattern form is a panic.
hsPatType :: Pat GhcTc -> Type
hsPatType (ParPat pat)                = hsLPatType pat
hsPatType (WildPat ty)                = ty
hsPatType (VarPat (L _ var))          = idType var
hsPatType (BangPat pat)               = hsLPatType pat
hsPatType (LazyPat pat)               = hsLPatType pat
hsPatType (LitPat lit)                = hsLitType lit
hsPatType (AsPat var _)               = idType (unLoc var)
hsPatType (ViewPat _ _ ty)            = ty
hsPatType (ListPat _ ty Nothing)      = mkListTy ty
hsPatType (ListPat _ _ (Just (ty,_))) = ty
hsPatType (PArrPat _ ty)              = mkPArrTy ty
hsPatType (TuplePat _ bx tys)         = mkTupleTy bx tys
hsPatType (SumPat _ _ _ tys)          = mkSumTy tys
hsPatType (ConPatOut { pat_con = L _ con, pat_arg_tys = tys })
                                      = conLikeResTy con tys
hsPatType (SigPatOut _ ty)            = ty
hsPatType (NPat _ _ _ ty)             = ty
hsPatType (NPlusKPat _ _ _ _ _ ty)    = ty
hsPatType (CoPat _ _ ty)              = ty
hsPatType p                           = pprPanic "hsPatType" (ppr p)
-- | Type of a (non-overloaded) literal, read off its constructor;
-- 'HsInteger' and 'HsRat' carry their type in an annotation field.
hsLitType :: HsLit p -> TcType
hsLitType (HsChar _ _)       = charTy
hsLitType (HsCharPrim _ _)   = charPrimTy
hsLitType (HsString _ _)     = stringTy
hsLitType (HsStringPrim _ _) = addrPrimTy
hsLitType (HsInt _ _)        = intTy
hsLitType (HsIntPrim _ _)    = intPrimTy
hsLitType (HsWordPrim _ _)   = wordPrimTy
hsLitType (HsInt64Prim _ _)  = int64PrimTy
hsLitType (HsWord64Prim _ _) = word64PrimTy
hsLitType (HsInteger _ _ ty) = ty
hsLitType (HsRat _ _ ty)     = ty
hsLitType (HsFloatPrim _ _)  = floatPrimTy
hsLitType (HsDoublePrim _ _) = doublePrimTy
-- Overloaded literals. Here mainly because it uses isIntTy etc
shortCutLit :: DynFlags -> OverLitVal -> TcType -> Maybe (HsExpr GhcTcId)
shortCutLit dflags (HsIntegral int@(IL src neg i)) ty
| isIntTy ty && inIntRange dflags i = Just (HsLit (HsInt def int))
| isWordTy ty && inWordRange dflags i = Just (mkLit wordDataCon (HsWordPrim src i))
| isIntegerTy ty = Just (HsLit (HsInteger src i ty))
| otherwise = shortCutLit dflags (HsFractional (integralFractionalLit neg i)) ty
-- The 'otherwise' case is important
-- Consider (3 :: Float). Syntactically it looks like an IntLit,
-- so we'll call shortCutIntLit, but of course it's a float
-- This can make a big difference for programs with a lot of
-- literals, compiled without -O
shortCutLit _ (HsFractional f) ty
| isFloatTy ty = Just (mkLit floatDataCon (HsFloatPrim def f))
| isDoubleTy ty = Just (mkLit doubleDataCon (HsDoublePrim def f))
| otherwise = Nothing
shortCutLit _ (HsIsString src s) ty
| isStringTy ty = Just (HsLit (HsString src s))
| otherwise = Nothing
-- | Apply a data constructor to a single literal argument.
mkLit :: DataCon -> HsLit GhcTc -> HsExpr GhcTc
mkLit con = HsApp (nlHsDataCon con) . nlHsLit
------------------------------
-- | Get the canonical 'fromX' name for a particular OverLitVal
hsOverLitName :: OverLitVal -> Name
hsOverLitName lit = case lit of
  HsIntegral {}   -> fromIntegerName
  HsFractional {} -> fromRationalName
  HsIsString {}   -> fromStringName
{-
************************************************************************
* *
\subsection[BackSubst-HsBinds]{Running a substitution over @HsBinds@}
* *
************************************************************************
The rest of the zonking is done *after* typechecking.
The main zonking pass runs over the bindings
a) to convert TcTyVars to TyVars etc, dereferencing any bindings etc
b) convert unbound TcTyVar to Void
c) convert each TcId to an Id by zonking its type
The type variables are converted by binding mutable tyvars to immutable ones
and then zonking as normal.
The Ids are converted by binding them in the normal Tc envt; that
way we maintain sharing; eg an Id is zonked at its binding site and they
all occurrences of that Id point to the common zonked copy
It's all pretty boring stuff, because HsSyn is such a large type, and
the environment manipulation is tiresome.
-}
-- Confused by zonking? See Note [What is zonking?] in TcMType.
type UnboundTyVarZonker = TcTyVar -> TcM Type
-- How to zonk an unbound type variable
-- The TcTyVar is
-- (a) a MetaTv
-- (b) Flexi and
-- (c) its kind is already zonked
-- Note [Zonking the LHS of a RULE]
-- | A ZonkEnv carries around several bits.
-- The UnboundTyVarZonker just zaps unbouned meta-tyvars to Any (as
-- defined in zonkTypeZapping), except on the LHS of rules. See
-- Note [Zonking the LHS of a RULE].
--
-- The (TyCoVarEnv TyVar) and is just an optimisation: when binding a
-- tyvar or covar, we zonk the kind right away and add a mapping to
-- the env. This prevents re-zonking the kind at every occurrence. But
-- this is *just* an optimisation.
--
-- The final (IdEnv Var) optimises zonking for Ids. It is
-- knot-tied. We must be careful never to put coercion variables
-- (which are Ids, after all) in the knot-tied env, because coercions
-- can appear in types, and we sometimes inspect a zonked type in this
-- module.
--
-- Confused by zonking? See Note [What is zonking?] in TcMType.
data ZonkEnv
= ZonkEnv
UnboundTyVarZonker
(TyCoVarEnv TyVar)
(IdEnv Var) -- What variables are in scope
-- Maps an Id or EvVar to its zonked version; both have the same Name
-- Note that all evidence (coercion variables as well as dictionaries)
-- are kept in the ZonkEnv
-- Only *type* abstraction is done by side effect
-- Is only consulted lazily; hence knot-tying
instance Outputable ZonkEnv where
ppr (ZonkEnv _ _ty_env var_env) = pprUFM var_env (vcat . map ppr)
-- The EvBinds have to already be zonked, but that's usually the case.
emptyZonkEnv :: ZonkEnv
emptyZonkEnv = mkEmptyZonkEnv zonkTypeZapping
mkEmptyZonkEnv :: UnboundTyVarZonker -> ZonkEnv
mkEmptyZonkEnv zonker = ZonkEnv zonker emptyVarEnv emptyVarEnv
-- | Extend the knot-tied environment.
extendIdZonkEnvRec :: ZonkEnv -> [Var] -> ZonkEnv
extendIdZonkEnvRec (ZonkEnv zonk_ty ty_env id_env) ids
-- NB: Don't look at the var to decide which env't to put it in. That
-- would end up knot-tying all the env'ts.
= ZonkEnv zonk_ty ty_env (extendVarEnvList id_env [(id,id) | id <- ids])
-- Given coercion variables will actually end up here. That's OK though:
-- coercion variables are never looked up in the knot-tied env't, so zonking
-- them simply doesn't get optimised. No one gets hurt. An improvement (?)
-- would be to do SCC analysis in zonkEvBinds and then only knot-tie the
-- recursive groups. But perhaps the time it takes to do the analysis is
-- more than the savings.
-- | Add a mixed list of binders to the environment, routing type and
-- coercion variables to the tyco env and Ids to the (non-knot-tied)
-- id env.
extendZonkEnv :: ZonkEnv -> [Var] -> ZonkEnv
extendZonkEnv (ZonkEnv zonk_ty tyco_env id_env) vars
  = ZonkEnv zonk_ty tyco_env' id_env'
  where
    (tycovars, ids) = partition isTyCoVar vars
    tyco_env' = extendVarEnvList tyco_env [ (tv, tv) | tv <- tycovars ]
    id_env'   = extendVarEnvList id_env   [ (i,  i)  | i  <- ids ]
extendIdZonkEnv1 :: ZonkEnv -> Var -> ZonkEnv
extendIdZonkEnv1 (ZonkEnv zonk_ty ty_env id_env) id
= ZonkEnv zonk_ty ty_env (extendVarEnv id_env id id)
extendTyZonkEnv1 :: ZonkEnv -> TyVar -> ZonkEnv
extendTyZonkEnv1 (ZonkEnv zonk_ty ty_env id_env) tv
= ZonkEnv zonk_ty (extendVarEnv ty_env tv tv) id_env
setZonkType :: ZonkEnv -> UnboundTyVarZonker -> ZonkEnv
setZonkType (ZonkEnv _ ty_env id_env) zonk_ty
= ZonkEnv zonk_ty ty_env id_env
zonkEnvIds :: ZonkEnv -> TypeEnv
zonkEnvIds (ZonkEnv _ _ id_env) =
mkNameEnv [(getName id, AnId id) | id <- nonDetEltsUFM id_env]
-- It's OK to use nonDetEltsUFM here because we forget the ordering
-- immediately by creating a TypeEnv
zonkIdOcc :: ZonkEnv -> TcId -> Id
-- Ids defined in this module should be in the envt;
-- ignore others. (Actually, data constructors are also
-- not LocalVars, even when locally defined, but that is fine.)
-- (Also foreign-imported things aren't currently in the ZonkEnv;
-- that's ok because they don't need zonking.)
--
-- Actually, Template Haskell works in 'chunks' of declarations, and
-- an earlier chunk won't be in the 'env' that the zonking phase
-- carries around. Instead it'll be in the tcg_gbl_env, already fully
-- zonked. There's no point in looking it up there (except for error
-- checking), and it's not conveniently to hand; hence the simple
-- 'orElse' case in the LocalVar branch.
--
-- Even without template splices, in module Main, the checking of
-- 'main' is done as a separate chunk.
zonkIdOcc (ZonkEnv _zonk_ty _ty_env id_env) id
| isLocalVar id = lookupVarEnv id_env id `orElse`
id
| otherwise = id
-- | 'zonkIdOcc' over a list of occurrences.
zonkIdOccs :: ZonkEnv -> [TcId] -> [Id]
zonkIdOccs env = map (zonkIdOcc env)
-- zonkIdBndr is used *after* typechecking to get the Id's type
-- to its final form.  The ZonkEnv supplies the substitution; the zonked
-- type is also checked for (illegal) levity polymorphism, and the Id's
-- levity info is updated from it.
zonkIdBndr :: ZonkEnv -> TcId -> TcM Id
zonkIdBndr env v
  = do ty' <- zonkTcTypeToType env (idType v)
       ensureNotLevPoly ty'
         (text "In the type of binder" <+> quotes (ppr v))
       return (modifyIdInfo (`setLevityInfoWithType` ty') (setIdType v ty'))
-- | 'zonkIdBndr' over a list of binders.
zonkIdBndrs :: ZonkEnv -> [TcId] -> TcM [Id]
zonkIdBndrs = mapM . zonkIdBndr

-- | Zonk top-level binders, starting from the empty environment.
zonkTopBndrs :: [TcId] -> TcM [Id]
zonkTopBndrs = zonkIdBndrs emptyZonkEnv
-- | Zonk the selector Id of a record-field occurrence; the label is
-- left untouched.
zonkFieldOcc :: ZonkEnv -> FieldOcc GhcTcId -> TcM (FieldOcc GhcTc)
zonkFieldOcc env (FieldOcc lbl sel)
  = do { sel' <- zonkIdBndr env sel
       ; return (FieldOcc lbl sel') }
zonkEvBndrsX :: ZonkEnv -> [EvVar] -> TcM (ZonkEnv, [Var])
zonkEvBndrsX = mapAccumLM zonkEvBndrX
zonkEvBndrX :: ZonkEnv -> EvVar -> TcM (ZonkEnv, EvVar)
-- Works for dictionaries and coercions
zonkEvBndrX env var
= do { var' <- zonkEvBndr env var
; return (extendZonkEnv env [var'], var') }
zonkEvBndr :: ZonkEnv -> EvVar -> TcM EvVar
-- Works for dictionaries and coercions
-- Does not extend the ZonkEnv
zonkEvBndr env var
= do { let var_ty = varType var
; ty <-
{-# SCC "zonkEvBndr_zonkTcTypeToType" #-}
zonkTcTypeToType env var_ty
; return (setVarType var ty) }
-- | Zonk an evidence-variable occurrence: coercion variables become an
-- 'EvCoercion', ordinary dictionaries an 'EvId' looked up in the env.
zonkEvVarOcc :: ZonkEnv -> EvVar -> TcM EvTerm
zonkEvVarOcc env v =
  if isCoVar v
    then EvCoercion <$> zonkCoVarOcc env v
    else return (EvId (zonkIdOcc env v))
zonkTyBndrsX :: ZonkEnv -> [TcTyVar] -> TcM (ZonkEnv, [TyVar])
zonkTyBndrsX = mapAccumLM zonkTyBndrX
zonkTyBndrX :: ZonkEnv -> TcTyVar -> TcM (ZonkEnv, TyVar)
-- This guarantees to return a TyVar (not a TcTyVar)
-- then we add it to the envt, so all occurrences are replaced
zonkTyBndrX env tv
= ASSERT( isImmutableTyVar tv )
do { ki <- zonkTcTypeToType env (tyVarKind tv)
-- Internal names tidy up better, for iface files.
; let tv' = mkTyVar (tyVarName tv) ki
; return (extendTyZonkEnv1 env tv', tv') }
zonkTyVarBindersX :: ZonkEnv -> [TyVarBndr TcTyVar vis]
-> TcM (ZonkEnv, [TyVarBndr TyVar vis])
zonkTyVarBindersX = mapAccumLM zonkTyVarBinderX
zonkTyVarBinderX :: ZonkEnv -> TyVarBndr TcTyVar vis
                 -> TcM (ZonkEnv, TyVarBndr TyVar vis)
-- Takes a TcTyVar and guarantees to return a TyVar; the visibility flag
-- is carried through unchanged.
zonkTyVarBinderX env (TvBndr tv vis)
  = second (\tv' -> TvBndr tv' vis) <$> zonkTyBndrX env tv
-- | Zonk a top-level expression, starting from the empty environment.
zonkTopExpr :: HsExpr GhcTcId -> TcM (HsExpr GhcTc)
zonkTopExpr e = zonkExpr emptyZonkEnv e

-- | Zonk a top-level located expression, starting from the empty
-- environment.
zonkTopLExpr :: LHsExpr GhcTcId -> TcM (LHsExpr GhcTc)
zonkTopLExpr e = zonkLExpr emptyZonkEnv e
-- | Zonk everything produced for a complete module: evidence bindings,
-- value bindings, rules, vectorisation declarations, imported SPECIALISE
-- pragmas and foreign exports.  The environment is threaded through in
-- that order (top-level bindings are zonked as one implicitly recursive
-- group); the returned 'TypeEnv' holds the final zonked Ids.
zonkTopDecls :: Bag EvBind
             -> LHsBinds GhcTcId
             -> [LRuleDecl GhcTcId] -> [LVectDecl GhcTcId] -> [LTcSpecPrag]
             -> [LForeignDecl GhcTcId]
             -> TcM (TypeEnv,
                     Bag EvBind,
                     LHsBinds GhcTc,
                     [LForeignDecl GhcTc],
                     [LTcSpecPrag],
                     [LRuleDecl GhcTc],
                     [LVectDecl GhcTc])
zonkTopDecls ev_binds binds rules vects imp_specs fords
  = do { (env1, ev_binds') <- zonkEvBinds emptyZonkEnv ev_binds
       ; (env2, binds') <- zonkRecMonoBinds env1 binds
                        -- Top level is implicitly recursive
       ; rules' <- zonkRules env2 rules
       ; vects' <- zonkVects env2 vects
       ; specs' <- zonkLTcSpecPrags env2 imp_specs
       ; fords' <- zonkForeignExports env2 fords
       ; return (zonkEnvIds env2, ev_binds', binds', fords', specs', rules', vects') }
---------------------------------------------
-- | Zonk a local binding group, returning an environment extended with
-- the zonked binders so they scope over the body of the let/where.
-- 'ValBindsIn' cannot occur here: the typechecker always produces
-- 'ValBindsOut'.
zonkLocalBinds :: ZonkEnv -> HsLocalBinds GhcTcId
               -> TcM (ZonkEnv, HsLocalBinds GhcTc)
zonkLocalBinds env EmptyLocalBinds
  = return (env, EmptyLocalBinds)

zonkLocalBinds _ (HsValBinds (ValBindsIn {}))
  = panic "zonkLocalBinds" -- Not in typechecker output

zonkLocalBinds env (HsValBinds (ValBindsOut binds sigs))
  = do { (env1, new_binds) <- go env binds
       ; return (env1, HsValBinds (ValBindsOut new_binds sigs)) }
  where
    -- Zonk each (recursive-flag, bag) group in turn, threading the env
    go env []
      = return (env, [])
    go env ((r,b):bs)
      = do { (env1, b') <- zonkRecMonoBinds env b
           ; (env2, bs') <- go env1 bs
           ; return (env2, (r,b'):bs') }

zonkLocalBinds env (HsIPBinds (IPBinds binds dict_binds)) = do
    new_binds <- mapM (wrapLocM zonk_ip_bind) binds
    let
        -- Only the Right (evidence-variable) binders extend the env
        env1 = extendIdZonkEnvRec env [ n | L _ (IPBind (Right n) _) <- new_binds]
    (env2, new_dict_binds) <- zonkTcEvBinds env1 dict_binds
    return (env2, HsIPBinds (IPBinds new_binds new_dict_binds))
  where
    zonk_ip_bind (IPBind n e)
        = do n' <- mapIPNameTc (zonkIdBndr env) n
             e' <- zonkLExpr env e
             return (IPBind n' e')
---------------------------------------------
-- | Zonk a recursive group of bindings.  The binders of the group scope
-- over their own right-hand sides, so we tie the knot with 'fixM': the
-- environment is extended with the (lazily available) zonked binders
-- before zonking the bodies.  The lazy pattern (~) is essential here.
zonkRecMonoBinds :: ZonkEnv -> LHsBinds GhcTcId -> TcM (ZonkEnv, LHsBinds GhcTc)
zonkRecMonoBinds env binds
 = fixM (\ ~(_, new_binds) -> do
        { let env1 = extendIdZonkEnvRec env (collectHsBindsBinders new_binds)
        ; binds' <- zonkMonoBinds env1 binds
        ; return (env1, binds') })
---------------------------------------------
zonkMonoBinds :: ZonkEnv -> LHsBinds GhcTcId -> TcM (LHsBinds GhcTc)
-- | Zonk every binding in the bag under a fixed environment; callers
-- (e.g. 'zonkRecMonoBinds') have already extended the env with the
-- group's binders.
zonkMonoBinds env = mapBagM (zonk_lbind env)
zonk_lbind :: ZonkEnv -> LHsBind GhcTcId -> TcM (LHsBind GhcTc)
-- | Located wrapper around 'zonk_bind'.
zonk_lbind env lbind = wrapLocM (zonk_bind env) lbind
-- | Zonk one binding.  The environment is /not/ returned: binders of a
-- group are brought into scope by the caller ('zonkRecMonoBinds').
zonk_bind :: ZonkEnv -> HsBind GhcTcId -> TcM (HsBind GhcTc)
zonk_bind env bind@(PatBind { pat_lhs = pat, pat_rhs = grhss, pat_rhs_ty = ty})
  = do { (_env, new_pat) <- zonkPat env pat -- Env already extended
       ; new_grhss <- zonkGRHSs env zonkLExpr grhss
       ; new_ty <- zonkTcTypeToType env ty
       ; return (bind { pat_lhs = new_pat, pat_rhs = new_grhss, pat_rhs_ty = new_ty }) }

zonk_bind env (VarBind { var_id = var, var_rhs = expr, var_inline = inl })
  = do { new_var <- zonkIdBndr env var
       ; new_expr <- zonkLExpr env expr
       ; return (VarBind { var_id = new_var, var_rhs = new_expr, var_inline = inl }) }

zonk_bind env bind@(FunBind { fun_id = L loc var, fun_matches = ms
                            , fun_co_fn = co_fn })
  = do { new_var <- zonkIdBndr env var
       ; (env1, new_co_fn) <- zonkCoFn env co_fn
       ; new_ms <- zonkMatchGroup env1 zonkLExpr ms
       ; return (bind { fun_id = L loc new_var, fun_matches = new_ms
                      , fun_co_fn = new_co_fn }) }

zonk_bind env (AbsBinds { abs_tvs = tyvars, abs_ev_vars = evs
                        , abs_ev_binds = ev_binds
                        , abs_exports = exports
                        , abs_binds = val_binds
                        , abs_sig = has_sig })
  = ASSERT( all isImmutableTyVar tyvars )
    do { (env0, new_tyvars) <- zonkTyBndrsX env tyvars
       ; (env1, new_evs) <- zonkEvBndrsX env0 evs
       ; (env2, new_ev_binds) <- zonkTcEvBinds_s env1 ev_binds
         -- Knot-tie: the monomorphic binders scope over the val_binds
         -- themselves, so extend the env with the lazily-produced result
       ; (new_val_bind, new_exports) <- fixM $ \ ~(new_val_binds, _) ->
         do { let env3 = extendIdZonkEnvRec env2 $
                         collectHsBindsBinders new_val_binds
            ; new_val_binds <- mapBagM (zonk_val_bind env3) val_binds
            ; new_exports   <- mapM (zonk_export env3) exports
            ; return (new_val_binds, new_exports) }
       ; return (AbsBinds { abs_tvs = new_tyvars, abs_ev_vars = new_evs
                          , abs_ev_binds = new_ev_binds
                          , abs_exports = new_exports, abs_binds = new_val_bind
                          , abs_sig = has_sig }) }
  where
    -- With a complete type signature the monomorphic id is zonked
    -- without the levity-polymorphism check (see comment below)
    zonk_val_bind env lbind
      | has_sig
      , L loc bind@(FunBind { fun_id = L mloc mono_id
                            , fun_matches = ms
                            , fun_co_fn = co_fn }) <- lbind
      = do { new_mono_id <- updateVarTypeM (zonkTcTypeToType env) mono_id
                            -- Specifically /not/ zonkIdBndr; we do not
                            -- want to complain about a levity-polymorphic binder
           ; (env', new_co_fn) <- zonkCoFn env co_fn
           ; new_ms <- zonkMatchGroup env' zonkLExpr ms
           ; return $ L loc $
             bind { fun_id = L mloc new_mono_id
                  , fun_matches = new_ms
                  , fun_co_fn = new_co_fn } }
      | otherwise
      = zonk_lbind env lbind   -- The normal case

    -- Zonk one export record; the mono id is an occurrence (already
    -- bound by the fixM above), the poly id is a binder
    zonk_export env (ABE{ abe_wrap = wrap
                        , abe_poly = poly_id
                        , abe_mono = mono_id
                        , abe_prags = prags })
        = do new_poly_id <- zonkIdBndr env poly_id
             (_, new_wrap) <- zonkCoFn env wrap
             new_prags <- zonkSpecPrags env prags
             return (ABE{ abe_wrap = new_wrap
                        , abe_poly = new_poly_id
                        , abe_mono = zonkIdOcc env mono_id
                        , abe_prags = new_prags })

zonk_bind env (PatSynBind bind@(PSB { psb_id = L loc id
                                    , psb_args = details
                                    , psb_def = lpat
                                    , psb_dir = dir }))
  = do { id' <- zonkIdBndr env id
       ; details' <- zonkPatSynDetails env details
       ; (env1, lpat') <- zonkPat env lpat
       ; (_env2, dir') <- zonkPatSynDir env1 dir
       ; return $ PatSynBind $
                  bind { psb_id = L loc id'
                       , psb_args = details'
                       , psb_def = lpat'
                       , psb_dir = dir' } }
zonkPatSynDetails :: ZonkEnv
                  -> HsPatSynDetails (Located TcId)
                  -> TcM (HsPatSynDetails (Located Id))
-- | Zonk every argument binder of a pattern-synonym declaration.
zonkPatSynDetails env details = traverse (wrapLocM (zonkIdBndr env)) details
zonkPatSynDir :: ZonkEnv -> HsPatSynDir GhcTcId
              -> TcM (ZonkEnv, HsPatSynDir GhcTc)
-- | Zonk a pattern-synonym direction.  Only an explicitly bidirectional
-- synonym carries a match group that needs zonking.
zonkPatSynDir env dir = case dir of
  Unidirectional           -> return (env, Unidirectional)
  ImplicitBidirectional    -> return (env, ImplicitBidirectional)
  ExplicitBidirectional mg -> do
    mg' <- zonkMatchGroup env zonkLExpr mg
    return (env, ExplicitBidirectional mg')
zonkSpecPrags :: ZonkEnv -> TcSpecPrags -> TcM TcSpecPrags
-- | Zonk SPECIALISE pragma information; default-method markers carry
-- nothing that needs zonking.
zonkSpecPrags _   IsDefaultMethod = return IsDefaultMethod
zonkSpecPrags env (SpecPrags ps)  = SpecPrags <$> zonkLTcSpecPrags env ps
zonkLTcSpecPrags :: ZonkEnv -> [LTcSpecPrag] -> TcM [LTcSpecPrag]
-- | Zonk a list of located SPECIALISE pragmas: the wrapper is zonked
-- and the specialised id resolved as an occurrence.
zonkLTcSpecPrags env = mapM zonk_one
  where
    zonk_one (L loc (SpecPrag id co_fn inl)) = do
      (_, co_fn') <- zonkCoFn env co_fn
      return (L loc (SpecPrag (zonkIdOcc env id) co_fn' inl))
{-
************************************************************************
* *
\subsection[BackSubst-Match-GRHSs]{Match and GRHSs}
* *
************************************************************************
-}
-- | Zonk a match group: every alternative, plus the argument and result
-- types cached in the group itself.  'zBody' zonks the right-hand-side
-- body (expression or command).
zonkMatchGroup :: ZonkEnv
            -> (ZonkEnv -> Located (body GhcTcId) -> TcM (Located (body GhcTc)))
            -> MatchGroup GhcTcId (Located (body GhcTcId))
            -> TcM (MatchGroup GhcTc (Located (body GhcTc)))
zonkMatchGroup env zBody (MG { mg_alts = L l ms, mg_arg_tys = arg_tys
                             , mg_res_ty = res_ty, mg_origin = origin })
  = do { ms' <- mapM (zonkMatch env zBody) ms
       ; arg_tys' <- zonkTcTypeToTypes env arg_tys
       ; res_ty' <- zonkTcTypeToType env res_ty
       ; return (MG { mg_alts = L l ms', mg_arg_tys = arg_tys'
                    , mg_res_ty = res_ty', mg_origin = origin }) }
-- | Zonk one alternative of a match group.  The patterns are zonked
-- first; the environment they extend scopes over the guards and body.
zonkMatch :: ZonkEnv
          -> (ZonkEnv -> Located (body GhcTcId) -> TcM (Located (body GhcTc)))
          -> LMatch GhcTcId (Located (body GhcTcId))
          -> TcM (LMatch GhcTc (Located (body GhcTc)))
zonkMatch env zBody (L loc match@(Match { m_pats = pats, m_grhss = grhss }))
  = do { (env1, new_pats) <- zonkPats env pats
       ; new_grhss <- zonkGRHSs env1 zBody grhss
       ; return (L loc (match { m_pats = new_pats, m_grhss = new_grhss })) }
-------------------------------------------------------------------------
-- | Zonk guarded right-hand sides.  The local (where) bindings are
-- zonked first and scope over every guard and every RHS.
zonkGRHSs :: ZonkEnv
          -> (ZonkEnv -> Located (body GhcTcId) -> TcM (Located (body GhcTc)))
          -> GRHSs GhcTcId (Located (body GhcTcId))
          -> TcM (GRHSs GhcTc (Located (body GhcTc)))
zonkGRHSs env zBody (GRHSs grhss (L l binds)) = do
    (new_env, new_binds) <- zonkLocalBinds env binds
    let
        -- Guard statements may bind (e.g. pattern guards); that env
        -- scopes over the RHS
        zonk_grhs (GRHS guarded rhs)
          = do (env2, new_guarded) <- zonkStmts new_env zonkLExpr guarded
               new_rhs <- zBody env2 rhs
               return (GRHS new_guarded new_rhs)
    new_grhss <- mapM (wrapLocM zonk_grhs) grhss
    return (GRHSs new_grhss (L l new_binds))
{-
************************************************************************
* *
\subsection[BackSubst-HsExpr]{Running a zonkitution over a TypeCheckedExpr}
* *
************************************************************************
-}
-- | Zonk expressions: one equation per 'HsExpr' constructor.  Types
-- cached in the tree are zonked with 'zonkTcTypeToType'; id occurrences
-- are resolved with 'zonkIdOcc'; rebindable-syntax operators go through
-- 'zonkSyntaxExpr', whose returned env scopes over the arguments
-- (see Note [Skolems in zonkSyntaxExpr]).
zonkLExprs :: ZonkEnv -> [LHsExpr GhcTcId] -> TcM [LHsExpr GhcTc]
zonkLExpr  :: ZonkEnv -> LHsExpr GhcTcId    -> TcM (LHsExpr GhcTc)
zonkExpr   :: ZonkEnv -> HsExpr GhcTcId    -> TcM (HsExpr GhcTc)

zonkLExprs env exprs = mapM (zonkLExpr env) exprs
zonkLExpr  env expr  = wrapLocM (zonkExpr env) expr

zonkExpr env (HsVar (L l id))
  = ASSERT2( isNothing (isDataConId_maybe id), ppr id )
    return (HsVar (L l (zonkIdOcc env id)))

zonkExpr _ e@(HsConLikeOut {}) = return e

zonkExpr _ (HsIPVar id)
  = return (HsIPVar id)

zonkExpr _ e@HsOverLabel{} = return e

zonkExpr env (HsLit (HsRat e f ty))
  = do new_ty <- zonkTcTypeToType env ty
       return (HsLit (HsRat e f new_ty))

zonkExpr _ (HsLit lit)
  = return (HsLit lit)

zonkExpr env (HsOverLit lit)
  = do  { lit' <- zonkOverLit env lit
        ; return (HsOverLit lit') }

zonkExpr env (HsLam matches)
  = do new_matches <- zonkMatchGroup env zonkLExpr matches
       return (HsLam new_matches)

zonkExpr env (HsLamCase matches)
  = do new_matches <- zonkMatchGroup env zonkLExpr matches
       return (HsLamCase new_matches)

zonkExpr env (HsApp e1 e2)
  = do new_e1 <- zonkLExpr env e1
       new_e2 <- zonkLExpr env e2
       return (HsApp new_e1 new_e2)

zonkExpr env (HsAppTypeOut e t)
  = do new_e <- zonkLExpr env e
       return (HsAppTypeOut new_e t)
       -- NB: the type is an HsType; can't zonk that!

zonkExpr _ e@(HsRnBracketOut _ _)
  = pprPanic "zonkExpr: HsRnBracketOut" (ppr e)

zonkExpr env (HsTcBracketOut body bs)
  = do bs' <- mapM zonk_b bs
       return (HsTcBracketOut body bs')
  where
    zonk_b (PendingTcSplice n e) = do e' <- zonkLExpr env e
                                      return (PendingTcSplice n e')

zonkExpr _ (HsSpliceE s) = WARN( True, ppr s ) -- Should not happen
                           return (HsSpliceE s)

zonkExpr env (OpApp e1 op fixity e2)
  = do new_e1 <- zonkLExpr env e1
       new_op <- zonkLExpr env op
       new_e2 <- zonkLExpr env e2
       return (OpApp new_e1 new_op fixity new_e2)

zonkExpr env (NegApp expr op)
  = do (env', new_op) <- zonkSyntaxExpr env op
       new_expr <- zonkLExpr env' expr
       return (NegApp new_expr new_op)

zonkExpr env (HsPar e)
  = do new_e <- zonkLExpr env e
       return (HsPar new_e)

zonkExpr env (SectionL expr op)
  = do new_expr <- zonkLExpr env expr
       new_op   <- zonkLExpr env op
       return (SectionL new_expr new_op)

zonkExpr env (SectionR op expr)
  = do new_op   <- zonkLExpr env op
       new_expr <- zonkLExpr env expr
       return (SectionR new_op new_expr)

zonkExpr env (ExplicitTuple tup_args boxed)
  = do { new_tup_args <- mapM zonk_tup_arg tup_args
       ; return (ExplicitTuple new_tup_args boxed) }
  where
    zonk_tup_arg (L l (Present e)) = do { e' <- zonkLExpr env e
                                        ; return (L l (Present e')) }
    -- A Missing slot (tuple section) carries only the elided type
    zonk_tup_arg (L l (Missing t)) = do { t' <- zonkTcTypeToType env t
                                        ; return (L l (Missing t')) }

zonkExpr env (ExplicitSum alt arity expr args)
  = do new_args <- mapM (zonkTcTypeToType env) args
       new_expr <- zonkLExpr env expr
       return (ExplicitSum alt arity new_expr new_args)

zonkExpr env (HsCase expr ms)
  = do new_expr <- zonkLExpr env expr
       new_ms <- zonkMatchGroup env zonkLExpr ms
       return (HsCase new_expr new_ms)

zonkExpr env (HsIf Nothing e1 e2 e3)
  = do new_e1 <- zonkLExpr env e1
       new_e2 <- zonkLExpr env e2
       new_e3 <- zonkLExpr env e3
       return (HsIf Nothing new_e1 new_e2 new_e3)

zonkExpr env (HsIf (Just fun) e1 e2 e3)
  = do (env1, new_fun) <- zonkSyntaxExpr env fun
       new_e1 <- zonkLExpr env1 e1
       new_e2 <- zonkLExpr env1 e2
       new_e3 <- zonkLExpr env1 e3
       return (HsIf (Just new_fun) new_e1 new_e2 new_e3)

zonkExpr env (HsMultiIf ty alts)
  = do { alts' <- mapM (wrapLocM zonk_alt) alts
       ; ty'   <- zonkTcTypeToType env ty
       ; return $ HsMultiIf ty' alts' }
  where zonk_alt (GRHS guard expr)
          = do { (env', guard') <- zonkStmts env zonkLExpr guard
               ; expr'          <- zonkLExpr env' expr
               ; return $ GRHS guard' expr' }

zonkExpr env (HsLet (L l binds) expr)
  = do (new_env, new_binds) <- zonkLocalBinds env binds
       new_expr <- zonkLExpr new_env expr
       return (HsLet (L l new_binds) new_expr)

zonkExpr env (HsDo do_or_lc (L l stmts) ty)
  = do (_, new_stmts) <- zonkStmts env zonkLExpr stmts
       new_ty <- zonkTcTypeToType env ty
       return (HsDo do_or_lc (L l new_stmts) new_ty)

zonkExpr env (ExplicitList ty wit exprs)
  = do (env1, new_wit) <- zonkWit env wit
       new_ty <- zonkTcTypeToType env1 ty
       new_exprs <- zonkLExprs env1 exprs
       return (ExplicitList new_ty new_wit new_exprs)
   where zonkWit env Nothing    = return (env, Nothing)
         zonkWit env (Just fln) = second Just <$> zonkSyntaxExpr env fln

zonkExpr env (ExplicitPArr ty exprs)
  = do new_ty <- zonkTcTypeToType env ty
       new_exprs <- zonkLExprs env exprs
       return (ExplicitPArr new_ty new_exprs)

zonkExpr env expr@(RecordCon { rcon_con_expr = con_expr, rcon_flds = rbinds })
  = do  { new_con_expr <- zonkExpr env con_expr
        ; new_rbinds   <- zonkRecFields env rbinds
        ; return (expr { rcon_con_expr = new_con_expr
                       , rcon_flds = new_rbinds }) }

zonkExpr env (RecordUpd { rupd_expr = expr, rupd_flds = rbinds
                        , rupd_cons = cons, rupd_in_tys = in_tys
                        , rupd_out_tys = out_tys, rupd_wrap = req_wrap })
  = do { new_expr    <- zonkLExpr env expr
       ; new_in_tys  <- mapM (zonkTcTypeToType env) in_tys
       ; new_out_tys <- mapM (zonkTcTypeToType env) out_tys
       ; new_rbinds  <- zonkRecUpdFields env rbinds
       ; (_, new_recwrap) <- zonkCoFn env req_wrap
       ; return (RecordUpd { rupd_expr = new_expr, rupd_flds = new_rbinds
                           , rupd_cons = cons, rupd_in_tys = new_in_tys
                           , rupd_out_tys = new_out_tys, rupd_wrap = new_recwrap }) }

zonkExpr env (ExprWithTySigOut e ty)
  = do { e' <- zonkLExpr env e
       ; return (ExprWithTySigOut e' ty) }

zonkExpr env (ArithSeq expr wit info)
  = do (env1, new_wit) <- zonkWit env wit
       -- NOTE(review): the seq expr is zonked in 'env', not 'env1',
       -- unlike ExplicitList above -- confirm the witness's skolems
       -- cannot scope over 'expr' here
       new_expr <- zonkExpr env expr
       new_info <- zonkArithSeq env1 info
       return (ArithSeq new_expr new_wit new_info)
   where zonkWit env Nothing    = return (env, Nothing)
         zonkWit env (Just fln) = second Just <$> zonkSyntaxExpr env fln

zonkExpr env (PArrSeq expr info)
  = do new_expr <- zonkExpr env expr
       new_info <- zonkArithSeq env info
       return (PArrSeq new_expr new_info)

zonkExpr env (HsSCC src lbl expr)
  = do new_expr <- zonkLExpr env expr
       return (HsSCC src lbl new_expr)

zonkExpr env (HsTickPragma src info srcInfo expr)
  = do new_expr <- zonkLExpr env expr
       return (HsTickPragma src info srcInfo new_expr)

-- hdaume: core annotations
zonkExpr env (HsCoreAnn src lbl expr)
  = do new_expr <- zonkLExpr env expr
       return (HsCoreAnn src lbl new_expr)

-- arrow notation extensions
zonkExpr env (HsProc pat body)
  = do  { (env1, new_pat) <- zonkPat env pat
        ; new_body <- zonkCmdTop env1 body
        ; return (HsProc new_pat new_body) }

-- StaticPointers extension
zonkExpr env (HsStatic fvs expr)
  = HsStatic fvs <$> zonkLExpr env expr

zonkExpr env (HsWrap co_fn expr)
  = do (env1, new_co_fn) <- zonkCoFn env co_fn
       new_expr <- zonkExpr env1 expr
       return (HsWrap new_co_fn new_expr)

zonkExpr _ e@(HsUnboundVar {}) = return e

-- Anything else should have been eliminated by the typechecker
zonkExpr _ expr = pprPanic "zonkExpr" (ppr expr)
-------------------------------------------------------------------------
{-
Note [Skolems in zonkSyntaxExpr]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider rebindable syntax with something like
(>>=) :: (forall x. blah) -> (forall y. blah') -> blah''
The x and y become skolems that are in scope when type-checking the
arguments to the bind. This means that we must extend the ZonkEnv with
these skolems when zonking the arguments to the bind. But the skolems
are different between the two arguments, and so we should theoretically
carry around different environments to use for the different arguments.
However, this becomes a logistical nightmare, especially in dealing with
the more exotic Stmt forms. So, we simplify by making the critical
assumption that the uniques of the skolems are different. (This assumption
is justified by the use of newUnique in TcMType.instSkolTyCoVarX.)
Now, we can safely just extend one environment.
-}
-- See Note [Skolems in zonkSyntaxExpr]
-- See Note [Skolems in zonkSyntaxExpr]
-- | Zonk a rebindable-syntax operator: its result wrapper, the operator
-- expression itself, and the argument wrappers.  The returned env
-- carries any binders introduced by the wrappers, so callers can zonk
-- the operator's arguments in it.
zonkSyntaxExpr :: ZonkEnv -> SyntaxExpr GhcTcId
               -> TcM (ZonkEnv, SyntaxExpr GhcTc)
zonkSyntaxExpr env (SyntaxExpr { syn_expr      = expr
                               , syn_arg_wraps = arg_wraps
                               , syn_res_wrap  = res_wrap })
  = do { (env0, res_wrap')  <- zonkCoFn env res_wrap
       ; expr'              <- zonkExpr env0 expr
       ; (env1, arg_wraps') <- mapAccumLM zonkCoFn env0 arg_wraps
       ; return (env1, SyntaxExpr { syn_expr      = expr'
                                  , syn_arg_wraps = arg_wraps'
                                  , syn_res_wrap  = res_wrap' }) }
-------------------------------------------------------------------------
-- | Zonk arrow-notation commands, one equation per 'HsCmd' constructor.
zonkLCmd  :: ZonkEnv -> LHsCmd GhcTcId -> TcM (LHsCmd GhcTc)
zonkCmd   :: ZonkEnv -> HsCmd GhcTcId  -> TcM (HsCmd GhcTc)

zonkLCmd  env cmd  = wrapLocM (zonkCmd env) cmd

zonkCmd env (HsCmdWrap w cmd)
  = do { (env1, w') <- zonkCoFn env w
       ; cmd' <- zonkCmd env1 cmd
       ; return (HsCmdWrap w' cmd') }
zonkCmd env (HsCmdArrApp e1 e2 ty ho rl)
  = do new_e1 <- zonkLExpr env e1
       new_e2 <- zonkLExpr env e2
       new_ty <- zonkTcTypeToType env ty
       return (HsCmdArrApp new_e1 new_e2 new_ty ho rl)

zonkCmd env (HsCmdArrForm op f fixity args)
  = do new_op <- zonkLExpr env op
       new_args <- mapM (zonkCmdTop env) args
       return (HsCmdArrForm new_op f fixity new_args)

zonkCmd env (HsCmdApp c e)
  = do new_c <- zonkLCmd env c
       new_e <- zonkLExpr env e
       return (HsCmdApp new_c new_e)

zonkCmd env (HsCmdLam matches)
  = do new_matches <- zonkMatchGroup env zonkLCmd matches
       return (HsCmdLam new_matches)

zonkCmd env (HsCmdPar c)
  = do new_c <- zonkLCmd env c
       return (HsCmdPar new_c)

zonkCmd env (HsCmdCase expr ms)
  = do new_expr <- zonkLExpr env expr
       new_ms <- zonkMatchGroup env zonkLCmd ms
       return (HsCmdCase new_expr new_ms)

zonkCmd env (HsCmdIf eCond ePred cThen cElse)
  = do { (env1, new_eCond) <- zonkWit env eCond
       ; new_ePred <- zonkLExpr env1 ePred
       ; new_cThen <- zonkLCmd env1 cThen
       ; new_cElse <- zonkLCmd env1 cElse
       ; return (HsCmdIf new_eCond new_ePred new_cThen new_cElse) }
  where
    -- The optional rebindable 'ifThenElse'; its env scopes over the arms
    zonkWit env Nothing  = return (env, Nothing)
    zonkWit env (Just w) = second Just <$> zonkSyntaxExpr env w

zonkCmd env (HsCmdLet (L l binds) cmd)
  = do (new_env, new_binds) <- zonkLocalBinds env binds
       new_cmd <- zonkLCmd new_env cmd
       return (HsCmdLet (L l new_binds) new_cmd)

zonkCmd env (HsCmdDo (L l stmts) ty)
  = do (_, new_stmts) <- zonkStmts env zonkLCmd stmts
       new_ty <- zonkTcTypeToType env ty
       return (HsCmdDo (L l new_stmts) new_ty)
zonkCmdTop :: ZonkEnv -> LHsCmdTop GhcTcId -> TcM (LHsCmdTop GhcTc)
-- | Located wrapper around 'zonk_cmd_top'.
zonkCmdTop env = wrapLocM (zonk_cmd_top env)
-- | Zonk a top-level arrow command together with its stack type, result
-- type, and the (name, expression) pairs for the arrow operators.
zonk_cmd_top :: ZonkEnv -> HsCmdTop GhcTcId -> TcM (HsCmdTop GhcTc)
zonk_cmd_top env (HsCmdTop cmd stack_tys ty ids)
  = do new_cmd <- zonkLCmd env cmd
       new_stack_tys <- zonkTcTypeToType env stack_tys
       new_ty <- zonkTcTypeToType env ty
       new_ids <- mapSndM (zonkExpr env) ids

       MASSERT( isLiftedTypeKind (typeKind new_stack_tys) )
         -- desugarer assumes that this is not levity polymorphic...
         -- but indeed it should always be lifted due to the typing
         -- rules for arrows

       return (HsCmdTop new_cmd new_stack_tys new_ty new_ids)
-------------------------------------------------------------------------
-- | Zonk an 'HsWrapper'.  Wrappers that bind (evidence lambdas, type
-- lambdas, evidence lets) extend the returned environment; the others
-- return the env unchanged.
zonkCoFn :: ZonkEnv -> HsWrapper -> TcM (ZonkEnv, HsWrapper)
zonkCoFn env WpHole   = return (env, WpHole)
zonkCoFn env (WpCompose c1 c2) = do { (env1, c1') <- zonkCoFn env c1
                                    ; (env2, c2') <- zonkCoFn env1 c2
                                    ; return (env2, WpCompose c1' c2') }
zonkCoFn env (WpFun c1 c2 t1 d) = do { (env1, c1') <- zonkCoFn env c1
                                     ; (env2, c2') <- zonkCoFn env1 c2
                                     ; t1'         <- zonkTcTypeToType env2 t1
                                     ; return (env2, WpFun c1' c2' t1' d) }
zonkCoFn env (WpCast co) = do { co' <- zonkCoToCo env co
                              ; return (env, WpCast co') }
zonkCoFn env (WpEvLam ev)   = do { (env', ev') <- zonkEvBndrX env ev
                                 ; return (env', WpEvLam ev') }
zonkCoFn env (WpEvApp arg)  = do { arg' <- zonkEvTerm env arg
                                 ; return (env, WpEvApp arg') }
zonkCoFn env (WpTyLam tv)   = ASSERT( isImmutableTyVar tv )
                              do { (env', tv') <- zonkTyBndrX env tv
                                 ; return (env', WpTyLam tv') }
zonkCoFn env (WpTyApp ty)   = do { ty' <- zonkTcTypeToType env ty
                                 ; return (env, WpTyApp ty') }
zonkCoFn env (WpLet bs)     = do { (env1, bs') <- zonkTcEvBinds env bs
                                 ; return (env1, WpLet bs') }
-------------------------------------------------------------------------
zonkOverLit :: ZonkEnv -> HsOverLit GhcTcId -> TcM (HsOverLit GhcTc)
-- | Zonk an overloaded literal: both its elaborated witness expression
-- and its cached type.
zonkOverLit env lit@(OverLit { ol_witness = e, ol_type = ty }) = do
  zty  <- zonkTcTypeToType env ty
  zwit <- zonkExpr env e
  return (lit { ol_witness = zwit, ol_type = zty })
-------------------------------------------------------------------------
zonkArithSeq :: ZonkEnv -> ArithSeqInfo GhcTcId -> TcM (ArithSeqInfo GhcTc)
-- | Zonk the endpoint expressions of an arithmetic sequence
-- ([a..], [a,b..], [a..b], [a,b..c]).
zonkArithSeq env (From e)
  = From <$> zonkLExpr env e
zonkArithSeq env (FromThen e1 e2)
  = FromThen <$> zonkLExpr env e1 <*> zonkLExpr env e2
zonkArithSeq env (FromTo e1 e2)
  = FromTo <$> zonkLExpr env e1 <*> zonkLExpr env e2
zonkArithSeq env (FromThenTo e1 e2 e3)
  = FromThenTo <$> zonkLExpr env e1 <*> zonkLExpr env e2 <*> zonkLExpr env e3
-------------------------------------------------------------------------
-- | Zonk a statement list left-to-right, threading the environment so
-- that binders introduced by one statement scope over the later ones.
zonkStmts :: ZonkEnv
          -> (ZonkEnv -> Located (body GhcTcId) -> TcM (Located (body GhcTc)))
          -> [LStmt GhcTcId (Located (body GhcTcId))]
          -> TcM (ZonkEnv, [LStmt GhcTc (Located (body GhcTc))])
zonkStmts env _ []     = return (env, [])
zonkStmts env zBody (s:ss) = do { (env1, s')  <- wrapLocSndM (zonkStmt env zBody) s
                                ; (env2, ss') <- zonkStmts env1 zBody ss
                                ; return (env2, s' : ss') }
-- | Zonk a single statement.  The returned environment contains the
-- binders the statement introduces for subsequent statements.
zonkStmt :: ZonkEnv
         -> (ZonkEnv -> Located (body GhcTcId) -> TcM (Located (body GhcTc)))
         -> Stmt GhcTcId (Located (body GhcTcId))
         -> TcM (ZonkEnv, Stmt GhcTc (Located (body GhcTc)))
zonkStmt env _ (ParStmt stmts_w_bndrs mzip_op bind_op bind_ty)
  = do { (env1, new_bind_op) <- zonkSyntaxExpr env bind_op
       ; new_bind_ty <- zonkTcTypeToType env1 bind_ty
       ; new_stmts_w_bndrs <- mapM (zonk_branch env1) stmts_w_bndrs
       ; let new_binders = [b | ParStmtBlock _ bs _ <- new_stmts_w_bndrs
                              , b <- bs]
             env2 = extendIdZonkEnvRec env1 new_binders
       ; new_mzip <- zonkExpr env2 mzip_op
       ; return (env2, ParStmt new_stmts_w_bndrs new_mzip new_bind_op new_bind_ty) }
  where
    -- Each parallel branch is zonked independently, starting from env1
    zonk_branch env1 (ParStmtBlock stmts bndrs return_op)
       = do { (env2, new_stmts)  <- zonkStmts env1 zonkLExpr stmts
            ; (env3, new_return) <- zonkSyntaxExpr env2 return_op
            ; return (ParStmtBlock new_stmts (zonkIdOccs env3 bndrs) new_return) }

zonkStmt env zBody (RecStmt { recS_stmts = segStmts, recS_later_ids = lvs, recS_rec_ids = rvs
                            , recS_ret_fn = ret_id, recS_mfix_fn = mfix_id
                            , recS_bind_fn = bind_id, recS_bind_ty = bind_ty
                            , recS_later_rets = later_rets, recS_rec_rets = rec_rets
                            , recS_ret_ty = ret_ty })
  = do { (env1, new_bind_id) <- zonkSyntaxExpr env bind_id
       ; (env2, new_mfix_id) <- zonkSyntaxExpr env1 mfix_id
       ; (env3, new_ret_id)  <- zonkSyntaxExpr env2 ret_id
       ; new_bind_ty <- zonkTcTypeToType env3 bind_ty
       ; new_rvs <- zonkIdBndrs env3 rvs
       ; new_lvs <- zonkIdBndrs env3 lvs
       ; new_ret_ty  <- zonkTcTypeToType env3 ret_ty
         -- The rec-ids scope over the segment statements
       ; let env4 = extendIdZonkEnvRec env3 new_rvs
       ; (env5, new_segStmts) <- zonkStmts env4 zBody segStmts
        -- Zonk the ret-expressions in an envt that
        -- has the polymorphic bindings in the envt
       ; new_later_rets <- mapM (zonkExpr env5) later_rets
       ; new_rec_rets <- mapM (zonkExpr env5) rec_rets
       ; return (extendIdZonkEnvRec env3 new_lvs,     -- Only the lvs are needed
                 RecStmt { recS_stmts = new_segStmts, recS_later_ids = new_lvs
                         , recS_rec_ids = new_rvs, recS_ret_fn = new_ret_id
                         , recS_mfix_fn = new_mfix_id, recS_bind_fn = new_bind_id
                         , recS_bind_ty = new_bind_ty
                         , recS_later_rets = new_later_rets
                         , recS_rec_rets = new_rec_rets, recS_ret_ty = new_ret_ty }) }

zonkStmt env zBody (BodyStmt body then_op guard_op ty)
  = do (env1, new_then_op)  <- zonkSyntaxExpr env then_op
       (env2, new_guard_op) <- zonkSyntaxExpr env1 guard_op
       new_body <- zBody env2 body
       new_ty   <- zonkTcTypeToType env2 ty
       return (env2, BodyStmt new_body new_then_op new_guard_op new_ty)

zonkStmt env zBody (LastStmt body noret ret_op)
  = do (env1, new_ret) <- zonkSyntaxExpr env ret_op
       new_body <- zBody env1 body
       -- NOTE(review): returns 'env', not 'env1'; presumably harmless
       -- because LastStmt is the final statement -- confirm
       return (env, LastStmt new_body noret new_ret)

zonkStmt env _ (TransStmt { trS_stmts = stmts, trS_bndrs = binderMap
                          , trS_by = by, trS_form = form, trS_using = using
                          , trS_ret = return_op, trS_bind = bind_op
                          , trS_bind_arg_ty = bind_arg_ty
                          , trS_fmap = liftM_op })
  = do {
    ; (env1, bind_op') <- zonkSyntaxExpr env bind_op
    ; bind_arg_ty' <- zonkTcTypeToType env1 bind_arg_ty
    ; (env2, stmts') <- zonkStmts env1 zonkLExpr stmts
    ; by'        <- fmapMaybeM (zonkLExpr env2) by
    ; using'     <- zonkLExpr env2 using

    ; (env3, return_op') <- zonkSyntaxExpr env2 return_op
    ; binderMap' <- mapM (zonkBinderMapEntry env3) binderMap
    ; liftM_op'  <- zonkExpr env3 liftM_op
      -- The post-grouping binders scope over the rest of the statements
    ; let env3' = extendIdZonkEnvRec env3 (map snd binderMap')
    ; return (env3', TransStmt { trS_stmts = stmts', trS_bndrs = binderMap'
                               , trS_by = by', trS_form = form, trS_using = using'
                               , trS_ret = return_op', trS_bind = bind_op'
                               , trS_bind_arg_ty = bind_arg_ty'
                               , trS_fmap = liftM_op' }) }
  where
    -- The old binder is an occurrence (bound earlier); the new binder
    -- is a fresh binding introduced by the grouping
    zonkBinderMapEntry env  (oldBinder, newBinder) = do
        let oldBinder' = zonkIdOcc env oldBinder
        newBinder' <- zonkIdBndr env newBinder
        return (oldBinder', newBinder')

zonkStmt env _ (LetStmt (L l binds))
  = do (env1, new_binds) <- zonkLocalBinds env binds
       return (env1, LetStmt (L l new_binds))

zonkStmt env zBody (BindStmt pat body bind_op fail_op bind_ty)
  = do  { (env1, new_bind) <- zonkSyntaxExpr env bind_op
        ; new_bind_ty <- zonkTcTypeToType env1 bind_ty
        ; new_body <- zBody env1 body
        ; (env2, new_pat) <- zonkPat env1 pat
        ; (_, new_fail) <- zonkSyntaxExpr env1 fail_op
        ; return (env2, BindStmt new_pat new_body new_bind new_fail new_bind_ty) }

-- Scopes: join > ops (in reverse order) > pats (in forward order)
--              > rest of stmts
zonkStmt env _zBody (ApplicativeStmt args mb_join body_ty)
  = do  { (env1, new_mb_join)   <- zonk_join env mb_join
        ; (env2, new_args)      <- zonk_args env1 args
        ; new_body_ty           <- zonkTcTypeToType env2 body_ty
        ; return (env2, ApplicativeStmt new_args new_mb_join new_body_ty) }
  where
    zonk_join env Nothing  = return (env, Nothing)
    zonk_join env (Just j) = second Just <$> zonkSyntaxExpr env j

    get_pat (_, ApplicativeArgOne pat _ _) = pat
    get_pat (_, ApplicativeArgMany _ _ pat) = pat

    replace_pat pat (op, ApplicativeArgOne _ a isBody)
      = (op, ApplicativeArgOne pat a isBody)
    replace_pat pat (op, ApplicativeArgMany a b _)
      = (op, ApplicativeArgMany a b pat)

    -- Ops are zonked back-to-front, patterns front-to-back, then the
    -- freshly zonked patterns are spliced back into the args
    zonk_args env args
      = do { (env1, new_args_rev) <- zonk_args_rev env (reverse args)
           ; (env2, new_pats)     <- zonkPats env1 (map get_pat args)
           ; return (env2, zipWith replace_pat new_pats (reverse new_args_rev)) }

     -- these need to go backward, because if any operators are higher-rank,
     -- later operators may introduce skolems that are in scope for earlier
     -- arguments
    zonk_args_rev env ((op, arg) : args)
      = do { (env1, new_op)   <- zonkSyntaxExpr env op
           ; new_arg          <- zonk_arg env1 arg
           ; (env2, new_args) <- zonk_args_rev env1 args
           ; return (env2, (new_op, new_arg) : new_args) }
    zonk_args_rev env [] = return (env, [])

    zonk_arg env (ApplicativeArgOne pat expr isBody)
      = do { new_expr <- zonkLExpr env expr
           ; return (ApplicativeArgOne pat new_expr isBody) }
    zonk_arg env (ApplicativeArgMany stmts ret pat)
      = do { (env1, new_stmts) <- zonkStmts env zonkLExpr stmts
           ; new_ret           <- zonkExpr env1 ret
           ; return (ApplicativeArgMany new_stmts new_ret pat) }
-------------------------------------------------------------------------
zonkRecFields :: ZonkEnv -> HsRecordBinds GhcTcId -> TcM (HsRecordBinds GhcTcId)
-- | Zonk the fields of a record construction: each field label and its
-- argument expression.
zonkRecFields env (HsRecFields flds dd) = do
  flds' <- mapM zonk_field flds
  return (HsRecFields flds' dd)
  where
    zonk_field (L l fld) = do
      lbl' <- wrapLocM (zonkFieldOcc env) (hsRecFieldLbl fld)
      arg' <- zonkLExpr env (hsRecFieldArg fld)
      return (L l (fld { hsRecFieldLbl = lbl'
                       , hsRecFieldArg = arg' }))
zonkRecUpdFields :: ZonkEnv -> [LHsRecUpdField GhcTcId]
                 -> TcM [LHsRecUpdField GhcTcId]
-- | Zonk the fields of a record update; the (no longer ambiguous)
-- field occurrence is converted back with 'ambiguousFieldOcc'.
zonkRecUpdFields env = mapM zonk_field
  where
    zonk_field (L l fld) = do
      lbl' <- wrapLocM (zonkFieldOcc env) (hsRecUpdFieldOcc fld)
      arg' <- zonkLExpr env (hsRecFieldArg fld)
      return (L l (fld { hsRecFieldLbl = fmap ambiguousFieldOcc lbl'
                       , hsRecFieldArg = arg' }))
-------------------------------------------------------------------------
mapIPNameTc :: (a -> TcM b) -> Either (Located HsIPName) a
            -> TcM (Either (Located HsIPName) b)
-- | Apply a monadic function to the 'Right' side of an implicit-parameter
-- binder; an explicit IP name ('Left') passes through unchanged.
mapIPNameTc _ (Left x)  = return (Left x)
mapIPNameTc f (Right x) = Right <$> f x
{-
************************************************************************
* *
\subsection[BackSubst-Pats]{Patterns}
* *
************************************************************************
-}
zonkPat :: ZonkEnv -> OutPat GhcTcId -> TcM (ZonkEnv, OutPat GhcTc)
-- | Zonk a located pattern.  The environment is extended as we go,
-- because one pattern may bind something used in another (inside or to
-- the right).
zonkPat env = wrapLocSndM (zonk_pat env)
-- | Zonk one pattern, one equation per 'Pat' constructor.  Variable
-- binders extend the returned environment.
zonk_pat :: ZonkEnv -> Pat GhcTcId -> TcM (ZonkEnv, Pat GhcTc)
zonk_pat env (ParPat p)
  = do  { (env', p') <- zonkPat env p
        ; return (env', ParPat p') }

zonk_pat env (WildPat ty)
  = do  { ty' <- zonkTcTypeToType env ty
        ; ensureNotLevPoly ty'
            (text "In a wildcard pattern")
        ; return (env, WildPat ty') }

zonk_pat env (VarPat (L l v))
  = do  { v' <- zonkIdBndr env v
        ; return (extendIdZonkEnv1 env v', VarPat (L l v')) }

zonk_pat env (LazyPat pat)
  = do  { (env', pat') <- zonkPat env pat
        ; return (env',  LazyPat pat') }

zonk_pat env (BangPat pat)
  = do  { (env', pat') <- zonkPat env pat
        ; return (env',  BangPat pat') }

zonk_pat env (AsPat (L loc v) pat)
  = do  { v' <- zonkIdBndr env v
        ; (env', pat') <- zonkPat (extendIdZonkEnv1 env v') pat
        ; return (env', AsPat (L loc v') pat') }

zonk_pat env (ViewPat expr pat ty)
  = do  { expr' <- zonkLExpr env expr
        ; (env', pat') <- zonkPat env pat
        ; ty' <- zonkTcTypeToType env ty
        ; return (env', ViewPat expr' pat' ty') }

zonk_pat env (ListPat pats ty Nothing)
  = do  { ty' <- zonkTcTypeToType env ty
        ; (env', pats') <- zonkPats env pats
        ; return (env', ListPat pats' ty' Nothing) }

-- Overloaded list pattern: also zonk the 'toList' witness and the
-- overall list type
zonk_pat env (ListPat pats ty (Just (ty2,wit)))
  = do  { (env', wit') <- zonkSyntaxExpr env wit
        ; ty2' <- zonkTcTypeToType env' ty2
        ; ty' <- zonkTcTypeToType env' ty
        ; (env'', pats') <- zonkPats env' pats
        ; return (env'', ListPat pats' ty' (Just (ty2',wit'))) }

zonk_pat env (PArrPat pats ty)
  = do  { ty' <- zonkTcTypeToType env ty
        ; (env', pats') <- zonkPats env pats
        ; return (env', PArrPat pats' ty') }

zonk_pat env (TuplePat pats boxed tys)
  = do  { tys' <- mapM (zonkTcTypeToType env) tys
        ; (env', pats') <- zonkPats env pats
        ; return (env', TuplePat pats' boxed tys') }

zonk_pat env (SumPat pat alt arity tys)
  = do  { tys' <- mapM (zonkTcTypeToType env) tys
        ; (env', pat') <- zonkPat env pat
        ; return (env', SumPat pat' alt arity tys') }

zonk_pat env p@(ConPatOut { pat_arg_tys = tys, pat_tvs = tyvars
                          , pat_dicts = evs, pat_binds = binds
                          , pat_args = args, pat_wrap = wrapper
                          , pat_con = L _ con })
  = ASSERT( all isImmutableTyVar tyvars )
    do  { new_tys <- mapM (zonkTcTypeToType env) tys

          -- an unboxed tuple pattern (but only an unboxed tuple pattern)
          -- might have levity-polymorphic arguments. Check for this badness.
        ; case con of
            RealDataCon dc
              | isUnboxedTupleTyCon (dataConTyCon dc)
              -> mapM_ (checkForLevPoly doc) (dropRuntimeRepArgs new_tys)
            _ -> return ()

        ; (env0, new_tyvars) <- zonkTyBndrsX env tyvars
          -- Must zonk the existential variables, because their
          -- /kind/ may need zonking.
          -- cf typecheck/should_compile/tc221.hs
        ; (env1, new_evs) <- zonkEvBndrsX env0 evs
        ; (env2, new_binds) <- zonkTcEvBinds env1 binds
        ; (env3, new_wrapper) <- zonkCoFn env2 wrapper
        ; (env', new_args) <- zonkConStuff env3 args
        ; return (env', p { pat_arg_tys = new_tys,
                            pat_tvs = new_tyvars,
                            pat_dicts = new_evs,
                            pat_binds = new_binds,
                            pat_args = new_args,
                            pat_wrap = new_wrapper}) }
  where
    doc = text "In the type of an element of an unboxed tuple pattern:" $$ ppr p

zonk_pat env (LitPat lit) = return (env, LitPat lit)

zonk_pat env (SigPatOut pat ty)
  = do  { ty' <- zonkTcTypeToType env ty
        ; (env', pat') <- zonkPat env pat
        ; return (env', SigPatOut pat' ty') }

zonk_pat env (NPat (L l lit) mb_neg eq_expr ty)
  = do  { (env1, eq_expr') <- zonkSyntaxExpr env eq_expr
        ; (env2, mb_neg') <- case mb_neg of
            Nothing -> return (env1, Nothing)
            Just n  -> second Just <$> zonkSyntaxExpr env1 n

        ; lit' <- zonkOverLit env2 lit
        ; ty' <- zonkTcTypeToType env2 ty
        ; return (env2, NPat (L l lit') mb_neg' eq_expr' ty') }

zonk_pat env (NPlusKPat (L loc n) (L l lit1) lit2 e1 e2 ty)
  = do  { (env1, e1') <- zonkSyntaxExpr env  e1
        ; (env2, e2') <- zonkSyntaxExpr env1 e2
        ; n' <- zonkIdBndr env2 n
        ; lit1' <- zonkOverLit env2 lit1
        ; lit2' <- zonkOverLit env2 lit2
        ; ty' <- zonkTcTypeToType env2 ty
        ; return (extendIdZonkEnv1 env2 n',
                  NPlusKPat (L loc n') (L l lit1') lit2' e1' e2' ty') }

zonk_pat env (CoPat co_fn pat ty)
  = do { (env', co_fn') <- zonkCoFn env co_fn
       ; (env'', pat') <- zonkPat env' (noLoc pat)
       ; ty' <- zonkTcTypeToType env'' ty
       ; return (env'', CoPat co_fn' (unLoc pat') ty') }

-- Renamer-only patterns should not survive typechecking
zonk_pat _ pat = pprPanic "zonk_pat" (ppr pat)
---------------------------
-- | Zonk the argument patterns of a constructor pattern (prefix, infix
-- or record form).
zonkConStuff :: ZonkEnv
             -> HsConDetails (OutPat GhcTcId) (HsRecFields id (OutPat GhcTcId))
             -> TcM (ZonkEnv,
                     HsConDetails (OutPat GhcTc) (HsRecFields id (OutPat GhcTc)))
zonkConStuff env (PrefixCon pats)
  = do  { (env', pats') <- zonkPats env pats
        ; return (env', PrefixCon pats') }

zonkConStuff env (InfixCon p1 p2)
  = do  { (env1, p1') <- zonkPat env  p1
        ; (env', p2') <- zonkPat env1 p2
        ; return (env', InfixCon p1' p2') }

zonkConStuff env (RecCon (HsRecFields rpats dd))
  = do  { (env', pats') <- zonkPats env (map (hsRecFieldArg . unLoc) rpats)
        ; let rpats' = zipWith (\(L l rp) p' -> L l (rp { hsRecFieldArg = p' }))
                               rpats pats'
        ; return (env', RecCon (HsRecFields rpats' dd)) }
        -- Field selectors have declared types; hence no zonking
---------------------------
-- Zonk a list of patterns left to right, threading the ZonkEnv so that
-- binders introduced by earlier patterns are in scope for later ones.
zonkPats :: ZonkEnv -> [OutPat GhcTcId] -> TcM (ZonkEnv, [OutPat GhcTc])
zonkPats env0 pats0 = go env0 pats0
  where
    go env []         = return (env, [])
    go env (pat:pats) = do { (env1, pat')  <- zonkPat env pat
                           ; (env2, pats') <- go env1 pats
                           ; return (env2, pat' : pats') }
{-
************************************************************************
* *
\subsection[BackSubst-Foreign]{Foreign exports}
* *
************************************************************************
-}
-- Zonk every located foreign declaration in the list.
zonkForeignExports :: ZonkEnv -> [LForeignDecl GhcTcId]
                   -> TcM [LForeignDecl GhcTc]
zonkForeignExports env ls = mapM (wrapLocM (zonkForeignExport env)) ls
-- Zonk a single foreign declaration: only exports need work (the bound
-- name is looked up in the ZonkEnv); imports pass through untouched.
zonkForeignExport :: ZonkEnv -> ForeignDecl GhcTcId -> TcM (ForeignDecl GhcTc)
zonkForeignExport env (ForeignExport { fd_name = i, fd_co = co, fd_fe = spec })
  = return (ForeignExport { fd_name = fmap (zonkIdOcc env) i
                          -- NOTE(review): fd_sig_ty is deliberately left
                          -- 'undefined' here — presumably it is never
                          -- consumed after type checking. Verify before
                          -- forcing this field downstream.
                          , fd_sig_ty = undefined, fd_co = co
                          , fd_fe = spec })
zonkForeignExport _ for_imp
  = return for_imp     -- Foreign imports don't need zonking
-- Zonk every located RULE declaration.
zonkRules :: ZonkEnv -> [LRuleDecl GhcTcId] -> TcM [LRuleDecl GhcTc]
zonkRules env rs = mapM (wrapLocM (zonkRule env)) rs
-- Zonk one RULE: zonk the binders first, then the LHS with a skolemising
-- zonker (see Note [Zonking the LHS of a RULE]) and the RHS normally.
zonkRule :: ZonkEnv -> RuleDecl GhcTcId -> TcM (RuleDecl GhcTc)
zonkRule env (HsRule name act (vars{-::[RuleBndr TcId]-}) lhs fv_lhs rhs fv_rhs)
  = do { (env_inside, new_bndrs) <- mapAccumLM zonk_bndr env vars
       ; let env_lhs = setZonkType env_inside zonkTvSkolemising
              -- See Note [Zonking the LHS of a RULE]
       ; new_lhs <- zonkLExpr env_lhs    lhs
       ; new_rhs <- zonkLExpr env_inside rhs
       ; return (HsRule name act new_bndrs new_lhs fv_lhs new_rhs fv_rhs) }
  where
   zonk_bndr env (L l (RuleBndr (L loc v)))
      = do { (env', v') <- zonk_it env v
           ; return (env', L l (RuleBndr (L loc v'))) }
   -- RuleBndrSig is eliminated before this pass, hence the panic.
   zonk_bndr _ (L _ (RuleBndrSig {})) = panic "zonk_bndr RuleBndrSig"
   -- Ids are zonked as binders and added recursively to the env;
   -- type variables must already be immutable and are zonked as binders.
   zonk_it env v
     | isId v     = do { v' <- zonkIdBndr env v
                       ; return (extendIdZonkEnvRec env [v'], v') }
     | otherwise  = ASSERT( isImmutableTyVar v)
                    zonkTyBndrX env v
                    -- DV: used to be return (env,v) but that is plain
                    -- wrong because we may need to go inside the kind
                    -- of v and zonk there!
-- Zonk every located vectorisation declaration.
zonkVects :: ZonkEnv -> [LVectDecl GhcTcId] -> TcM [LVectDecl GhcTc]
zonkVects env = mapM (wrapLocM (zonkVect env))
-- Zonk one vectorisation declaration; the *In forms must have been
-- converted to *Out forms by the type checker, hence the panics.
zonkVect :: ZonkEnv -> VectDecl GhcTcId -> TcM (VectDecl GhcTc)
zonkVect env (HsVect s v e)
  = do { v' <- wrapLocM (zonkIdBndr env) v
       ; e' <- zonkLExpr env e
       ; return $ HsVect s v' e'
       }
zonkVect env (HsNoVect s v)
  = do { v' <- wrapLocM (zonkIdBndr env) v
       ; return $ HsNoVect s v'
       }
zonkVect _env (HsVectTypeOut s t rt)
  = return $ HsVectTypeOut s t rt
zonkVect _ (HsVectTypeIn _ _ _ _) = panic "TcHsSyn.zonkVect: HsVectTypeIn"
zonkVect _env (HsVectClassOut c)
  = return $ HsVectClassOut c
zonkVect _ (HsVectClassIn _ _) = panic "TcHsSyn.zonkVect: HsVectClassIn"
zonkVect _env (HsVectInstOut i)
  = return $ HsVectInstOut i
zonkVect _ (HsVectInstIn _) = panic "TcHsSyn.zonkVect: HsVectInstIn"
{-
************************************************************************
* *
Constraints and evidence
* *
************************************************************************
-}
-- Zonk an evidence term, structurally zonking any embedded coercions,
-- types and sub-terms. EvId occurrences are resolved via the ZonkEnv.
zonkEvTerm :: ZonkEnv -> EvTerm -> TcM EvTerm
zonkEvTerm env (EvId v)           = ASSERT2( isId v, ppr v )
                                    zonkEvVarOcc env v
zonkEvTerm env (EvCoercion co)    = do { co' <- zonkCoToCo env co
                                       ; return (EvCoercion co') }
zonkEvTerm env (EvCast tm co)     = do { tm' <- zonkEvTerm env tm
                                       ; co' <- zonkCoToCo env co
                                       ; return (mkEvCast tm' co') }
zonkEvTerm _   (EvLit l)          = return (EvLit l)
zonkEvTerm env (EvTypeable ty ev) =
  do { ev' <- zonkEvTypeable env ev
     ; ty' <- zonkTcTypeToType env ty
     ; return (EvTypeable ty' ev') }
zonkEvTerm env (EvCallStack cs)
  = case cs of
      EvCsEmpty -> return (EvCallStack cs)
      -- Only the pushed evidence term needs zonking.
      EvCsPushCall n l tm -> do { tm' <- zonkEvTerm env tm
                                ; return (EvCallStack (EvCsPushCall n l tm')) }
zonkEvTerm env (EvSuperClass d n) = do { d' <- zonkEvTerm env d
                                       ; return (EvSuperClass d' n) }
zonkEvTerm env (EvDFunApp df tys tms)
  = do { tys' <- zonkTcTypeToTypes env tys
       ; tms' <- mapM (zonkEvTerm env) tms
       ; return (EvDFunApp (zonkIdOcc env df) tys' tms') }
zonkEvTerm env (EvDelayedError ty msg)
  = do { ty' <- zonkTcTypeToType env ty
       ; return (EvDelayedError ty' msg) }
zonkEvTerm env (EvSelector sel_id tys tms)
  = do { sel_id' <- zonkIdBndr env sel_id
       ; tys'    <- zonkTcTypeToTypes env tys
       ; tms'    <- mapM (zonkEvTerm env) tms
       ; return (EvSelector sel_id' tys' tms') }
-- Zonk Typeable evidence by zonking each embedded evidence term;
-- the structure of the evidence itself is preserved.
zonkEvTypeable :: ZonkEnv -> EvTypeable -> TcM EvTypeable
zonkEvTypeable env ev = case ev of
  EvTypeableTyCon tycon es -> EvTypeableTyCon tycon <$> mapM (zonkEvTerm env) es
  EvTypeableTyApp t1 t2    -> EvTypeableTyApp <$> zonkEvTerm env t1
                                              <*> zonkEvTerm env t2
  EvTypeableTrFun t1 t2    -> EvTypeableTrFun <$> zonkEvTerm env t1
                                              <*> zonkEvTerm env t2
  EvTypeableTyLit t1       -> EvTypeableTyLit <$> zonkEvTerm env t1
-- Zonk several groups of evidence bindings and merge them into a single
-- EvBinds group. NB: the 'env' binding in the do-block shadows the
-- parameter of the same name; the shadowed value is the updated env.
zonkTcEvBinds_s :: ZonkEnv -> [TcEvBinds] -> TcM (ZonkEnv, [TcEvBinds])
zonkTcEvBinds_s env bs = do { (env, bs') <- mapAccumLM zonk_tc_ev_binds env bs
                            ; return (env, [EvBinds (unionManyBags bs')]) }
-- Zonk one group of evidence bindings, rewrapping the result as EvBinds.
zonkTcEvBinds :: ZonkEnv -> TcEvBinds -> TcM (ZonkEnv, TcEvBinds)
zonkTcEvBinds env bs = do { (env', bs') <- zonk_tc_ev_binds env bs
                          ; return (env', EvBinds bs') }
-- Dispatch on whether the bindings are still behind a mutable variable.
zonk_tc_ev_binds :: ZonkEnv -> TcEvBinds -> TcM (ZonkEnv, Bag EvBind)
zonk_tc_ev_binds env (TcEvBinds var) = zonkEvBindsVar env var
zonk_tc_ev_binds env (EvBinds bs)    = zonkEvBinds env bs
-- Read the bindings out of the mutable EvBindsVar and zonk them.
zonkEvBindsVar :: ZonkEnv -> EvBindsVar -> TcM (ZonkEnv, Bag EvBind)
zonkEvBindsVar env (EvBindsVar { ebv_binds = ref })
  = do { bs <- readMutVar ref
       ; zonkEvBinds env (evBindMapBinds bs) }
-- Zonk a bag of (possibly mutually recursive) evidence bindings.
-- Uses fixM to tie the knot: the env extended with the *zonked* binders
-- is used while zonking the bindings themselves, so binders may be
-- referenced recursively. Do not force new_binds inside the fixM body.
zonkEvBinds :: ZonkEnv -> Bag EvBind -> TcM (ZonkEnv, Bag EvBind)
zonkEvBinds env binds
  = {-# SCC "zonkEvBinds" #-}
    fixM (\ ~( _, new_binds) -> do
         { let env1 = extendIdZonkEnvRec env (collect_ev_bndrs new_binds)
         ; binds' <- mapBagM (zonkEvBind env1) binds
         ; return (env1, binds') })
  where
    collect_ev_bndrs :: Bag EvBind -> [EvVar]
    collect_ev_bndrs = foldrBag add []
    add (EvBind { eb_lhs = var }) vars = var : vars
-- Zonk a single evidence binding: the binder, then the RHS term.
zonkEvBind :: ZonkEnv -> EvBind -> TcM EvBind
zonkEvBind env bind@(EvBind { eb_lhs = var, eb_rhs = term })
  = do { var'  <- {-# SCC "zonkEvBndr" #-} zonkEvBndr env var
       -- Optimise the common case of Refl coercions
       -- See Note [Optimise coercion zonking]
       -- This has a very big effect on some programs (eg Trac #5030)
       ; term' <- case getEqPredTys_maybe (idType var') of
           Just (r, ty1, ty2) | ty1 `eqType` ty2
                  -> return (EvCoercion (mkTcReflCo r ty1))
           _other -> zonkEvTerm env term
       ; return (bind { eb_lhs = var', eb_rhs = term' }) }
{-
************************************************************************
* *
Zonking types
* *
************************************************************************
Note [Zonking mutable unbound type or kind variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In zonkTypeZapping, we zonk mutable but unbound type or kind variables to an
arbitrary type. We know if they are unbound even though we don't carry an
environment, because at the binding site for a variable we bind the mutable
var to a fresh immutable one. So the mutable store plays the role of an
environment. If we come across a mutable variable that isn't so bound, it
must be completely free. We zonk the expected kind to make sure we don't get
some unbound meta variable as the kind.
Note that since we have kind polymorphism, zonk_unbound_tyvar will handle both
type and kind variables. Consider the following datatype:
data Phantom a = Phantom Int
The type of Phantom is (forall (k : *). forall (a : k). Int). Both `a` and
`k` are unbound variables. We want to zonk this to
(forall (k : Any *). forall (a : Any (Any *)). Int).
Note [Optimise coercion zonking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When optimising evidence binds we may come across situations where
a coercion looks like
cv = ReflCo ty
or cv1 = cv2
where the type 'ty' is big. In such cases it is a waste of time to zonk both
* The variable on the LHS
* The coercion on the RHS
Rather, we can zonk the variable, and if its type is (ty ~ ty), we can just
use Refl on the right, ignoring the actual coercion on the RHS.
This can have a very big effect, because the constraint solver sometimes does go
to a lot of effort to prove Refl! (Eg when solving 10+3 = 10+3; cf Trac #5030)
-}
-- Zonk an occurrence of a type variable. Meta variables are read through
-- their mutable ref (unbound ones are handed to the env's unbound-tyvar
-- zonker); skolems and non-Tc vars are looked up in the env like Ids.
zonkTyVarOcc :: ZonkEnv -> TyVar -> TcM TcType
zonkTyVarOcc env@(ZonkEnv zonk_unbound_tyvar tv_env _) tv
  | isTcTyVar tv
  = case tcTyVarDetails tv of
      SkolemTv {}    -> lookup_in_env
      RuntimeUnk {}  -> lookup_in_env
      MetaTv { mtv_ref = ref }
        -> do { cts <- readMutVar ref
              ; case cts of
                  Flexi -> do { kind <- {-# SCC "zonkKind1" #-}
                                        zonkTcTypeToType env (tyVarKind tv)
                              ; zonk_unbound_tyvar (setTyVarKind tv kind) }
                  Indirect ty -> do { zty <- zonkTcTypeToType env ty
                                    -- Small optimisation: shorten out indirect steps
                                    -- so that the old type may be more easily collected.
                                    ; writeMutVar ref (Indirect zty)
                                    ; return zty } }
  | otherwise
  = lookup_in_env
  where
    lookup_in_env    -- Look up in the env just as we do for Ids
      = case lookupVarEnv tv_env tv of
          Nothing  -> mkTyVarTy <$> updateTyVarKindM (zonkTcTypeToType env) tv
          Just tv' -> return (mkTyVarTy tv')
-- Zonk an occurrence of a coercion variable. The knot-tied env is not
-- consulted for the variable's type, only for a pre-zonked replacement.
zonkCoVarOcc :: ZonkEnv -> CoVar -> TcM Coercion
zonkCoVarOcc env@(ZonkEnv _ tyco_env _) cv
  | Just cv' <- lookupVarEnv tyco_env cv  -- don't look in the knot-tied env
  = return $ mkCoVarCo cv'
  | otherwise
  = mkCoVarCo <$> updateVarTypeM (zonkTcTypeToType env) cv
-- Zonk a coercion hole. A filled hole yields its (zonked, checked)
-- coercion; an unfilled hole can occur only after type errors, and is
-- reconstituted from its (zonked) role and endpoint types.
zonkCoHole :: ZonkEnv -> CoercionHole
           -> Role -> Type -> Type  -- these are all redundant with
                                    -- the details in the hole,
                                    -- unzonked
           -> TcM Coercion
zonkCoHole env h r t1 t2
  = do { contents <- unpackCoercionHole_maybe h
       ; case contents of
           Just co -> do { co <- zonkCoToCo env co
                         ; checkCoercionHole co h r t1 t2 }
              -- This next case should happen only in the presence of
              -- (undeferred) type errors. Originally, I put in a panic
              -- here, but that caused too many uses of `failIfErrsM`.
           Nothing -> do { traceTc "Zonking unfilled coercion hole" (ppr h)
                         ; when debugIsOn $
                           whenNoErrs $
                           MASSERT2( False
                                   , text "Type-correct unfilled coercion hole"
                                     <+> ppr h )
                         ; t1 <- zonkTcTypeToType env t1
                         ; t2 <- zonkTcTypeToType env t2
                         ; return $ mkHoleCo h r t1 t2 } }
-- The TyCoMapper that drives type/coercion zonking: each kind of
-- occurrence is handled by the corresponding zonk function above.
zonk_tycomapper :: TyCoMapper ZonkEnv TcM
zonk_tycomapper = TyCoMapper
  { tcm_smart = True   -- Establish type invariants
                       -- See Note [Type-checking inside the knot] in TcHsType
  , tcm_tyvar = zonkTyVarOcc
  , tcm_covar = zonkCoVarOcc
  , tcm_hole  = zonkCoHole
  , tcm_tybinder = \env tv _vis -> zonkTyBndrX env tv }
-- Confused by zonking? See Note [What is zonking?] in TcMType.
-- Zonk a TcType to a final Type by mapping with zonk_tycomapper.
zonkTcTypeToType :: ZonkEnv -> TcType -> TcM Type
zonkTcTypeToType = mapType zonk_tycomapper
-- Zonk a list of types under the same env.
zonkTcTypeToTypes :: ZonkEnv -> [TcType] -> TcM [Type]
zonkTcTypeToTypes env tys = mapM (zonkTcTypeToType env) tys
-- Zonk a coercion by mapping with zonk_tycomapper.
zonkCoToCo :: ZonkEnv -> Coercion -> TcM Coercion
zonkCoToCo = mapCoercion zonk_tycomapper
zonkSigType :: TcType -> TcM Type
-- Zonk the type obtained from a user type signature
-- We want to turn any quantified (forall'd) variables into TyVars
-- but we may find some free TcTyVars, and we want to leave them
-- completely alone. They may even have unification variables inside
-- e.g.  f (x::a) = ...(e :: a -> a)....
-- The type sig for 'e' mentions a free 'a' which will be a
-- unification SigTv variable.
zonkSigType = zonkTcTypeToType (mkEmptyZonkEnv zonk_unbound_tv)
  where
    zonk_unbound_tv :: UnboundTyVarZonker
    -- Leave free (unbound) type variables exactly as they are.
    zonk_unbound_tv tv = return (mkTyVarTy tv)
zonkTvSkolemising :: UnboundTyVarZonker
-- This variant is used for the LHS of rules
-- See Note [Zonking the LHS of a RULE].
-- Turns an unbound meta variable into a fresh immutable TyVar and
-- side-effects the meta variable to point at it.
zonkTvSkolemising tv
  = do { let tv' = mkTyVar (tyVarName tv) (tyVarKind tv)
                  -- NB: the kind of tv is already zonked
             ty = mkTyVarTy tv'
                  -- Make a proper TyVar (remember we
                  -- are now done with type checking)
       ; writeMetaTyVar tv ty
       ; return ty }
zonkTypeZapping :: UnboundTyVarZonker
-- This variant is used for everything except the LHS of rules
-- It zaps unbound type variables to Any, except for RuntimeRep
-- vars which it zonks to LiftedRep
-- Works on both types and kinds
zonkTypeZapping tv
  = do { let ty | isRuntimeRepVar tv = liftedRepTy
                | otherwise          = anyTypeOfKind (tyVarKind tv)
       ; writeMetaTyVar tv ty
       ; return ty }
---------------------------------------
{- Note [Zonking the LHS of a RULE]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See also DsBinds Note [Free tyvars on rule LHS]
We need to gather the type variables mentioned on the LHS so we can
quantify over them. Example:
data T a = C
foo :: T a -> Int
foo C = 1
{-# RULES "myrule" foo C = 1 #-}
After type checking the LHS becomes (foo alpha (C alpha)) and we do
not want to zap the unbound meta-tyvar 'alpha' to Any, because that
limits the applicability of the rule. Instead, we want to quantify
over it!
We do this in two stages.
* During zonking, we skolemise the TcTyVar 'alpha' to TyVar 'a'. We
do this by using zonkTvSkolemising as the UnboundTyVarZonker in the
ZonkEnv. (This is in fact the whole reason that the ZonkEnv has a
UnboundTyVarZonker.)
* In DsBinds, we quantify over it. See DsBinds
Note [Free tyvars on rule LHS]
Quantifying here is awkward because (a) the data type is big and (b)
finding the free type vars of an expression is necessarily a monadic
operation. (Consider /\a -> f @ b, where b is side-effected to a.)
-}
| ezyang/ghc | compiler/typecheck/TcHsSyn.hs | bsd-3-clause | 69,587 | 40 | 19 | 19,958 | 18,877 | 9,578 | 9,299 | -1 | -1 |
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# LANGUAGE TemplateHaskell #-}
module Prednote.Core.Properties where
import Prednote.Core.Instances ()
import Prednote.Core
import Test.QuickCheck.Function
import Prelude hiding (not, any, all)
import qualified Prelude
import Test.Tasty
import Test.Tasty.QuickCheck
import Test.Tasty.TH
-- | All properties in this module, collected by Template Haskell.
tests :: TestTree
tests = $(testGroupGenerator)
-- | 'test' specialised to 'Int', pinning down otherwise-ambiguous types.
testInt :: Pred Int -> Int -> Bool
testInt = test
-- (&&&) must not force its right operand when the left is false;
-- reaching the '|| True' proves no bottom was demanded.
prop_andIsLazyInSecondArgument i
  = testInt (false &&& undefined) i || True
-- (|||) must not force its right operand when the left is true.
prop_orIsLazyInSecondArgument i
  = testInt (true ||| undefined) i || True
-- | Project the first component of a triple, discarding the rest.
fst3 :: (a, b, c) -> a
fst3 (x, _, _) = x
-- (&&&) agrees with Prelude (&&) on the predicates' boolean results.
prop_andIsLikePreludeAnd (Fun _ f1) (Fun _ f2) i
  = testInt (p1 &&& p2) i == (fst3 (f1 i) && fst3 (f2 i))
  where
    p1 = predicate f1
    p2 = predicate f2
-- (|||) agrees with Prelude (||).
prop_orIsLikePreludeOr (Fun _ f1) (Fun _ f2) i
  = testInt (p1 ||| p2) i == (fst3 (f1 i) || fst3 (f2 i))
  where
    p1 = predicate f1
    p2 = predicate f2
-- 'not' agrees with Prelude 'not'.
prop_notIsLikePreludeNot (Fun _ f1) i
  = testInt (not p1) i == Prelude.not (fst3 (f1 i))
  where
    p1 = predicate f1
-- 'switch' must not force its first (Left) predicate on Right input.
prop_switchIsLazyInFirstArgument pb i
  = test (switch undefined pb) (Right i) || True
  where
    _types = pb :: Pred Int
-- 'switch' must not force its second (Right) predicate on Left input.
prop_switchIsLazyInSecondArgument pa i
  = test (switch pa undefined) (Left i) || True
  where
    _types = pa :: Pred Int
-- 'switch' dispatches Left to the first predicate, Right to the second.
prop_switch (Fun _ fa) (Fun _ fb) ei
  = test (switch pa pb) ei == expected
  where
    _types = ei :: Either Int Char
    expected = case ei of
      Left i -> fst3 (fa i)
      Right c -> fst3 (fb c)
    pa = predicate fa
    pb = predicate fb
-- 'true' always succeeds; 'false' always fails; 'same' is identity on Bool.
prop_true = testInt true
prop_false = Prelude.not . testInt false
prop_same b = test same b == b
-- 'any'/'all' agree with their Prelude counterparts over the
-- predicates' boolean results.
prop_any (Fun _ f) ls
  = test (any pa) ls == Prelude.any (fmap fst3 f) ls
  where
    pa = predicate f
    _types = ls :: [Int]
prop_all (Fun _ f) ls
  = test (all pa) ls == Prelude.all (fmap fst3 f) ls
  where
    pa = predicate f
    _types = ls :: [Int]
| massysett/prednote | tests/Prednote/Core/Properties.hs | bsd-3-clause | 1,948 | 0 | 12 | 465 | 807 | 417 | 390 | 57 | 2 |
{-# LANGUAGE PackageImports, BangPatterns, TemplateHaskell, QuasiQuotes #-}
{-# OPTIONS -Wall -fno-warn-missing-signatures -fno-warn-incomplete-patterns #-}
module Lib
(
greeter,
fromImageToRepa,
fromRepaToImage,
detectEdgeP,
inversionP,
gaussianBlurP,
setTransparency,
grayscale,
gaussianBlurOutsideFigureWithFrameP,
edgeInsideFigureP,
Figure(Circle, Square, Diamond),
Point(Point),
Color(Red, Green, Blue),
extractColor,
add,
sizeDown,
sizeUp,
brutalSizeUp,
overlay,
zip4,
unzip4
) where
import IO.Files
import IO.Arrays
import Filters.Effects
import Filters.Types
import Filters.Figures
import Filters.General
| MajronMan/Himage | src/Lib.hs | bsd-3-clause | 745 | 0 | 5 | 186 | 121 | 83 | 38 | 38 | 0 |
{-# LANGUAGE TypeFamilies, RecordWildCards, TupleSections, OverloadedStrings, MultiParamTypeClasses, FlexibleInstances, UndecidableInstances, GADTs, RankNTypes #-}
module Aws.S3.Commands.PutObject
where
import Aws.Http
import Aws.Response
import Aws.S3.Info
import Aws.S3.Metadata
import Aws.S3.Model
import Aws.S3.Query
import Aws.S3.Response
import Aws.Signature
import Aws.Transaction
import Control.Applicative
import Control.Arrow (second)
import Data.ByteString.Char8 ({- IsString -})
import Data.Maybe
import qualified Data.ByteString.Char8 as B
import qualified Data.CaseInsensitive as CI
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Network.HTTP.Conduit as HTTP
-- | A request to upload an object to S3. Optional fields map onto the
-- corresponding HTTP headers of the PUT Object call.
data PutObject = PutObject {
  poObjectName :: T.Text,                  -- ^ object key within the bucket
  poBucket :: Bucket,                      -- ^ target bucket
  poContentType :: Maybe B.ByteString,     -- ^ Content-Type header
  poCacheControl :: Maybe T.Text,          -- ^ Cache-Control header
  poContentDisposition :: Maybe T.Text,    -- ^ Content-Disposition header
  poContentEncoding :: Maybe T.Text,       -- ^ Content-Encoding header
  poContentMD5 :: Maybe B.ByteString,      -- ^ Content-MD5 checksum
  poExpires :: Maybe Int,                  -- ^ Expires header value
  poAcl :: Maybe CannedAcl,                -- ^ x-amz-acl canned ACL
  poStorageClass :: Maybe StorageClass,    -- ^ x-amz-storage-class
  poRequestBody :: HTTP.RequestBody IO,    -- ^ object payload
  poMetadata :: [(T.Text,T.Text)]          -- ^ x-amz-meta-* user metadata
}
-- | Construct a minimal 'PutObject' request: just key, bucket and body,
-- with every optional header left unset and no user metadata.
putObject :: T.Text -> Bucket -> HTTP.RequestBody IO -> PutObject
putObject obj bucket body
    = PutObject { poObjectName         = obj
                , poBucket             = bucket
                , poContentType        = Nothing
                , poCacheControl       = Nothing
                , poContentDisposition = Nothing
                , poContentEncoding    = Nothing
                , poContentMD5         = Nothing
                , poExpires            = Nothing
                , poAcl                = Nothing
                , poStorageClass       = Nothing
                , poRequestBody        = body
                , poMetadata           = [] }
-- | Response to a PUT Object request; carries the new version id when
-- the bucket has versioning enabled.
data PutObjectResponse
  = PutObjectResponse {
      porVersionId :: Maybe T.Text  -- ^ value of x-amz-version-id, if present
    }
  deriving (Show)
-- Translate a PutObject record into a signable S3 query: each optional
-- field becomes the corresponding HTTP header when present.
instance SignQuery PutObject where
    type Info PutObject = S3Info
    signQuery PutObject {..} = s3SignQuery S3Query {
                                 s3QMethod = Put
                               , s3QBucket = Just $ T.encodeUtf8 poBucket
                               , s3QSubresources = []
                               , s3QQuery = []
                               , s3QContentType = poContentType
                               , s3QContentMd5 = poContentMD5
                               -- ACL/storage-class headers (when set), plus one
                               -- x-amz-meta-<key> header per metadata pair.
                               -- NB: the first <$> composes via the function
                               -- functor: (tag,) . writeCannedAcl <$> poAcl.
                               , s3QAmzHeaders = map (second T.encodeUtf8) $ catMaybes [
                                     ("x-amz-acl",) <$> writeCannedAcl <$> poAcl
                                   , ("x-amz-storage-class",) <$> writeStorageClass <$> poStorageClass
                                   ] ++ map( \x -> (CI.mk . T.encodeUtf8 $ T.concat ["x-amz-meta-", fst x], snd x)) poMetadata
                               , s3QOtherHeaders = map (second T.encodeUtf8) $ catMaybes [
                                     ("Expires",) . T.pack . show <$> poExpires
                                   , ("Cache-Control",) <$> poCacheControl
                                   , ("Content-Disposition",) <$> poContentDisposition
                                   , ("Content-Encoding",) <$> poContentEncoding
                                   ]
                               , s3QRequestBody = Just poRequestBody
                               , s3QObject = Just $ T.encodeUtf8 poObjectName
                               }
-- Parse the PUT Object response: only the optional x-amz-version-id
-- header is extracted; the body is ignored.
instance ResponseConsumer PutObject PutObjectResponse where
    type ResponseMetadata PutObjectResponse = S3Metadata
    responseConsumer _ = s3ResponseConsumer $ \_status headers _body -> do
      let vid = T.decodeUtf8 `fmap` lookup "x-amz-version-id" headers
      return $ PutObjectResponse vid
instance Transaction PutObject PutObjectResponse
| jgm/aws | Aws/S3/Commands/PutObject.hs | bsd-3-clause | 3,566 | 0 | 17 | 1,282 | 731 | 417 | 314 | 65 | 1 |
{-# LANGUAGE TypeSynonymInstances #-}
-- The Timber compiler <timber-lang.org>
--
-- Copyright 2008-2009 Johan Nordlander <[email protected]>
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the names of the copyright holder and any identified
-- contributors, nor the names of their affiliations, may be used to
-- endorse or promote products derived from this software without
-- specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS ``AS IS'' AND ANY EXPRESS
-- OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-- WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
-- ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
-- OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-- HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
-- STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
-- ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-- POSSIBILITY OF SUCH DAMAGE.
module Termred(termred, redTerm, isFinite, constrs) where
import Control.Monad
import Common
import Core
import PP
import Data.Char
-- Entry point: reduce a whole module, seeding the environment with the
-- imported declarations and (finite) imported equations.
termred (_,ds',_,bs') m = redModule ds' (eqnsOf bs') m
-- Reduce a single term under the given coercion equations.
redTerm coercions e = redExp (initEnv { eqns = coercions }) e
-- True if the expression can be safely inlined (no infinite expansion).
isFinite e = finite initEnv e
-- Reduction environment.
data Env = Env { eqns :: Map Name Exp,    -- known (inlinable) equations
                 args :: [Name],          -- lambda-bound variables in scope
                 cons :: [Map Name Int],  -- constructor groups with arities
                 sels :: TEnv             -- record selectors in scope
               }
initEnv = Env { eqns = [], args = [], cons = cons0, sels = [] }
-- Built-in constructor groups: booleans and lists.
cons0 = [ [(prim TRUE, 0), (prim FALSE, 0)] , [(prim NIL, 0), (prim CONS, 2)] ]
-- Constructor groups (with arities) declared by a set of declarations.
consOf ds = [ map f ce | (_,DData _ _ ce) <- ds ]
  where f (c, Constr te pe _) = (c, length te + length pe)
-- All record selectors declared by a set of declarations.
selsOf ds = concat [ ss | (_,DRec _ _ _ ss) <- ds ]
-- Arity of a constructor: tuples carry theirs; others are looked up.
conArity env (Tuple n _) = n
conArity env c = lookup' (concat (cons env)) c
-- Do the given case alternatives cover some constructor group completely?
complete _ [Tuple _ _] = True
complete _ [] = False
complete [] cs0 = False
complete (cs:css) cs0 = all (`elem`cs0) (dom cs) || complete css cs0
-- Environment extension helpers.
addArgs env vs = env { args = vs ++ args env }
addEqns env eqs = env { eqns = eqs ++ eqns env }
addDecls env ds = env { cons = consOf ds ++ cons env, sels = selsOf ds ++ sels env }
-- Reduce all top-level binding groups of a module.
redModule impDecls impEqs (Module m ns xs ds is bss)
  = do (bss,_) <- redTopBinds env1 bss
       return (Module m ns xs ds is bss)
  where env0 = addDecls initEnv (tdefsOf impDecls ++ tdefsOf ds)
        env1 = addEqns env0 (finiteEqns env0 impEqs)
-- Reduce top-level binding groups front to back, then prune (back to
-- front) bindings that are neither exported nor referenced later.
redTopBinds env [] = return ([], [])
redTopBinds env (bs : bss) = do Binds r te es <- redBinds env bs
                                (bss,vs) <- redTopBinds (addEqns env (finiteEqns env es)) bss
                                let necessary (v,_) = r || maybe (elem v vs) (const True) (fromMod v)
                                    te' = filter necessary te
                                    es' = filter necessary es
                                    bs' = Binds r te' es'
                                    bss' = if null te' then bss else bs':bss
                                return (bss', idents es' ++ vs)
-- Keep only equations that are small and safely inlinable.
finiteEqns env eqs = filter p eqs
  where p (x,e) = isSmall e && finite env e
-- can be safely ignored without changing cbv semantics
-- i.e. True iff evaluating the expression cannot diverge or side-effect.
value (EVar x) = x /= prim New
value (ECon _) = True
value (ELit _) = True
value (ESel e _) = value e
-- Division is a value only if the divisor is a provably non-zero literal.
value (EAp (EVar (Prim IntDiv _)) [e1,e2])
  = value e1 && nonzero e2
value (EAp (EVar (Prim FloatDiv _)) [e1,e2])
  = value e1 && nonzero e2
value (EAp (EVar (Prim _ _)) es)
  = all value es
value (EAp (EVar (Tuple _ _)) es)
  = all value es
value (EAp (ECon c) es) = all value es
value (ELam _ _) = True
value (ERec _ eqs) = all (value . snd) eqs
value e = False
-- True only for literal constants known to be non-zero.
nonzero (ELit (LInt _ n)) = n /= 0
nonzero (ELit (LRat _ n)) = n /= 0
nonzero _ = False
-- may be safely inlined (can't lead to infinite expansion even if part of a recursive binding group)
finite env (EVar (Prim c _)) = True --c `notElem` [ListArray, UniArray, UpdateArray]
finite env (EVar (Tuple _ _)) = True
-- A variable is finite if lambda-bound here, or bound to a finite equation.
finite env (EVar x) = x `elem` args env || maybe False (finite env) (lookup x (eqns env))
finite env (ECon _) = True
finite env (ELit _) = True
finite env (ESel e _) = finite env e
finite env (ELam te e) = finite (addArgs env (dom te)) e
finite env (ERec _ eqs) = all (finite env) (rng eqs)
finite env (EAp e es) = all (finite env) (e:es)
-- Recursive let groups are never considered finite.
finite env (ELet bs e) = fin bs && finite (addArgs env (bvars bs)) e
  where fin (Binds True _ _) = False
        fin (Binds _ _ eqns) = all (finite env . snd) eqns
finite env (ECase e alts) = finite env e && all (finite env . snd) alts
finite env e = False
-- Reduce a binding group, then apply the static delay rule to it.
redBinds env (Binds r te eqns) = do eqns <- redEqns env eqns
                                    bs <- staticDelayRule env (Binds r te eqns)
                                    return bs
-- For a recursive binding group, hoist forward references hidden behind
-- coercion selectors into fresh delayed bindings so they can be resolved.
-- Non-recursive groups are returned unchanged.
staticDelayRule env bs@(Binds rec te eqs)
  | not rec = return bs
  | rec = do (eqs',eqs1) <- walkEqs env te (dom eqs) eqs
             ts <- mapM (const (newTVar Star)) eqs1
             return (Binds rec (te ++ (dom eqs1 `zip` map scheme ts)) (eqs'++eqs1))
-- Walk the equations, collecting freshly delayed bindings; 'fw' is the
-- set of names that are still forward references at this point.
walkEqs env te fw [] = return ([], [])
walkEqs env te fw (eq:eqs) = do (eq,eqs1) <- doEq te eq
                                (eqs,eqs2) <- walkEqs env te (fw \\ [fst eq] ++ dom eqs1) eqs
                                return (eq:eqs, eqs1++eqs2)
  where doEq te eq@(x,e)
          | null ke = do (e,eqs) <- doExp e
                         return ((x,e), eqs)
          | otherwise = return ((x,e), [])
          where ke = quant (lookup' te x)
        doAlt (p,e) = do (e,eqs1) <- doExp e
                         return ((p,e), eqs1)
        -- Selections through coercion labels may need delaying.
        doExp (ESel e l)
          | isCoerceLabel l = maybeDelay (\e -> ESel e l) e
          | otherwise = do (e,eqs1) <- doExp e
                           return (ESel e l, eqs1)
        doExp (ECase e alts) = do (e,eqs1) <- doExp e
                                  (alts,eqss) <- fmap unzip (mapM doAlt alts)
                                  return (ECase e alts, eqs1++concat eqss)
        doExp (EAp e es) = do (e,eqs1) <- doExp e
                              (es,eqss) <- fmap unzip (mapM doExp es)
                              return (EAp e es, eqs1++concat eqss)
        doExp (ELet (Binds r te eqs) e)
          = do (eqs',eqss) <- fmap unzip (mapM (doEq te) eqs)
               (e,eqs1) <- doExp e
               return (ELet (Binds r te eqs') e, eqs1++concat eqss)
        doExp (ERec n eqs) = do (eqs',eqss) <- fmap unzip (mapM (doEq (sels env)) eqs)
                                return (ERec n eqs', concat eqss)
        doExp e = return (e, [])
        -- Replace a forward reference by a fresh name bound to the
        -- delayed selection; other expressions are walked normally.
        maybeDelay f (ESel e l)
          | isCoerceLabel l = maybeDelay (\e -> f (ESel e l)) e
        maybeDelay f (EVar x)
          | x `elem` fw = do y <- newName tempSym
                             return (EVar y, [(y, f (EVar x))])
        maybeDelay f e = do (e,eqs1) <- doExp e
                            return (f e, eqs1)
-- Reduce equations in order; each reduced equation that is small and
-- finite is made available for inlining in the later equations.
redEqns env [] = return []
redEqns env ((x,e):eqns) = do e' <- redExp env e
                              let env' = if finite env e' && isSmall e
                                         then addEqns env [(x,e')]  -- no risk of infinite inlining
                                         else env
                              liftM ((x,e'):) (redEqns env' eqns)
-- Reduce an expression: reduce subterms, then apply beta/eta/case/
-- selector reduction where possible.
redExp env (ERec c eqs) = do es' <- mapM (redExp env) es
                             return (ERec c (ls `zip` es'))
  where (ls,es) = unzip eqs
redExp env (ETempl x t te c) = liftM (ETempl x t te) (redCmd env c)
redExp env (EAct e e') = liftM2 EAct (redExp env e) (redExp env e')
redExp env (EReq e e') = liftM2 EReq (redExp env e) (redExp env e')
redExp env (EDo x t c) = liftM (EDo x t) (redCmd env c)
redExp env (ELam te e) = do e <- redExp (addArgs env (dom te)) e
                            redEta env te e
redExp env (ESel e s) = do e <- redExp env e
                           redSel env e s
redExp env (ECase e alts) = do e <- redExp env e
                               redCase env e alts
-- Non-recursive lets are beta-reduced away; recursive ones are kept.
redExp env (ELet bs e) = do bs'@(Binds rec te eqs) <- redBinds env bs
                            if rec then
                                liftM (ELet bs') (redExp env e)
                             else
                                redBeta (addArgs env (dom te)) te e (map (lookup' eqs) (dom te))
redExp env e@(EVar (Prim {})) = return e
redExp env e@(EVar (Tuple {})) = return e
-- Inline variables bound to trivially duplicable right-hand sides.
redExp env e@(EVar x) = case lookup x (eqns env) of
                          Just e' | inline e' -> alphaConvert e'
                          _ -> return e
  where inline (EVar _) = True
        inline (ECon _) = True
        inline (ELit _) = True
        inline (ELam _ _) = True
        inline _ = isGenerated x
redExp env (EAp e es) = do e' <- redExp env e
                           es' <- mapM (redExp env) es
                           redApp env e' es'
redExp env (ELit l) = return (ELit (normLit l))
redExp env e = return e
-- Normalise integer literals into 32-bit two's-complement range.
normLit (LInt p i)
  | i >= 0x80000000 = normLit (LInt p (i - 0x100000000))
  | i < -0x80000000 = normLit (LInt p (i + 0x100000000))
  | otherwise = LInt p i
normLit l = l
-- Is the expression an application of the Raise primitive?
isRaise (EAp (EVar (Prim Raise _)) [_])
  = True
isRaise _ = False
-- Pattern-match-compilation primitives, which must not swallow raises.
isPMC (EVar (Prim p _)) = p `elem` [Match,Commit,Fatbar,Fail]
isPMC _ = False
-- reduce an application e es (head and args already individually reduced)
redApp env e es
  | exception = return (head es')   -- propagate the first raised exception
  where es' = filter isRaise (e:es)
        exception = not (isPMC e) && not (null es')
redApp env (EVar (Prim p a)) es
  = return (redPrim env p a es)
-- Inline applied variables bound to lambdas or other variables.
redApp env e@(EVar x) es = case lookup x (eqns env) of
                             Just e' | inline e' -> do e' <- alphaConvert e'; redApp env e' es
                             _ -> return (EAp e es)
  where inline (ELam _ _) = True
        inline (EVar _) = True
        inline _ = False
redApp env (ELam te e) es = do redBeta env te e es
-- Push an application into every branch of a case, when all branches
-- can absorb the arguments.
redApp env (ECase e alts) es
  | length alts' == length alts = liftM (ECase e) (redAlts env alts')
  where alts' = [ a | Just a <- map (appAlt env es) alts ]
redApp env (ELet bs e) es = liftM (ELet bs) (redApp env e es)
redApp env e es = return (EAp e es)
-- Try to push arguments into one alternative's right-hand side.
appAlt env es (PCon c,e) = case skipLambda (conArity env c) e es of
                             Just e' -> Just (PCon c, e')
                             _ -> Nothing
appAlt env es a = Just a
-- Apply 'es' under the first n lambda-bound constructor arguments.
skipLambda 0 e es = Just (EAp e es)
skipLambda n (ELam te e) es
  | n <= length te = Just (ELam te1 (eLam te2 (EAp e es)))
  where (te1,te2) = splitAt n te
skipLambda n e es = Nothing
-- perform beta reduction (if possible)
-- Substitutable arguments are inlined; others are kept in a let binding
-- so that call-by-value semantics is preserved.
redBeta env ((x,t):te) (EVar y) (e:es)
  | x == y = redBeta env te e es      -- trivial body
redBeta env ((x,t):te) b (e:es)
  | inline x e = do e' <- redBeta (addEqns env [(x,e)]) te b es
                    return (bindx e')
  | otherwise = liftM (ELet bs) (redBeta env te b es)
  where inline x e = isSafe x || isEVar e || (value e && finite env e && isSmall e)
        isSafe x = isEtaExp x || isAssumption x || isCoercion x
        -- Keep the binding only if x is still mentioned in the body.
        bindx e'
          | x `elem` evars e' = ELet bs e'
          | otherwise = e'
        bs = Binds False [(x,t)] [(x,e)]
        isEVar (EVar _) = True
        isEVar _ = False
redBeta env [] b [] = redExp env b
-- Eta-reduce \te -> e xs to e when xs are exactly the bound variables
-- and e does not capture them.
redEta env te (EAp e es) = do es <- mapM (redExp env') es
                              e <- redExp env' e
                              if okEta e && es == map EVar xs then
                                 return e
                               else do
                                 liftM (ELam te) (redApp env' e es)
  where okEta (ECon _) = False
        okEta (EVar (Prim _ _)) = False
        okEta e = null (evars e `intersect` xs)
        env' = addArgs env (dom te)
        xs = dom te
redEta env te e = liftM (ELam te) (redExp (addArgs env (dom te)) e)
-- Reduce a record selection; raises propagate, and selection from a
-- fully-evaluated record literal is resolved statically.
redSel env e s
  | isRaise e = return e
redSel env e@(EVar x) s = case lookup x (eqns env) of
                            Just e' | inline e' -> do e' <- alphaConvert e'
                                                      redSel env e' s
                            _ -> return (ESel e s)
  where inline (ERec _ _) = True
        inline (EVar _) = True
        inline _ = False
redSel env (ERec c eqs) s
  | all value (rng eqs) = case lookup s eqs of
                            Just e -> return e
                            Nothing -> internalError0 ("redSel: did not find selector " ++ show s ++ " in " ++ show eqs) s
redSel env e s = return (ESel e s)
-- Reduce a case expression: raises propagate; scrutinees that reduce to
-- a known constructor or literal pick their alternative statically.
redCase env e alts
  | isRaise e = return e
redCase env e@(EVar x) alts = case lookup x (eqns env) of
                                Just e' | inline (eFlat e') -> do e' <- alphaConvert e'; redCase env e' alts
                                Nothing -> liftM (ECase e) (redAlts env alts)
  where inline (ECon _,_) = True
        inline (ELit _,_) = True
        inline (EVar _, []) = True
        inline _ = False
redCase env (ELit l@(LStr _ _)) alts
  = redCaseStrLit env l alts
redCase env (ELit l) alts = findLit env l alts
redCase env e alts = case eFlat e of
                       (ECon k, es) -> findCon env k es alts
                       _ -> liftM (ECase e) (redAlts env alts)
-- Reduce alternatives; when the constructor alternatives already cover
-- a whole constructor group, any wildcard alternatives are dropped.
redAlts env alts
  | complete (cons env) cs = do es <- mapM (redRhs env) es
                                return (map PCon cs `zip` es)
  | otherwise = do es0 <- mapM (redRhs env) es0
                   return (ps `zip` es0)
  where (cs,es) = unzip [ (c,e) | (PCon c, e) <- alts ]
        (ps,es0) = unzip alts
-- Reduce an alternative's right-hand side under its binders.
redRhs env (ELam te e) = do e <- redRhs (addArgs env (dom te)) e
                            return (ELam te e)
redRhs env e = redExp env e
-- Select the alternative matching constructor k (wildcards match all).
findCon env k es ((PWild,e):_) = redExp env e
findCon env k es ((PCon k',e):_)
  | k == k' = redExp env (eAp e es)
findCon env k es (_:alts) = findCon env k es alts
-- Select the alternative matching literal l.
findLit env l ((PWild,e):_) = redExp env e
findLit env l ((PLit l',e):_)
  | l == l' = redExp env e
findLit env l (_:alts) = findLit env l alts
-- String-literal scrutinees: either match a string pattern directly, or
-- expand the string into CONS/NIL cells to match list patterns.
redCaseStrLit env l ((PWild,e):_) = redExp env e
redCaseStrLit env l ((PLit l',e):_)
  | l == l' = redExp env e
redCaseStrLit env (LStr _ "") ((PCon (Prim NIL _),e):alts) = redExp env e
redCaseStrLit env l@(LStr _ str) alts@((PCon (Prim CONS _),e):_)
  = redCase env (foldr (\x y -> EAp cons [chr x,y]) nil str) alts
  where chr x = ELit (LChr Nothing x)
        cons = ECon (prim CONS)
        nil = ECon (prim NIL)
redCaseStrLit env l (_:alts) = redCaseStrLit env l alts
-- Constant-fold primitive applications where the operands are literals.
-- Division/modulo by a zero literal is deliberately left unreduced.
redPrim env Refl _ [e] = e
redPrim env Match a [e] = redMatch env a e
redPrim env Fatbar a [e,e'] = redFat a e e'
redPrim env UniArray a es = EAp (EVar (Prim UniArray a)) es
redPrim env IntNeg _ [ELit (LInt _ x)] = ELit (lInt (-x))
redPrim env IntToFloat _ [ELit (LInt _ x)] = ELit (lRat (fromInteger x))
redPrim env IntToChar _ [ELit (LInt _ x)] = ELit (lChr (chr (fromInteger x)))
redPrim env FloatNeg _ [ELit (LRat _ x)] = ELit (lRat (-x))
redPrim env FloatToInt _ [ELit (LRat _ x)] = ELit (lInt (truncate x))
redPrim env CharToInt _ [ELit (LChr _ x)] = ELit (lInt (ord x))
redPrim env p a [ELit (LInt _ x), ELit (LInt _ y)]
  | p `notElem` [IntDiv,IntMod] || y /= 0 = redInt p x y
redPrim env p a [ELit (LRat _ x), ELit (LRat _ y)]
  | p /= FloatDiv || y /= 0 = redRat p x y
redPrim env p a es = eAp (EVar (Prim p a)) es
-- Reduce the Match primitive by pushing it inside binders and branches;
-- Commit cancels it and Fail converts to a Raise.
redMatch env a (ELet bs e) = ELet bs (redMatch (addArgs env (bvars bs)) a e)
redMatch env a (ELam te e) = ELam te (redMatch (addArgs env (dom te)) a e)
redMatch env a (EAp (EVar (Prim Commit _)) [e]) = e
redMatch env a (ECase e alts) = ECase e (mapSnd (redMatch env a) alts)
redMatch env _ (EVar (Prim Fail a)) = EAp (EVar (Prim Raise a)) [ELit (lInt 1)]
redMatch env _ e@(ELit _) = e
redMatch env a e = EAp (EVar (Prim Match a)) [e]
-- Reduce Fatbar: a failing left branch selects the right one, a
-- committed left branch discards the right one.
redFat a (ELet bs e) e' = ELet bs (redFat a e e')
redFat a (EVar (Prim Fail _)) e = e
redFat a e@(EAp (EVar (Prim Commit _)) _) _ = e
redFat a e e' = EAp (EVar (Prim Fatbar a)) [e,e']
-- Integer constant folding; arithmetic results are renormalised.
redInt IntPlus a b = ELit (normLit (lInt (a + b)))
redInt IntMinus a b = ELit (normLit (lInt (a - b)))
redInt IntTimes a b = ELit (normLit (lInt (a * b)))
redInt IntDiv a b = ELit (lInt (a `div` b))
redInt IntMod a b = ELit (lInt (a `mod` b))
redInt IntEQ a b = eBool (a == b)
redInt IntNE a b = eBool (a /= b)
redInt IntLT a b = eBool (a < b)
redInt IntLE a b = eBool (a <= b)
redInt IntGE a b = eBool (a >= b)
redInt IntGT a b = eBool (a > b)
redInt p _ _ = internalError0 ("redInt: " ++ show p)
-- Floating-point constant folding.
redRat FloatPlus a b = ELit (lRat (a + b))
redRat FloatMinus a b = ELit (lRat (a - b))
redRat FloatTimes a b = ELit (lRat (a * b))
redRat FloatDiv a b = ELit (lRat (a / b))
redRat FloatEQ a b = eBool (a == b)
redRat FloatNE a b = eBool (a /= b)
redRat FloatLT a b = eBool (a < b)
redRat FloatLE a b = eBool (a <= b)
redRat FloatGE a b = eBool (a >= b)
redRat FloatGT a b = eBool (a > b)
redRat p _ _ = internalError0 ("redRat: " ++ show p)
-- Lift a Haskell Bool into the object language.
eBool True = ECon (prim TRUE)
eBool False = ECon (prim FALSE)
-- Reduce a command; lets floating out of generators are re-nested, and
-- non-recursive command lets are beta-reduced away.
redCmd env (CRet e) = liftM CRet (redExp env e)
redCmd env (CExp e) = liftM CExp (redExp env e)
redCmd env (CGen p t (ELet bs e) c)
  = redCmd env (CLet bs (CGen p t e c))
redCmd env (CGen p t e c) = liftM2 (CGen p t) (redExp env e) (redCmd env c)
redCmd env (CLet bs c) = do bs'@(Binds rec te eqs) <- redBinds env bs
                            if rec then
                                liftM (CLet bs') (redCmd env c)
                             else
                                redBetaC (addArgs env (dom te)) te c (map (lookup' eqs) (dom te))
redCmd env (CAss x e c) = liftM2 (CAss x) (redExp env e) (redCmd env c)
-- perform beta reduction (if possible)
-- Command-level analogue of redBeta; additionally refuses to inline
-- state variables, whose value may change between uses.
redBetaC env ((x,t):te) (CRet (EVar y)) (e:es)
  | x == y = redBetaC env te (CRet e) es
redBetaC env ((x,t):te) c (e:es)
  | inline x e = do c' <- redBetaC (addEqns env [(x,e)]) te c es
                    return (bindx c')
  | otherwise = liftM (CLet bs) (redBetaC env te c es)
  where inline x e = isSafe x || isSafeEVar e || (value e && finite env e && isSmall e)
        isSafe x = isEtaExp x || isAssumption x || isCoercion x
        bindx c'
          | x `elem` evars c' = CLet bs c'
          | otherwise = c'
        bs = Binds False [(x,t)] [(x,e)]
        isSafeEVar (EVar n) = not $ isState n
        isSafeEVar _ = False
redBetaC env [] c [] = redCmd env c
-- Constructor presence
isSmall e = length (constrs e) < 5
-- | Collect every data-constructor name occurring in a piece of
-- syntax (duplicates included); used as the size measure for 'isSmall'.
class Constrs a where
    constrs :: a -> [Name]

instance Constrs Binds where
    constrs (Binds rec te eqns) = constrs (map snd eqns)

instance Constrs a => Constrs [a] where
    constrs xs = concatMap constrs xs

instance Constrs Exp where
    constrs (ECon c)          = [c]
    constrs (ESel e l)        = constrs e
    constrs (ELam te e)       = constrs e
    constrs (EAp e e')        = constrs e ++ constrs e'
    constrs (ELet bs e)       = constrs bs ++ constrs e
    constrs (ECase e alts)    = constrs e ++ constrs alts
    -- NOTE(review): the record constructor c itself is not counted
    -- here, only the field expressions — confirm that is intended.
    constrs (ERec c eqs)      = constrs (map snd eqs)
    constrs (EAct e e')       = constrs e ++ constrs e'
    constrs (EReq e e')       = constrs e ++ constrs e'
    constrs (ETempl x t te c) = constrs c
    constrs (EDo x t c)       = constrs c
    constrs _                 = []

instance Constrs Alt where
    constrs (p,e) = constrs e

instance Constrs Cmd where
    constrs (CLet bs c)    = constrs bs ++ constrs c
    constrs (CGen x t e c) = constrs e ++ constrs c
    constrs (CAss x e c)   = constrs e ++ constrs c
    constrs (CRet e)       = constrs e
    constrs (CExp e)       = constrs e
| UBMLtonGroup/timberc | src/Termred.hs | bsd-3-clause | 24,010 | 0 | 16 | 10,499 | 9,380 | 4,647 | 4,733 | 383 | 8 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
-- |
-- Module : Text.Syntax.Parser.List.LazyMaybe
-- Copyright : 2012 Kei Hibino
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : unknown
--
-- This module includes a lazy parser implementation for "Text.Syntax.Poly". Result does not have error informations.
module Text.Syntax.Parser.List.LazyMaybe (
-- * Syntax instance Parser type
Parser, runParser,
-- * Poly- morphic wrapper of runParser
runAsParser
) where
import Control.Monad (MonadPlus(mzero, mplus))
import Text.Syntax.Parser.Instances ()
import Text.Syntax.Poly.Class
(TryAlternative, Syntax (token))
import Text.Syntax.Parser.List.Type (RunAsParser, ErrorString, errorString)
-- | Naive 'Parser' type. Parse @[tok]@ into @alpha@.
-- A parse either fails ('Nothing') or yields a result together with
-- the unconsumed remainder of the token stream.
newtype Parser tok alpha =
  Parser {
    -- | Function to run parser
    runParser :: [tok] -> Maybe (alpha, [tok])
    }
-- Sequencing of parsers: 'return' consumes nothing; '>>=' runs the
-- first parser and feeds its leftover tokens to the continuation.
-- NOTE(review): this predates the AMP/MonadFail split — 'fail' is no
-- longer a 'Monad' method on modern GHC, and Functor/Applicative
-- instances would be required; confirm the supported GHC range.
instance Monad (Parser tok) where
  return a = Parser $ \s -> Just (a, s)
  Parser p >>= fb = Parser (\s -> do (a, s') <- p s
                                     runParser (fb a) s')
  -- A pattern-match failure in do-notation simply fails the parse.
  fail = const mzero
-- | Choice: 'mzero' always fails; 'mplus' is left-biased, trying the
-- second parser on the same input only when the first one fails.
instance MonadPlus (Parser tok) where
  mzero = Parser (const Nothing)
  mplus p1 p2 = Parser $ \ts -> runParser p1 ts `mplus` runParser p2 ts
-- Uses the class's default method definitions; nothing to override.
instance TryAlternative (Parser tok)
-- | 'token' consumes exactly one token from the stream, failing on
-- end of input.
instance Eq tok => Syntax tok (Parser tok) where
  token = Parser $ \ts ->
            case ts of
              t:rest -> Just (t, rest)
              []     -> Nothing
-- | Run 'Syntax' as @'Parser' tok@.  Succeeds only when the whole
-- input is consumed; leftover tokens or a failed parse give 'Left'.
runAsParser :: Eq tok => RunAsParser tok a ErrorString
runAsParser parser input =
    case runParser parser input of
      Just (result, []) -> Right result
      Just (_, _:_)     -> Left (errorString "Not the end of token stream.")
      Nothing           -> Left (errorString "parse error")
| khibino/haskell-invertible-syntax-poly | src/Text/Syntax/Parser/List/LazyMaybe.hs | bsd-3-clause | 1,833 | 0 | 13 | 431 | 490 | 275 | 215 | 32 | 3 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Database.TokyoCabinet
(
-- $doc
TCM
, runTCM
, OpenMode(..)
, TCDB(..)
, H.HDB
, F.FDB
, T.TDB
, BDB
-- * Error Code
, E.ECODE(..)
, E.errmsg
) where
import Control.Monad.Trans (MonadIO)
import Database.TokyoCabinet.Storable
import Database.TokyoCabinet.Sequence
import Database.TokyoCabinet.FDB.Key (ID, toID)
import qualified Database.TokyoCabinet.HDB as H
import qualified Database.TokyoCabinet.FDB as F
import qualified Database.TokyoCabinet.TDB as T
import qualified Database.TokyoCabinet.BDB as B
import qualified Database.TokyoCabinet.BDB.Cursor as C
import qualified Database.TokyoCabinet.Error as E
import Data.Int
import Data.Word
-- $doc
-- Basic Usage (sample code)
--
-- @
-- import Database.TokyoCabinet
-- import Data.ByteString.Char8
-- @
--
-- @
-- putsample :: String -> [(ByteString, ByteString)] -> TCM Bool
-- putsample file kv =
-- do tc <- new :: TCM HDB -- alternatively you can use BDB or FDB
-- open tc file [OWRITER, OCREAT]
-- mapM (uncurry $ put tc) kv
-- close tc
-- @
--
-- @
-- getsample :: String -> ByteString -> TCM (Maybe ByteString)
-- getsample file key =
-- do tc <- new :: TCM HDB -- alternatively you can use BDB or FDB
-- open tc file [OREADER]
-- val <- get tc key
-- close tc
-- return val
-- @
--
-- @
-- main = runTCM (do putsample \"foo.tch\" [(pack \"foo\", pack \"bar\")]
-- getsample \"foo.tch\" (pack \"foo\")) >>=
-- maybe (return ()) (putStrLn . show)
-- @
--
-- | Tokyo Cabinet related computation. Wrap of IO.
-- Monad and MonadIO come via GeneralizedNewtypeDeriving; no
-- Functor/Applicative instances are declared here (pre-AMP code).
newtype TCM a =
    TCM { -- | Unwrap TCM.
          runTCM :: IO a
    } deriving (Monad, MonadIO)

-- | Represent open mode for `open' function.  The constructors mirror
-- Tokyo Cabinet's native open flags (reader/writer/create/truncate/
-- no-lock/non-blocking-lock); each backend translates them with its
-- own @openModeTo*OpenMode@ function below.
data OpenMode = OREADER |
                OWRITER |
                OCREAT  |
                OTRUNC  |
                ONOLCK  |
                OLCKNB
                deriving (Eq, Ord, Show)
-- | Type class that abstracts a Tokyo Cabinet database.
-- Every method must be implemented (there are no defaults).
class TCDB a where
    -- | Create a database object.
    new       :: TCM a

    -- | Free object resource forcibly.
    delete    :: a -> TCM ()

    -- | Open a database file.
    open      :: a          -- ^ database object
              -> String     -- ^ path to database file
              -> [OpenMode] -- ^ open mode
              -> TCM Bool   -- ^ if successful, the return value is True

    -- | Close the database file. If successful, the return value is True
    close     :: a -> TCM Bool

    -- | Store a record.
    put       :: (Storable k, Storable v) =>
                 a        -- ^ database object
              -> k        -- ^ key
              -> v        -- ^ value
              -> TCM Bool -- ^ if successful, the return value is True

    -- | Store a new record. If a record with the same key exists
    -- in the database, this function has no effect.
    putkeep   :: (Storable k, Storable v) =>
                 a        -- ^ database object
              -> k        -- ^ key
              -> v        -- ^ value
              -> TCM Bool -- ^ if successful, the return value is True

    -- | Concatenate a value at the end of the existing record.
    putcat    :: (Storable k, Storable v) =>
                 a        -- ^ database object
              -> k        -- ^ key
              -> v        -- ^ value
              -> TCM Bool -- ^ if successful, the return value is True

    -- | Retrieve a record.
    get       :: (Storable k, Storable v) =>
                 a             -- ^ database object
              -> k             -- ^ key
              -> TCM (Maybe v) -- ^ If successful, the return value is the
                               -- value of the corresponding record wrapped
                               -- by `Just', else, Nothing is returned.

    -- | Remove a record.
    out       :: (Storable k) =>
                 a        -- ^ database object
              -> k        -- ^ key
              -> TCM Bool -- ^ if successful, the return value is True

    -- | Get the size of the value of a record.
    vsiz      :: (Storable k) =>
                 a               -- ^ database object
              -> k               -- ^ key
              -> TCM (Maybe Int) -- ^ If successful, the return value
                                 -- is the size of the value of the
                                 -- corresponding record wrapped by
                                 -- `Just', else, it is Nothing.

    -- | Initialize the iterator. If successful, the return value is True.
    iterinit  :: a -> TCM Bool

    -- | Get the next key of the iterator. If successful, the return
    -- value is the next key wrapped by `Just', else, it is Nothing.
    iternext  :: (Storable v) => a -> TCM (Maybe v)

    -- | Get forward matching keys.
    fwmkeys   :: (Storable k, Storable v, Sequence q) =>
                 a         -- ^ database object
              -> k         -- ^ search string
              -> Int       -- ^ the maximum number of keys to be fetched
              -> TCM (q v) -- ^ result keys

    -- | Add an integer to a record.
    addint    :: (Storable k) =>
                 a               -- ^ database object
              -> k               -- ^ key
              -> Int             -- ^ the addtional value
              -> TCM (Maybe Int) -- ^ If the corresponding record
                                 -- exists, the value is treated as an
                                 -- integer and is added to. If no
                                 -- record corresponds, a new record
                                 -- of the additional value is stored.

    -- | Add a real number to a record.
    adddouble :: (Storable k) =>
                 a                  -- ^ database object
              -> k                  -- ^ key
              -> Double             -- ^ the additional value
              -> TCM (Maybe Double) -- ^ If the corresponding record
                                    -- exists, the value is treated as
                                    -- a real number and is added
                                    -- to. If no record corresponds, a
                                    -- new record of the additional
                                    -- value is stored.

    -- | Synchronize updated contents with the file and the device.
    -- If successful, the return value is True.
    sync      :: a -> TCM Bool

    -- | Remove all records. If successful, the return value is True.
    vanish    :: a -> TCM Bool

    -- | Copy the database file.
    copy      :: a        -- ^ database object
              -> String   -- ^ path of the destination file
              -> TCM Bool -- ^ If successful, the return value is True.

    -- | Get the path of the database file.
    path      :: a -> TCM (Maybe String)

    -- | Get the number of records.
    rnum      :: a -> TCM Word64

    -- | Get the size of the database file.
    size      :: a -> TCM Word64

    -- | Get the last happened error code.
    ecode     :: a -> TCM E.ECODE

    -- | Get the default extension for specified database object.
    defaultExtension :: a -> String
-- | Translate the generic 'OpenMode' into the hash database's own
-- open-mode constants.
openModeToHOpenMode :: OpenMode -> H.OpenMode
openModeToHOpenMode mode =
    case mode of
      OREADER -> H.OREADER
      OWRITER -> H.OWRITER
      OCREAT  -> H.OCREAT
      OTRUNC  -> H.OTRUNC
      ONOLCK  -> H.ONOLCK
      OLCKNB  -> H.OLCKNB

-- Lift plain IO actions of one, two or three arguments into 'TCM'.
lift :: (a -> IO b) -> a -> TCM b
lift f x = TCM (f x)

lift2 :: (a -> b -> IO c) -> a -> b -> TCM c
lift2 f x y = TCM (f x y)

lift3 :: (a -> b -> c -> IO d) -> a -> b -> c -> TCM d
lift3 f x y z = TCM (f x y z)
-- | Hash database backend: every method delegates directly to the
-- corresponding function in "Database.TokyoCabinet.HDB".
instance TCDB H.HDB where
    new               = TCM H.new
    delete            = lift H.delete
    open tc name mode = TCM $ H.open tc name (map openModeToHOpenMode mode)
    close             = lift H.close
    put               = lift3 H.put
    putkeep           = lift3 H.putkeep
    putcat            = lift3 H.putcat
    get               = lift2 H.get
    out               = lift2 H.out
    vsiz              = lift2 H.vsiz
    iterinit          = lift H.iterinit
    iternext          = lift H.iternext
    fwmkeys           = lift3 H.fwmkeys
    addint            = lift3 H.addint
    adddouble         = lift3 H.adddouble
    sync              = lift H.sync
    vanish            = lift H.vanish
    copy              = lift2 H.copy
    path              = lift H.path
    rnum              = lift H.rnum
    -- 'size' reports the database file size, hence the underlying fsiz.
    size              = lift H.fsiz
    ecode             = lift H.ecode
    defaultExtension  = const ".tch"
-- | Translate the generic 'OpenMode' into the B+ tree database's own
-- open-mode constants.
openModeToBOpenMode :: OpenMode -> B.OpenMode
openModeToBOpenMode mode =
    case mode of
      OREADER -> B.OREADER
      OWRITER -> B.OWRITER
      OCREAT  -> B.OCREAT
      OTRUNC  -> B.OTRUNC
      ONOLCK  -> B.ONOLCK
      OLCKNB  -> B.OLCKNB

-- | A B+ tree database paired with a cursor; the cursor is what makes
-- the iterator interface implementable for this backend.
data BDB = BDB { unTCBDB :: B.BDB
               , unTCBDBCUR :: C.BDBCUR }

-- Lift operations on the wrapped 'B.BDB' into 'TCM'.
liftB :: (B.BDB -> IO a) -> BDB -> TCM a
liftB f wrapped = TCM (f (unTCBDB wrapped))

liftB2 :: (B.BDB -> a -> IO b) -> BDB -> a -> TCM b
liftB2 f wrapped y = TCM (f (unTCBDB wrapped) y)

liftB3 :: (B.BDB -> a -> b -> IO c) -> BDB -> a -> b -> TCM c
liftB3 f wrapped y z = TCM (f (unTCBDB wrapped) y z)
-- | B+ tree backend via the 'BDB' wrapper: a fresh cursor is created
-- alongside the database, and iteration is driven by that cursor
-- ('C.first' on iterinit; 'C.key' then 'C.next' on each iternext).
instance TCDB BDB where
    new = TCM $ do bdb <- B.new
                   cur <- C.new bdb
                   return $ BDB bdb cur
    delete = liftB B.delete
    open tc name mode = TCM $ B.open (unTCBDB tc) name
                        (map openModeToBOpenMode mode)
    close = liftB B.close
    put = liftB3 B.put
    putkeep = liftB3 B.putkeep
    putcat = liftB3 B.putcat
    get = liftB2 B.get
    out = liftB2 B.out
    vsiz = liftB2 B.vsiz
    iterinit bdb = TCM $ C.first (unTCBDBCUR bdb)
    -- Read the key under the cursor, then advance; the advance's
    -- success/failure is ignored, only the key read matters here.
    iternext bdb = TCM $ do k <- C.key (unTCBDBCUR bdb)
                            C.next (unTCBDBCUR bdb)
                            return k
    fwmkeys = liftB3 B.fwmkeys
    addint = liftB3 B.addint
    adddouble = liftB3 B.adddouble
    sync = liftB B.sync
    vanish = liftB B.vanish
    copy = liftB2 B.copy
    path = liftB B.path
    rnum = liftB B.rnum
    size = liftB B.fsiz
    ecode = liftB B.ecode
    defaultExtension = const ".tcb"
-- | Raw B+ tree database access.  Iteration is not implementable on a
-- bare 'B.BDB' because it needs a cursor — use the 'BDB' wrapper for
-- that.  All other methods delegate to "Database.TokyoCabinet.BDB".
instance TCDB B.BDB where
    new = TCM B.new
    delete = lift B.delete
    open tc name mode = TCM $ B.open tc name (map openModeToBOpenMode mode)
    close = lift B.close
    put = lift3 B.put
    putkeep = lift3 B.putkeep
    putcat = lift3 B.putcat
    get = lift2 B.get
    out = lift2 B.out
    vsiz = lift2 B.vsiz
    -- Previously bare 'undefined': crash with a diagnosable message
    -- instead, pointing users at the cursor-backed 'BDB' wrapper.
    iterinit = error
        "Database.TokyoCabinet: iterinit is not supported for B.BDB; use the BDB wrapper instead"
    iternext = error
        "Database.TokyoCabinet: iternext is not supported for B.BDB; use the BDB wrapper instead"
    fwmkeys = lift3 B.fwmkeys
    addint = lift3 B.addint
    adddouble = lift3 B.adddouble
    sync = lift B.sync
    vanish = lift B.vanish
    copy = lift2 B.copy
    path = lift B.path
    rnum = lift B.rnum
    size = lift B.fsiz
    ecode = lift B.ecode
    defaultExtension = const ".tcb"
-- | Translate the generic 'OpenMode' into the fixed-length database's
-- own open-mode constants.
openModeToFOpenMode :: OpenMode -> F.OpenMode
openModeToFOpenMode mode =
    case mode of
      OREADER -> F.OREADER
      OWRITER -> F.OWRITER
      OCREAT  -> F.OCREAT
      OTRUNC  -> F.OTRUNC
      ONOLCK  -> F.ONOLCK
      OLCKNB  -> F.OLCKNB

-- | FDB keys are integral record IDs: convert via Int64.
storableToKey :: (Storable a) => a -> ID
storableToKey k = toID (toInt64 k)

-- Lift FDB operations, converting the Storable key argument to 'ID'.
liftF2 :: (Storable b) => (a -> ID -> IO c) -> a -> b -> TCM c
liftF2 f db k = TCM (f db (storableToKey k))

liftF3 :: (Storable b) => (a -> ID -> c -> IO d) -> a -> b -> c -> TCM d
liftF3 f db k v = TCM (f db (storableToKey k) v)

-- | Convert a key rendered as a String back to the caller's type.
keyToStorable :: (Storable a) => String -> a
keyToStorable s = fromString s
-- | Fixed-length database backend.  Keys are integral record IDs, so
-- they are converted with 'storableToKey' / 'keyToStorable' around the
-- calls into "Database.TokyoCabinet.FDB".
instance TCDB F.FDB where
    new = TCM F.new
    delete = lift F.delete
    open tc name mode = TCM $ F.open tc name (map openModeToFOpenMode mode)
    close = lift F.close
    put = liftF3 F.put
    putkeep = liftF3 F.putkeep
    putcat = liftF3 F.putcat
    get = liftF2 F.get
    out = liftF2 F.out
    vsiz = liftF2 F.vsiz
    iterinit = lift F.iterinit
    -- F.iternext yields the key as a String; convert it back to the
    -- caller's Storable type before returning.
    iternext tc = TCM $ do key <- F.iternext tc
                           case key of
                             Nothing -> return Nothing
                             Just x -> return $ Just (keyToStorable x)
    fwmkeys = lift3 F.fwmkeys
    addint = liftF3 F.addint
    adddouble = liftF3 F.adddouble
    sync = lift F.sync
    vanish = lift F.vanish
    copy = lift2 F.copy
    path = lift F.path
    rnum = lift F.rnum
    size = lift F.fsiz
    ecode = lift F.ecode
    defaultExtension = const ".tcf"
-- | Translate the generic 'OpenMode' into the table database's own
-- open-mode constants.
openModeToTOpenMode :: OpenMode -> T.OpenMode
openModeToTOpenMode mode =
    case mode of
      OREADER -> T.OREADER
      OWRITER -> T.OWRITER
      OCREAT  -> T.OCREAT
      OTRUNC  -> T.OTRUNC
      ONOLCK  -> T.ONOLCK
      OLCKNB  -> T.OLCKNB
-- | Table database backend.  The record-level operations use the
-- primed variants (T.put', T.get', ...) from
-- "Database.TokyoCabinet.TDB".
instance TCDB T.TDB where
    new = TCM T.new
    delete = lift T.delete
    open tc name mode = TCM $ T.open tc name (map openModeToTOpenMode mode)
    close = lift T.close
    put = lift3 T.put'
    putkeep = lift3 T.putkeep'
    putcat = lift3 T.putcat'
    get = lift2 T.get'
    out = lift2 T.out
    vsiz = lift2 T.vsiz
    iterinit = lift T.iterinit
    iternext = lift T.iternext
    fwmkeys = lift3 T.fwmkeys
    addint = lift3 T.addint
    adddouble = lift3 T.adddouble
    sync = lift T.sync
    vanish = lift T.vanish
    copy = lift2 T.copy
    path = lift T.path
    rnum = lift T.rnum
    size = lift T.fsiz
    ecode = lift T.ecode
    defaultExtension = const ".tct"
| tom-lpsd/tokyocabinet-haskell | Database/TokyoCabinet.hs | bsd-3-clause | 14,341 | 0 | 15 | 5,769 | 3,166 | 1,683 | 1,483 | 276 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Tic_Tac_Toe where
import Control.Concurrent -- wiki $
import qualified Data.Map as Map
import Data.Map (Map)
import Data.Text (Text)
-- import Debug.Trace
import Graphics.Blank
import Wiki -- (512,384)
-- | Start the canvas server listening for mouse clicks and enter the
-- game loop with an empty board, X to move.
main :: IO ()
main = blankCanvas 3000 { events = ["mousedown"] } $ \ context -> loop context Map.empty X

-- | A player's mark.
data XO = X | O deriving (Eq,Ord,Show)

-- | The other player's mark.
swap :: XO -> XO
swap mark =
    case mark of
      X -> O
      O -> X
-- | One step of the game loop: render the board, wait for an event,
-- and recurse with the (possibly) updated board and turn.
loop :: DeviceContext -> Map (Int, Int) XO -> XO -> IO ()
loop context board turn = do
        -- Render everything and return the board edge size, which is
        -- reused below for hit testing.
        sz <- send context $ do
                clearRect (0,0,width context,height context)
                beginPath()
                let sz = min (width context) (height context)
                save()
                -- Draw relative to the centre of the canvas.
                translate (width context / 2,height context / 2)
                -- The four grid lines, at +/- 15% of the edge size.
                sequence_ [ do bigLine (-sz * 0.45,n) (sz * 0.45,n)
                               bigLine (n,-sz * 0.45) (n,sz * 0.45)
                          | n <- [-sz * 0.15,sz * 0.15]
                          ]
                -- Draw the mark (if any) in each of the nine cells,
                -- indexed by (x,y) in {-1,0,1}^2.
                sequence_ [ do save()
                               translate (fromIntegral x * sz * 0.3,fromIntegral y * sz * 0.3)
                               case Map.lookup (x,y) board of
                                  Just X -> drawX (sz * 0.1)
                                  Just O -> drawO (sz * 0.1)
                                  Nothing -> return ()
                               restore()
                          | x <- [-1,0,1]
                          , y <- [-1,0,1]
                          ]
                restore()
                return sz
        -- Map a canvas pixel position to a board square, or Nothing if
        -- the click is outside the grid.
        let pointToSq :: (Double, Double) -> Maybe (Int, Int)
            pointToSq (x,y) = do
                x' <- fd ((x - width context / 2) / sz)
                y' <- fd ((y - height context / 2) / sz)
                return (x',y')
            fd x =
               -- trace (show ("fx",x,r)) $
               if r `elem` [-1..1] then Just (signum r) else Nothing
              where r = round (x * 3.3333)
        -- The "wiki $" lines below appear to be scaffolding for the
        -- project's wiki screenshot/animation build (see the Wiki
        -- import); they synthesize a click on the first empty square.
        let press = (width context / 2 + fromIntegral x * (sz / 4),height context / 2 + fromIntegral y * (sz / 4)) -- wiki $
              where (x,y) = head [ ix | (ix,Nothing) -- wiki $
                                         <- [ ((x',y'),Map.lookup (x',y') board) -- wiki $
                                            | y' <- [-1,0,1] -- wiki $
                                            , x' <- [-1,0,1] -- wiki $
                                            ]] -- wiki $
        _ <- wiki $ forkIO $ send context $ trigger $ Event {
                  eMetaKey = False
                , ePageXY = return $ press
                , eType = "keypress"
                , eWhich = Nothing
                }
        event <- wait context
        file <- wiki $ anim_png "Tic_Tac_Toe"
        wiki $ snapShot context $ file
        wiki $ whenM (Map.size board == 7) $ do { build_anim "Tic_Tac_Toe" 100; close context ; quit }
        print event
        case ePageXY event of
          -- if no mouse location, ignore, and redraw
          Nothing -> loop context board turn
          Just (x',y') -> case pointToSq (x',y') of
            Nothing -> loop context board turn
            Just pos -> case Map.lookup pos board of
              -- Empty square: place the mark and pass the turn.
              Nothing -> loop context
                              (Map.insert pos turn board)
                              (swap turn)
              -- already something here
              Just _ -> loop context board turn
-- Stroke colours for the two marks and the board grid.
xColor, oColor, boardColor :: Text
xColor     = "#ff0000"
oColor     = "#00a000"
boardColor = "#000080"

-- | Stroke an X: two diagonal strokes of the given half-size, centred
-- on the current origin.
drawX :: Double -> Canvas ()
drawX size = do
    strokeStyle xColor
    lineCap "butt"
    mapM_ diagonal [ ((-size,-size),(size,size))
                   , ((-size,size),(size,-size))
                   ]
  where
    diagonal ((x0,y0),(x1,y1)) = do
        beginPath()
        moveTo(x0,y0)
        lineTo(x1,y1)
        lineWidth 10
        stroke()
-- | Stroke an O: a full circle of the given radius, centred on the
-- current origin.
drawO :: Double -> Canvas ()
drawO radius = do
        beginPath()
        arc(0, 0, radius, 0, 2 * pi, False)
        lineWidth 10
        strokeStyle oColor
        stroke()

-- | Stroke one thick board grid line between the two given points.
bigLine :: (Double, Double) -> (Double, Double) -> Canvas ()
bigLine (x,y) (x',y') = do
        beginPath()
        moveTo(x,y)
        lineTo(x',y')
        lineWidth 20
        strokeStyle boardColor
        lineCap "round"
        stroke()
| ku-fpg/blank-canvas | wiki-suite/Tic_Tac_Toe.hs | bsd-3-clause | 4,701 | 16 | 20 | 2,272 | 1,455 | 751 | 704 | 102 | 7 |
{-# OPTIONS -XCPP #-}
module Options (options) where
-- #define ALONE -- to execute it alone, uncomment this
#ifdef ALONE
import MFlow.Wai.Blaze.Html.All
main= runNavigation "" $ transientNav options
#else
import MFlow.Wai.Blaze.Html.All hiding(page)
import Menu
#endif
-- | Demo flow: a page with a colour drop-down that submits itself on
-- change, followed by a page echoing the chosen value with a link
-- back to the menu.
options= do
    r <- page $ getSelect (setSelectedOption "" (p << "select a option") <|>
                           setOption "red" (b << "red") <|>
                           setOption "blue" (b << "blue") <|>
                           setOption "Green" (b << "Green") )
               <! dosummit
    page $ p << (r ++ " selected") ++> wlink () (p << " menu")
    where
    -- onchange attribute: submit the enclosing form as soon as a new
    -- option is picked.  NOTE(review): "dosummit" looks like a typo
    -- for "dosubmit", and "select a option" for "select an option" —
    -- both left untouched here.
    dosummit= [("onchange","this.form.submit()")]

-- to run it alone, change page by ask and uncomment this:
--main= runNavigation "" $ transientNav options
| agocorona/MFlow | Demos/Options.hs | bsd-3-clause | 850 | 0 | 17 | 268 | 177 | 97 | 80 | 12 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE StandaloneDeriving #-}
-- | Construct a @Plan@ for how to build
module Stack.Build.ConstructPlan
( constructPlan
) where
import Stack.Prelude
import Control.Monad.RWS.Strict
import Control.Monad.State.Strict (execState)
import qualified Data.HashSet as HashSet
import Data.List
import Data.List.Extra (nubOrd)
import qualified Data.Map.Strict as M
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
import qualified Distribution.Text as Cabal
import qualified Distribution.Version as Cabal
import Generics.Deriving.Monoid (memptydefault, mappenddefault)
import Lens.Micro (lens)
import Stack.Build.Cache
import Stack.Build.Haddock
import Stack.Build.Installed
import Stack.Build.Source
import Stack.BuildPlan
import Stack.Config (getLocalPackages)
import Stack.Constants
import Stack.Package
import Stack.PackageDump
import Stack.PackageIndex
import Stack.PrettyPrint
import Stack.Types.Build
import Stack.Types.BuildPlan
import Stack.Types.Compiler
import Stack.Types.Config
import Stack.Types.FlagName
import Stack.Types.GhcPkgId
import Stack.Types.Package
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
import Stack.Types.Runner
import Stack.Types.Version
import System.IO (putStrLn)
import System.Process.Read (findExecutable)
-- | What we know about a package: installed, buildable from source,
-- or both.
data PackageInfo
    =
      -- | This indicates that the package is already installed, and
      -- that we shouldn't build it from source. This is always the case
      -- for snapshot packages.
      PIOnlyInstalled InstallLocation Installed
      -- | This indicates that the package isn't installed, and we know
      -- where to find its source (either a hackage package or a local
      -- directory).
    | PIOnlySource PackageSource
      -- | This indicates that the package is installed and we know
      -- where to find its source. We may want to reinstall from source.
    | PIBoth PackageSource Installed
    deriving (Show)

-- | Join a package's source with its installed state.  The asserts
-- check that source and installed records agree on version and
-- location; a snapshot install is trusted as-is, while a local
-- install keeps the source around for a possible rebuild.
combineSourceInstalled :: PackageSource
                       -> (InstallLocation, Installed)
                       -> PackageInfo
combineSourceInstalled ps (location, installed) =
    assert (piiVersion ps == installedVersion installed) $
    assert (piiLocation ps == location) $
    case location of
        -- Always trust something in the snapshot
        Snap -> PIOnlyInstalled location installed
        Local -> PIBoth ps installed
-- | Per-package view joining what we know from source and from the
-- installed package databases.
type CombinedMap = Map PackageName PackageInfo

-- | Merge the source map and installed map: packages in both become
-- 'PIBoth' (via 'combineSourceInstalled'), source-only become
-- 'PIOnlySource', installed-only become 'PIOnlyInstalled'.
combineMap :: SourceMap -> InstalledMap -> CombinedMap
combineMap = Map.mergeWithKey
    (\_ s i -> Just $ combineSourceInstalled s i)
    (fmap PIOnlySource)
    (fmap (uncurry PIOnlyInstalled))

-- | Result of resolving a dependency: either a build 'Task' to run,
-- or an already-installed package found at the given location.
data AddDepRes
    = ADRToInstall Task
    | ADRFound InstallLocation Installed
    deriving Show

-- | Per package: its version (first one seen) and the dependents
-- together with the version range each of them requires.
type ParentMap = MonoidMap PackageName (First Version, [(PackageIdentifier, VersionRange)])
-- | Writer output accumulated while walking the dependency graph.
data W = W
    { wFinals :: !(Map PackageName (Either ConstructPlanException Task))
    -- ^ final (test/benchmark) build actions per package, see 'addFinal'
    , wInstall :: !(Map Text InstallLocation)
    -- ^ executable to be installed, and location where the binary is placed
    , wDirty :: !(Map PackageName Text)
    -- ^ why a local package is considered dirty
    , wDeps :: !(Set PackageName)
    -- ^ Packages which count as dependencies
    , wWarnings :: !([Text] -> [Text])
    -- ^ Warnings
    , wParents :: !ParentMap
    -- ^ Which packages a given package depends on, along with the package's version
    } deriving Generic

-- Field-wise monoid via Generics.Deriving.Monoid.
-- NOTE(review): written against a pre-Semigroup/Monoid-split base; a
-- modern GHC would also require a Semigroup instance — confirm the
-- supported compiler range.
instance Monoid W where
    mempty = memptydefault
    mappend = mappenddefault

-- | The plan-construction monad: reads 'Ctx', writes 'W', and keeps a
-- cache of per-package resolution results as state.
type M = RWST -- TODO replace with more efficient WS stack on top of StackT
    Ctx
    W
    (Map PackageName (Either ConstructPlanException AddDepRes))
    IO

-- | Read-only environment for 'M'.
data Ctx = Ctx
    { ls :: !LoadedSnapshot
    , baseConfigOpts :: !BaseConfigOpts
    , loadPackage :: !(PackageLocationIndex FilePath -> Map FlagName Bool -> [Text] -> IO Package)
    , combinedMap :: !CombinedMap
    , toolToPackages :: !(ExeName -> Map PackageName VersionRange)
    , ctxEnvConfig :: !EnvConfig
    , callStack :: ![PackageName]
    -- ^ packages currently being resolved; used for cycle detection in 'addDep'
    , extraToBuild :: !(Set PackageName)
    , getVersions :: !(PackageName -> IO (Set Version))
    , wanted :: !(Set PackageName)
    , localNames :: !(Set PackageName)
    }

instance HasPlatform Ctx
instance HasGHCVariant Ctx
instance HasLogFunc Ctx where
    logFuncL = configL.logFuncL
instance HasRunner Ctx where
    runnerL = configL.runnerL
instance HasConfig Ctx
instance HasBuildConfig Ctx
instance HasEnvConfig Ctx where
    envConfigL = lens ctxEnvConfig (\x y -> x { ctxEnvConfig = y })
-- | Computes a build plan. This means figuring out which build 'Task's
-- to take, and the interdependencies among the build 'Task's. In
-- particular:
--
-- 1) It determines which packages need to be built, based on the
-- transitive deps of the current targets. For local packages, this is
-- indicated by the 'lpWanted' boolean. For extra packages to build,
-- this comes from the @extraToBuild0@ argument of type @Set
-- PackageName@. These are usually packages that have been specified on
-- the commandline.
--
-- 2) It will only rebuild an upstream package if it isn't present in
-- the 'InstalledMap', or if some of its dependencies have changed.
--
-- 3) It will only rebuild a local package if its files are dirty or
-- some of its dependencies have changed.
constructPlan :: forall env. HasEnvConfig env
              => LoadedSnapshot
              -> BaseConfigOpts
              -> [LocalPackage]
              -> Set PackageName -- ^ additional packages that must be built
              -> [DumpPackage () () ()] -- ^ locally registered
              -> (PackageLocationIndex FilePath -> Map FlagName Bool -> [Text] -> IO Package) -- ^ load upstream package
              -> SourceMap
              -> InstalledMap
              -> Bool
              -> RIO env Plan
constructPlan ls0 baseConfigOpts0 locals extraToBuild0 localDumpPkgs loadPackage0 sourceMap installedMap initialBuildSteps = do
    logDebug "Constructing the build plan"
    u <- askUnliftIO
    econfig <- view envConfigL
    -- Seed the traversal with the wanted local packages plus any
    -- explicitly requested extras; 'addDep' recurses into dependencies.
    let onWanted = void . addDep False . packageName . lpPackage
    let inner = do
            mapM_ onWanted $ filter lpWanted locals
            mapM_ (addDep False) $ Set.toList extraToBuild0
    lp <- getLocalPackages
    let ctx = mkCtx econfig (unliftIO u . getPackageVersions) lp
    ((), m, W efinals installExes dirtyReason deps warnings parents) <-
        liftIO $ runRWST inner ctx M.empty
    mapM_ logWarn (warnings [])
    -- Split both the library results and the finals into errors vs
    -- successes.
    let toEither (_, Left e)  = Left e
        toEither (k, Right v) = Right (k, v)
        (errlibs, adrs) = partitionEithers $ map toEither $ M.toList m
        (errfinals, finals) = partitionEithers $ map toEither $ M.toList efinals
        errs = errlibs ++ errfinals
    if null errs
        then do
            -- Only ADRToInstall results become tasks; packages that
            -- were found installed need no action.
            let toTask (_, ADRFound _ _) = Nothing
                toTask (name, ADRToInstall task) = Just (name, task)
                tasks = M.fromList $ mapMaybe toTask adrs
                takeSubset =
                    case boptsCLIBuildSubset $ bcoBuildOptsCLI baseConfigOpts0 of
                        BSAll -> id
                        BSOnlySnapshot -> stripLocals
                        BSOnlyDependencies -> stripNonDeps deps
            return $ takeSubset Plan
                { planTasks = tasks
                , planFinals = M.fromList finals
                , planUnregisterLocal = mkUnregisterLocal tasks dirtyReason localDumpPkgs sourceMap initialBuildSteps
                , planInstallExes =
                    if boptsInstallExes (bcoBuildOpts baseConfigOpts0) ||
                       boptsInstallCompilerTool (bcoBuildOpts baseConfigOpts0)
                        then installExes
                        else Map.empty
                }
        else do
            planDebug $ show errs
            stackYaml <- view stackYamlL
            prettyErrorNoIndent $ pprintExceptions errs stackYaml parents (wanted ctx)
            throwM $ ConstructPlanFailed "Plan construction failed."
  where
    mkCtx econfig getVersions0 lp = Ctx
        { ls = ls0
        , baseConfigOpts = baseConfigOpts0
        , loadPackage = loadPackage0
        , combinedMap = combineMap sourceMap installedMap
        , toolToPackages = \name ->
            maybe Map.empty (Map.fromSet (const Cabal.anyVersion)) $
            Map.lookup name toolMap
        , ctxEnvConfig = econfig
        , callStack = []
        , extraToBuild = extraToBuild0
        , getVersions = getVersions0
        , wanted = wantedLocalPackages locals <> extraToBuild0
        , localNames = Set.fromList $ map (packageName . lpPackage) locals
        }
      where
        toolMap = getToolMap ls0 lp
-- | State to be maintained during the calculation of local packages
-- to unregister.
data UnregisterState = UnregisterState
    { usToUnregister :: !(Map GhcPkgId (PackageIdentifier, Text))
    -- ^ packages decided so far, with the reason for unregistering
    , usKeep :: ![DumpPackage () () ()]
    -- ^ packages we currently believe can stay registered
    , usAnyAdded :: !Bool
    -- ^ whether this pass added anything new (drives the fixed point)
    }

-- | Determine which packages to unregister based on the given tasks and
-- already registered local packages
mkUnregisterLocal :: Map PackageName Task
                  -- ^ Tasks
                  -> Map PackageName Text
                  -- ^ Reasons why packages are dirty and must be rebuilt
                  -> [DumpPackage () () ()]
                  -- ^ Local package database dump
                  -> SourceMap
                  -> Bool
                  -- ^ If true, we're doing a special initialBuildSteps
                  -- build - don't unregister target packages.
                  -> Map GhcPkgId (PackageIdentifier, Text)
mkUnregisterLocal tasks dirtyReason localDumpPkgs sourceMap initialBuildSteps =
    -- We'll take multiple passes through the local packages. This
    -- will allow us to detect that a package should be unregistered,
    -- as well as all packages directly or transitively depending on
    -- it.
    loop Map.empty localDumpPkgs
  where
    -- Fixed point: repeat until a pass adds nothing new.
    loop toUnregister keep
        -- If any new packages were added to the unregister Map, we
        -- need to loop through the remaining packages again to detect
        -- if a transitive dependency is being unregistered.
        | usAnyAdded us = loop (usToUnregister us) (usKeep us)
        -- Nothing added, so we've already caught them all. Return the
        -- Map we've already calculated.
        | otherwise = usToUnregister us
      where
        -- Run the unregister checking function on all packages we
        -- currently think we'll be keeping.
        us = execState (mapM_ go keep) UnregisterState
            { usToUnregister = toUnregister
            , usKeep = []
            , usAnyAdded = False
            }
    go dp = do
        us <- get
        case go' (usToUnregister us) ident deps of
            -- Not unregistering, add it to the keep list
            Nothing -> put us { usKeep = dp : usKeep us }
            -- Unregistering, add it to the unregister Map and
            -- indicate that a package was in fact added to the
            -- unregister Map so we loop again.
            Just reason -> put us
                { usToUnregister = Map.insert gid (ident, reason) (usToUnregister us)
                , usAnyAdded = True
                }
      where
        gid = dpGhcPkgId dp
        ident = dpPackageIdent dp
        deps = dpDepends dp
    -- Decide whether a single package must be unregistered, returning
    -- the human-readable reason if so.
    go' toUnregister ident deps
        -- If we're planning on running a task on it, then it must be
        -- unregistered, unless it's a target and an initial-build-steps
        -- build is being done.
        | Just task <- Map.lookup name tasks
            = if initialBuildSteps && taskIsTarget task && taskProvides task == ident
                then Nothing
                else Just $ fromMaybe "" $ Map.lookup name dirtyReason
        -- Check if we're no longer using the local version
        | Just (piiLocation -> Snap) <- Map.lookup name sourceMap
            = Just "Switching to snapshot installed package"
        -- Check if a dependency is going to be unregistered
        | (dep, _):_ <- mapMaybe (`Map.lookup` toUnregister) deps
            = Just $ "Dependency being unregistered: " <> packageIdentifierText dep
        -- None of the above, keep it!
        | otherwise = Nothing
      where
        name = packageIdentifierName ident
-- | Given a 'LocalPackage' and its 'lpTestBench', adds a 'Task' for
-- running its tests and benchmarks.
--
-- If @isAllInOne@ is 'True', then this means that the build step will
-- also build the tests. Otherwise, this indicates that there's a cyclic
-- dependency and an additional build step needs to be done.
--
-- This will also add all the deps needed to build the tests /
-- benchmarks. If @isAllInOne@ is 'True' (the common case), then all of
-- these should have already been taken care of as part of the build
-- step.
addFinal :: LocalPackage -> Package -> Bool -> M ()
addFinal lp package isAllInOne = do
    depsRes <- addPackageDeps package
    res <- case depsRes of
        Left e -> return $ Left e
        Right (missing, present, _minLoc) -> do
            ctx <- ask
            return $ Right Task
                { taskProvides = PackageIdentifier
                    (packageName package)
                    (packageVersion package)
                -- Configure options are computed lazily once the
                -- still-missing deps have been resolved.
                , taskConfigOpts = TaskConfigOpts missing $ \missing' ->
                    let allDeps = Map.union present missing'
                     in configureOpts
                            (view envConfigL ctx)
                            (baseConfigOpts ctx)
                            allDeps
                            True -- local
                            Local
                            package
                , taskPresent = present
                , taskType = TTFiles lp Local -- FIXME we can rely on this being Local, right?
                , taskAllInOne = isAllInOne
                , taskCachePkgSrc = CacheSrcLocal (toFilePath (lpDir lp))
                }
    -- Record the final (test/bench) action for this package.
    tell mempty { wFinals = Map.singleton (packageName package) res }
-- | Given a 'PackageName', adds all of the build tasks to build the
-- package, if needed.
--
-- 'constructPlan' invokes this on all the target packages, setting
-- @treatAsDep'@ to False, because those packages are direct build
-- targets. 'addPackageDeps' invokes this while recursing into the
-- dependencies of a package. As such, it sets @treatAsDep'@ to True,
-- forcing this package to be marked as a dependency, even if it is
-- directly wanted. This makes sense - if we left out packages that are
-- deps, it would break the --only-dependencies build plan.
addDep :: Bool -- ^ is this being used by a dependency?
       -> PackageName
       -> M (Either ConstructPlanException AddDepRes)
addDep treatAsDep' name = do
    ctx <- ask
    let treatAsDep = treatAsDep' || name `Set.notMember` wanted ctx
    when treatAsDep $ markAsDep name
    -- The state map memoizes results, so each package is resolved once.
    m <- get
    case Map.lookup name m of
        Just res -> do
            planDebug $ "addDep: Using cached result for " ++ show name ++ ": " ++ show res
            return res
        Nothing -> do
            -- Cycle detection: if the package is already on the call
            -- stack we have a dependency cycle.
            res <- if name `elem` callStack ctx
                then do
                    planDebug $ "addDep: Detected cycle " ++ show name ++ ": " ++ show (callStack ctx)
                    return $ Left $ DependencyCycleDetected $ name : callStack ctx
                else local (\ctx' -> ctx' { callStack = name : callStack ctx' }) $ do
                    let mpackageInfo = Map.lookup name $ combinedMap ctx
                    planDebug $ "addDep: Package info for " ++ show name ++ ": " ++ show mpackageInfo
                    case mpackageInfo of
                        -- TODO look up in the package index and see if there's a
                        -- recommendation available
                        Nothing -> return $ Left $ UnknownPackage name
                        Just (PIOnlyInstalled loc installed) -> do
                            -- FIXME Slightly hacky, no flags since
                            -- they likely won't affect executable
                            -- names. This code does not feel right.
                            tellExecutablesUpstream
                                (PackageIdentifierRevision (PackageIdentifier name (installedVersion installed)) CFILatest)
                                loc
                                Map.empty
                            return $ Right $ ADRFound loc installed
                        Just (PIOnlySource ps) -> do
                            tellExecutables ps
                            installPackage name ps Nothing
                        Just (PIBoth ps installed) -> do
                            tellExecutables ps
                            installPackage name ps (Just installed)
            -- Cache the result for subsequent lookups.
            updateLibMap name res
            return res
-- | Record (via 'wInstall') which of the package's executables should
-- be copied, and to which install location.  Only wanted local
-- packages and packages in 'extraToBuild' contribute.
tellExecutables :: PackageSource -> M ()
tellExecutables (PSFiles lp _)
    | lpWanted lp = tellExecutablesPackage Local $ lpPackage lp
    | otherwise = return ()
-- Ignores ghcOptions because they don't matter for enumerating
-- executables.
tellExecutables (PSIndex loc flags _ghcOptions pir) =
    tellExecutablesUpstream pir loc flags

-- | As 'tellExecutables', for an upstream (index) package: the package
-- is loaded only when it is in the 'extraToBuild' set.
tellExecutablesUpstream :: PackageIdentifierRevision -> InstallLocation -> Map FlagName Bool -> M ()
tellExecutablesUpstream pir@(PackageIdentifierRevision (PackageIdentifier name _) _) loc flags = do
    ctx <- ask
    when (name `Set.member` extraToBuild ctx) $ do
        p <- liftIO $ loadPackage ctx (PLIndex pir) flags []
        tellExecutablesPackage loc p

-- | Write the package's executables to 'wInstall', restricted to the
-- enabled components when the package is a wanted local target.
tellExecutablesPackage :: InstallLocation -> Package -> M ()
tellExecutablesPackage loc p = do
    cm <- asks combinedMap
    -- Determine which components are enabled so we know which ones to copy
    let myComps =
            case Map.lookup (packageName p) cm of
                Nothing -> assert False Set.empty
                Just (PIOnlyInstalled _ _) -> Set.empty
                Just (PIOnlySource ps) -> goSource ps
                Just (PIBoth ps _) -> goSource ps
        goSource (PSFiles lp _)
            | lpWanted lp = exeComponents (lpComponents lp)
            | otherwise = Set.empty
        goSource PSIndex{} = Set.empty
    tell mempty { wInstall = Map.fromList $ map (, loc) $ Set.toList $ filterComps myComps $ packageExes p }
  where
    -- An empty component set means "no restriction": keep everything.
    filterComps myComps x
        | Set.null myComps = x
        | otherwise = Set.intersection x myComps
-- | Given a 'PackageSource' and perhaps an 'Installed' value, adds
-- build 'Task's for the package and its dependencies.
installPackage
    :: PackageName
    -> PackageSource
    -> Maybe Installed
    -> M (Either ConstructPlanException AddDepRes)
installPackage name ps minstalled = do
    ctx <- ask
    case ps of
        PSIndex _ flags ghcOptions pkgLoc -> do
            planDebug $ "installPackage: Doing all-in-one build for upstream package " ++ show name
            package <- liftIO $ loadPackage ctx (PLIndex pkgLoc) flags ghcOptions -- FIXME be more efficient! Get this from the LoadedPackageInfo!
            resolveDepsAndInstall True ps package minstalled
        PSFiles lp _ ->
            case lpTestBench lp of
                Nothing -> do
                    planDebug $ "installPackage: No test / bench component for " ++ show name ++ " so doing an all-in-one build."
                    resolveDepsAndInstall True ps (lpPackage lp) minstalled
                Just tb -> do
                    -- Attempt to find a plan which performs an all-in-one
                    -- build. Ignore the writer action + reset the state if
                    -- it fails.
                    -- Snapshot the lib-result map so it can be restored
                    -- when the all-in-one attempt is abandoned.
                    s <- get
                    res <- pass $ do
                        res <- addPackageDeps tb
                        -- Discard writer output when the attempt failed.
                        let writerFunc w = case res of
                                Left _ -> mempty
                                _ -> w
                        return (res, writerFunc)
                    case res of
                        Right deps -> do
                            planDebug $ "installPackage: For " ++ show name ++ ", successfully added package deps"
                            adr <- installPackageGivenDeps True ps tb minstalled deps
                            -- FIXME: this redundantly adds the deps (but
                            -- they'll all just get looked up in the map)
                            addFinal lp tb True
                            return $ Right adr
                        Left _ -> do
                            -- Reset the state to how it was before
                            -- attempting to find an all-in-one build
                            -- plan.
                            planDebug $ "installPackage: Before trying cyclic plan, resetting lib result map to " ++ show s
                            put s
                            -- Otherwise, fall back on building the
                            -- tests / benchmarks in a separate step.
                            res' <- resolveDepsAndInstall False ps (lpPackage lp) minstalled
                            when (isRight res') $ do
                                -- Insert it into the map so that it's
                                -- available for addFinal.
                                updateLibMap name res'
                                addFinal lp tb False
                            return res'
-- | Resolve the package's dependencies and, when that succeeds, schedule
-- a build task for it. Dependency failures are propagated unchanged.
resolveDepsAndInstall :: Bool
                      -> PackageSource
                      -> Package
                      -> Maybe Installed
                      -> M (Either ConstructPlanException AddDepRes)
resolveDepsAndInstall isAllInOne ps package minstalled =
    addPackageDeps package >>= either bail install
  where
    bail err = return (Left err)
    install deps =
        fmap Right (installPackageGivenDeps isAllInOne ps package minstalled deps)
-- | Checks if we need to install the given 'Package', given the results
-- of 'addPackageDeps'. If dependencies are missing, the package is
-- dirty, or it's not installed, then it needs to be installed.
--
-- The tuple argument is (missing deps, already-present deps with their
-- GhcPkgIds, minimum install location) as produced by 'addPackageDeps'.
installPackageGivenDeps :: Bool
                        -> PackageSource
                        -> Package
                        -> Maybe Installed
                        -> ( Set PackageIdentifier
                           , Map PackageIdentifier GhcPkgId
                           , InstallLocation )
                        -> M AddDepRes
installPackageGivenDeps isAllInOne ps package minstalled (missing, present, minLoc) = do
    let name = packageName package
    ctx <- ask
    -- Just installed => the existing installation can be reused;
    -- Nothing => a (re)build is required.
    mRightVersionInstalled <- case (minstalled, Set.null missing) of
        (Just installed, True) -> do
            shouldInstall <- checkDirtiness ps installed package present (wanted ctx)
            return $ if shouldInstall then Nothing else Just installed
        (Just _, False) -> do
            let t = T.intercalate ", " $ map (T.pack . packageNameString . packageIdentifierName) (Set.toList missing)
            tell mempty { wDirty = Map.singleton name $ "missing dependencies: " <> addEllipsis t }
            return Nothing
        (Nothing, _) -> return Nothing
    return $ case mRightVersionInstalled of
        Just installed -> ADRFound (piiLocation ps) installed
        Nothing -> ADRToInstall Task
            { taskProvides = PackageIdentifier
                (packageName package)
                (packageVersion package)
            , taskConfigOpts = TaskConfigOpts missing $ \missing' ->
                let allDeps = Map.union present missing'
                    destLoc = piiLocation ps <> minLoc
                 in configureOpts
                        (view envConfigL ctx)
                        (baseConfigOpts ctx)
                        allDeps
                        (psLocal ps)
                        -- An assertion to check for a recurrence of
                        -- https://github.com/commercialhaskell/stack/issues/345
                        (assert (destLoc == piiLocation ps) destLoc)
                        package
            , taskPresent = present
            , taskType =
                case ps of
                    PSFiles lp loc -> TTFiles lp (loc <> minLoc)
                    PSIndex loc _ _ pkgLoc -> TTIndex package (loc <> minLoc) pkgLoc
            , taskAllInOne = isAllInOne
            , taskCachePkgSrc = toCachePkgSrc ps
            }
-- Update response in the lib map. If it is an error, and there's
-- already an error about cyclic dependencies, prefer the cyclic error.
updateLibMap :: PackageName -> Either ConstructPlanException AddDepRes -> M ()
updateLibMap name val = modify go
  where
    go mp
        | Left _ <- val
        , Just (Left DependencyCycleDetected{}) <- M.lookup name mp = mp
        | otherwise = M.insert name val mp
-- | Cap a piece of text at 100 characters, replacing the tail of an
-- over-long value with an ellipsis (result is exactly 100 chars then).
addEllipsis :: Text -> Text
addEllipsis t =
    if T.length t < 100
        then t
        else T.take 97 t <> T.pack "..."
-- | Given a package, recurses into all of its dependencies. The results
-- indicate which packages are missing, meaning that their 'GhcPkgId's
-- will be figured out during the build, after they've been built. The
-- 2nd part of the tuple result indicates the packages that are already
-- installed which will be used.
--
-- The 3rd part of the tuple is an 'InstallLocation'. If it is 'Local',
-- then the parent package must be installed locally. Otherwise, if it
-- is 'Snap', then it can either be installed locally or in the
-- snapshot.
addPackageDeps :: Package -> M (Either ConstructPlanException (Set PackageIdentifier, Map PackageIdentifier GhcPkgId, InstallLocation))
addPackageDeps package = do
    ctx <- ask
    deps' <- packageDepsWithTools package
    deps <- forM (Map.toList deps') $ \(depname, range) -> do
        eres <- addDep True depname
        -- Only computed lazily/on demand: hits the package index.
        let getLatestApplicable = do
                vs <- liftIO $ getVersions ctx depname
                return (latestApplicableVersion range vs)
        case eres of
            Left e -> do
                addParent depname range Nothing
                let bd =
                        case e of
                            UnknownPackage name -> assert (name == depname) NotInBuildPlan
                            _ -> Couldn'tResolveItsDependencies (packageVersion package)
                mlatestApplicable <- getLatestApplicable
                return $ Left (depname, (range, mlatestApplicable, bd))
            Right adr -> do
                -- NOTE(review): both branches record the parent with a
                -- 'Nothing' version, so the 'First' wrapper in addParent
                -- never carries a version -- confirm whether the resolved
                -- version was meant to be passed here.
                addParent depname range Nothing
                inRange <- if adrVersion adr `withinRange` range
                    then return True
                    else do
                        let warn_ reason =
                                tell mempty { wWarnings = (msg:) }
                              where
                                msg = T.concat
                                    [ "WARNING: Ignoring out of range dependency"
                                    , reason
                                    , ": "
                                    , T.pack $ packageIdentifierString $ PackageIdentifier depname (adrVersion adr)
                                    , ". "
                                    , T.pack $ packageNameString $ packageName package
                                    , " requires: "
                                    , versionRangeText range
                                    ]
                        allowNewer <- view $ configL.to configAllowNewer
                        if allowNewer
                            then do
                                warn_ " (allow-newer enabled)"
                                return True
                            else do
                                -- Trust the snapshot: accept the pairing if
                                -- both ends are defined by the snapshot.
                                x <- inSnapshot (packageName package) (packageVersion package)
                                y <- inSnapshot depname (adrVersion adr)
                                if x && y
                                    then do
                                        warn_ " (trusting snapshot over Hackage revisions)"
                                        return True
                                    else return False
                if inRange
                    then case adr of
                        ADRToInstall task -> return $ Right
                            (Set.singleton $ taskProvides task, Map.empty, taskLocation task)
                        ADRFound loc (Executable _) -> return $ Right
                            (Set.empty, Map.empty, loc)
                        ADRFound loc (Library ident gid) -> return $ Right
                            (Set.empty, Map.singleton ident gid, loc)
                    else do
                        mlatestApplicable <- getLatestApplicable
                        return $ Left (depname, (range, mlatestApplicable, DependencyMismatch $ adrVersion adr))
    case partitionEithers deps of
        -- Note that the Monoid for 'InstallLocation' means that if any
        -- is 'Local', the result is 'Local', indicating that the parent
        -- package must be installed locally. Otherwise the result is
        -- 'Snap', indicating that the parent can either be installed
        -- locally or in the snapshot.
        ([], pairs) -> return $ Right $ mconcat pairs
        (errs, _) -> return $ Left $ DependencyPlanFailures
            package
            (Map.fromList errs)
  where
    adrVersion (ADRToInstall task) = packageIdentifierVersion $ taskProvides task
    adrVersion (ADRFound _ installed) = installedVersion installed
    -- Update the parents map, for later use in plan construction errors
    -- - see 'getShortestDepsPath'.
    addParent depname range mversion = tell mempty { wParents = MonoidMap $ M.singleton depname val }
      where
        val = (First mversion, [(packageIdentifier package, range)])
-- | Decide whether an installed package must be rebuilt ("is dirty"):
-- compares the configure options / deps / components / haddock settings
-- we want now against the cached configuration of the installed copy,
-- and also honours --force-dirty and local file changes. Returns True
-- (and records the reason in the writer) when a rebuild is needed.
checkDirtiness :: PackageSource
               -> Installed
               -> Package
               -> Map PackageIdentifier GhcPkgId
               -> Set PackageName
               -> M Bool
checkDirtiness ps installed package present wanted = do
    ctx <- ask
    moldOpts <- runRIO ctx $ tryGetFlagCache installed
    let configOpts = configureOpts
            (view envConfigL ctx)
            (baseConfigOpts ctx)
            present
            (psLocal ps)
            (piiLocation ps) -- should be Local always
            package
        buildOpts = bcoBuildOpts (baseConfigOpts ctx)
        wantConfigCache = ConfigCache
            { configCacheOpts = configOpts
            , configCacheDeps = Set.fromList $ Map.elems present
            , configCacheComponents =
                case ps of
                    PSFiles lp _ -> Set.map renderComponent $ lpComponents lp
                    PSIndex{} -> Set.empty
            , configCacheHaddock =
                shouldHaddockPackage buildOpts wanted (packageName package) ||
                -- Disabling haddocks when old config had haddocks doesn't make dirty.
                maybe False configCacheHaddock moldOpts
            , configCachePkgSrc = toCachePkgSrc ps
            }
    -- mreason: Nothing => clean; Just msg => dirty for the given reason.
    let mreason =
            case moldOpts of
                Nothing -> Just "old configure information not found"
                Just oldOpts
                    | Just reason <- describeConfigDiff config oldOpts wantConfigCache -> Just reason
                    | True <- psForceDirty ps -> Just "--force-dirty specified"
                    | Just files <- psDirty ps -> Just $ "local file changes: " <>
                        addEllipsis (T.pack $ unwords $ Set.toList files)
                    | otherwise -> Nothing
        config = view configL ctx
    case mreason of
        Nothing -> return False
        Just reason -> do
            tell mempty { wDirty = Map.singleton (packageName package) reason }
            return True
-- | Compare an old and a new 'ConfigCache'; return a human-readable
-- reason when they differ in a way that requires a rebuild, or Nothing
-- when they are equivalent. Stack-internal GHC options are stripped
-- before comparing unless configRebuildGhcOptions is set.
describeConfigDiff :: Config -> ConfigCache -> ConfigCache -> Maybe Text
describeConfigDiff config old new
    | configCachePkgSrc old /= configCachePkgSrc new = Just $
        "switching from " <>
        pkgSrcName (configCachePkgSrc old) <> " to " <>
        pkgSrcName (configCachePkgSrc new)
    | not (configCacheDeps new `Set.isSubsetOf` configCacheDeps old) = Just "dependencies changed"
    | not $ Set.null newComponents =
        Just $ "components added: " `T.append` T.intercalate ", "
            (map (decodeUtf8With lenientDecode) (Set.toList newComponents))
    | not (configCacheHaddock old) && configCacheHaddock new = Just "rebuilding with haddocks"
    | oldOpts /= newOpts = Just $ T.pack $ concat
        [ "flags changed from "
        , show oldOpts
        , " to "
        , show newOpts
        ]
    | otherwise = Nothing
  where
    -- Drop --ghc-option(s)/--ghcjs-option(s) flags, keeping only those
    -- options that affect build output (see isKeeper).
    stripGhcOptions =
        go
      where
        go [] = []
        go ("--ghc-option":x:xs) = go' Ghc x xs
        go ("--ghc-options":x:xs) = go' Ghc x xs
        go ((T.stripPrefix "--ghc-option=" -> Just x):xs) = go' Ghc x xs
        go ((T.stripPrefix "--ghc-options=" -> Just x):xs) = go' Ghc x xs
        go ("--ghcjs-option":x:xs) = go' Ghcjs x xs
        go ("--ghcjs-options":x:xs) = go' Ghcjs x xs
        go ((T.stripPrefix "--ghcjs-option=" -> Just x):xs) = go' Ghcjs x xs
        go ((T.stripPrefix "--ghcjs-options=" -> Just x):xs) = go' Ghcjs x xs
        go (x:xs) = x : go xs
        go' wc x xs = checkKeepers wc x $ go xs
        checkKeepers wc x xs =
            case filter isKeeper $ T.words x of
                [] -> xs
                keepers -> T.pack (compilerOptionsCabalFlag wc) : T.unwords keepers : xs
        -- GHC options which affect build results and therefore should always
        -- force a rebuild
        --
        -- For the most part, we only care about options generated by Stack
        -- itself
        isKeeper = (== "-fhpc") -- more to be added later
    userOpts = filter (not . isStackOpt)
             . (if configRebuildGhcOptions config
                   then id
                   else stripGhcOptions)
             . map T.pack
             . (\(ConfigureOpts x y) -> x ++ y)
             . configCacheOpts
    (oldOpts, newOpts) = removeMatching (userOpts old) (userOpts new)
    -- Strip the common prefix of the two option lists.
    removeMatching (x:xs) (y:ys)
        | x == y = removeMatching xs ys
    removeMatching xs ys = (xs, ys)
    newComponents = configCacheComponents new `Set.difference` configCacheComponents old
    pkgSrcName (CacheSrcLocal fp) = T.pack fp
    pkgSrcName CacheSrcUpstream = "upstream source"
-- | Did the user explicitly force this package source to be dirty?
psForceDirty :: PackageSource -> Bool
psForceDirty ps =
    case ps of
        PSFiles lp _ -> lpForceDirty lp
        PSIndex{} -> False

-- | The set of changed local files, if any. Files never change in an
-- upstream (index) package, so it reports 'Nothing'.
psDirty :: PackageSource -> Maybe (Set FilePath)
psDirty ps =
    case ps of
        PSFiles lp _ -> lpDirtyFiles lp
        PSIndex{} -> Nothing

-- | Is this package source local? (FIXME inherited from the original:
-- this is probably not the right logic, see configureOptsNoDir; we
-- probably want to check if this appears in packages:)
psLocal :: PackageSource -> Bool
psLocal ps =
    case ps of
        PSFiles _ loc -> loc == Local
        PSIndex{} -> False
-- | Get all of the dependencies for a given package, including guessed build
-- tool dependencies.
--
-- Build tools that cannot be resolved to exactly one package produce a
-- 'ToolWarning' (suppressed when the tool is already on the PATH).
packageDepsWithTools :: Package -> M (Map PackageName VersionRange)
packageDepsWithTools p = do
    ctx <- ask
    -- TODO: it would be cool to defer these warnings until there's an
    -- actual issue building the package.
    let toEither name mp =
            case Map.toList mp of
                [] -> Left (ToolWarning name (packageName p) Nothing)
                [_] -> Right mp
                ((x, _):(y, _):zs) ->
                    Left (ToolWarning name (packageName p) (Just (x, y, map fst zs)))
        (warnings0, toolDeps) =
            partitionEithers $
            map (\dep -> toEither dep (toolToPackages ctx dep)) (Map.keys (packageTools p))
    -- Check whether the tool is on the PATH before warning about it.
    warnings <- fmap catMaybes $ forM warnings0 $ \warning@(ToolWarning (ExeName toolName) _ _) -> do
        config <- view configL
        menv <- liftIO $ configEnvOverride config minimalEnvSettings { esIncludeLocals = True }
        mfound <- findExecutable menv $ T.unpack toolName
        case mfound of
            Nothing -> return (Just warning)
            Just _ -> return Nothing
    tell mempty { wWarnings = (map toolWarningText warnings ++) }
    -- Version ranges for a package required both directly and via a tool
    -- are intersected.
    return $ Map.unionsWith intersectVersionRanges
           $ packageDeps p
           : toolDeps
-- | Warn about tools in the snapshot definition. States the tool name
-- expected, the package name using it, and found packages. If the
-- last value is Nothing, it means the tool was not found
-- anywhere. For a Just value, it was found in at least two packages.
--
-- Fields: the expected executable name, the package that depends on it,
-- and (when ambiguous) the first two providers plus any remaining ones.
data ToolWarning = ToolWarning ExeName PackageName (Maybe (PackageName, PackageName, [PackageName]))
    deriving Show
-- | Render a 'ToolWarning' as a user-facing message.
toolWarningText :: ToolWarning -> Text
toolWarningText (ToolWarning (ExeName toolName) pkgName Nothing) =
    "No packages found in snapshot which provide a " <>
    T.pack (show toolName) <>
    " executable, which is a build-tool dependency of " <>
    T.pack (show (packageNameString pkgName))
toolWarningText (ToolWarning (ExeName toolName) pkgName (Just (option1, option2, options))) =
    "Multiple packages found in snapshot which provide a " <>
    T.pack (show toolName) <>
    -- Typo fix: this message previously read "exeuctable" (the sibling
    -- message above spells it correctly).
    " executable, which is a build-tool dependency of " <>
    T.pack (show (packageNameString pkgName)) <>
    ", so none will be installed.\n" <>
    "Here's the list of packages which provide it: " <>
    T.intercalate ", " (map packageNameText (option1:option2:options)) <>
    "\nSince there's no good way to choose, you may need to install it manually."
-- | Strip out anything from the @Plan@ intended for the local database
stripLocals :: Plan -> Plan
stripLocals plan = plan
    { planTasks = Map.filter ((== Snap) . taskLocation) (planTasks plan)
    , planFinals = Map.empty
    , planUnregisterLocal = Map.empty
    , planInstallExes = Map.filter (/= Local) (planInstallExes plan)
    }

-- | Keep only those tasks that build one of the given dependency packages.
stripNonDeps :: Set PackageName -> Plan -> Plan
stripNonDeps deps plan = plan
    { planTasks = Map.filter isDep (planTasks plan)
    , planFinals = Map.empty
    , planInstallExes = Map.empty -- TODO maybe don't disable this?
    }
  where
    isDep task = packageIdentifierName (taskProvides task) `Set.member` deps
-- | Record (in the writer) that the named package is needed as a dependency.
markAsDep :: PackageName -> M ()
markAsDep name =
    tell $ mempty { wDeps = Set.singleton name }
-- | Is the given package/version combo defined in the snapshot?
--
-- A package that is overridden by a local package is never considered
-- to be in the snapshot, even when the snapshot also defines it.
inSnapshot :: PackageName -> Version -> M Bool
inSnapshot name version = do
    -- Renamed locals: the original bound a local 'ls' that shadowed the
    -- record selector 'ls' used on the previous line.
    snapshot <- asks ls
    locals <- asks localNames
    return $ fromMaybe False $ do
        -- Local packages shadow the snapshot definition.
        guard $ not $ name `Set.member` locals
        lpi <- Map.lookup name (lsPackages snapshot)
        return $ lpiVersion lpi == version
-- | Reasons why constructing a build plan can fail.
data ConstructPlanException
    = DependencyCycleDetected [PackageName]
    | DependencyPlanFailures Package (Map PackageName (VersionRange, LatestApplicableVersion, BadDependency))
    | UnknownPackage PackageName -- TODO perhaps this constructor will be removed, and BadDependency will handle it all
    -- ^ Recommend adding to extra-deps, give a helpful version number?
    deriving (Typeable, Eq, Ord, Show)
-- NOTE(review): orphan instance -- 'VersionRange' is defined in Cabal;
-- needed so ConstructPlanException can derive Ord above.
deriving instance Ord VersionRange
-- | For display purposes only, Nothing if package not found
type LatestApplicableVersion = Maybe Version
-- | Reason why a dependency was not used
data BadDependency
    = NotInBuildPlan
    | Couldn'tResolveItsDependencies Version
    | DependencyMismatch Version
    deriving (Typeable, Eq, Ord, Show)
-- TODO: Consider intersecting version ranges for multiple deps on a
-- package. This is why VersionRange is in the parent map.
-- | Pretty-print the collected plan-construction exceptions together
-- with suggested resolutions (extra-deps additions, allow-newer, solver).
pprintExceptions
    :: [ConstructPlanException]
    -> Path Abs File
    -> ParentMap
    -> Set PackageName
    -> AnsiDoc
pprintExceptions exceptions stackYaml parentMap wanted =
    mconcat $
    [ flow "While constructing the build plan, the following exceptions were encountered:"
    , line <> line
    , mconcat (intersperse (line <> line) (mapMaybe pprintException exceptions'))
    , line <> line
    , flow "Some potential ways to resolve this:"
    , line <> line
    ] ++
    (if Map.null extras then [] else
      [ " *" <+> align
        (flow "Recommended action: try adding the following to your extra-deps in" <+>
         toAnsiDoc (display stackYaml) <> ":")
      , line <> line
      , vsep (map pprintExtra (Map.toList extras))
      , line <> line
      ]
    ) ++
    [ " *" <+> align (flow "Set 'allow-newer: true' to ignore all version constraints and build anyway.")
    , line <> line
    , " *" <+> align (flow "You may also want to try using the 'stack solver' command.")
    , line
    ]
  where
    exceptions' = nubOrd exceptions
    -- Packages (with a latest applicable version) worth recommending
    -- as extra-deps.
    extras = Map.unions $ map getExtras exceptions'
    getExtras (DependencyCycleDetected _) = Map.empty
    getExtras (UnknownPackage _) = Map.empty
    getExtras (DependencyPlanFailures _ m) =
        Map.unions $ map go $ Map.toList m
      where
        -- TODO: Likely a good idea to distinguish these to the user. In particular, for DependencyMismatch
        go (name, (_range, Just version, NotInBuildPlan)) =
            Map.singleton name version
        go (name, (_range, Just version, DependencyMismatch{})) =
            Map.singleton name version
        go _ = Map.empty
    pprintExtra (name, version) =
        fromString (concat ["- ", packageNameString name, "-", versionString version])
    allNotInBuildPlan = Set.fromList $ concatMap toNotInBuildPlan exceptions'
    toNotInBuildPlan (DependencyPlanFailures _ pDeps) =
        map fst $ filter (\(_, (_, _, badDep)) -> badDep == NotInBuildPlan) $ Map.toList pDeps
    toNotInBuildPlan _ = []
    pprintException (DependencyCycleDetected pNames) = Just $
        flow "Dependency cycle detected in packages:" <> line <>
        indent 4 (encloseSep "[" "]" "," (map (styleError . display) pNames))
    pprintException (DependencyPlanFailures pkg pDeps) =
        case mapMaybe pprintDep (Map.toList pDeps) of
            [] -> Nothing
            depErrors -> Just $
                flow "In the dependencies for" <+> pkgIdent <>
                pprintFlags (packageFlags pkg) <> ":" <> line <>
                indent 4 (vsep depErrors) <>
                case getShortestDepsPath parentMap wanted (packageName pkg) of
                    Nothing -> line <> flow "needed for unknown reason - stack invariant violated."
                    Just [] -> line <> flow "needed since" <+> pkgName <+> flow "is a build target."
                    Just (target:path) -> line <> flow "needed due to " <> encloseSep "" "" " -> " pathElems
                      where
                        pathElems =
                            [styleTarget . display $ target] ++
                            map display path ++
                            [pkgIdent]
      where
        pkgName = styleCurrent . display $ packageName pkg
        pkgIdent = styleCurrent . display $ packageIdentifier pkg
    -- Skip these when they are redundant with 'NotInBuildPlan' info.
    pprintException (UnknownPackage name)
        | name `Set.member` allNotInBuildPlan = Nothing
        | name `HashSet.member` wiredInPackages =
            Just $ flow "Can't build a package with same name as a wired-in-package:" <+> (styleCurrent . display $ name)
        | otherwise = Just $ flow "Unknown package:" <+> (styleCurrent . display $ name)
    pprintFlags flags
        | Map.null flags = ""
        | otherwise = parens $ sep $ map pprintFlag $ Map.toList flags
    pprintFlag (name, True) = "+" <> fromString (show name)
    pprintFlag (name, False) = "-" <> fromString (show name)
    pprintDep (name, (range, mlatestApplicable, badDep)) = case badDep of
        NotInBuildPlan -> Just $
            styleError (display name) <+>
            align (flow "must match" <+> goodRange <> "," <> softline <>
                   flow "but the stack configuration has no specified version" <>
                   latestApplicable Nothing)
        -- TODO: For local packages, suggest editing constraints
        DependencyMismatch version -> Just $
            (styleError . display) (PackageIdentifier name version) <+>
            align (flow "from stack configuration does not match" <+> goodRange <>
                   latestApplicable (Just version))
        -- I think the main useful info is these explain why missing
        -- packages are needed. Instead lets give the user the shortest
        -- path from a target to the package.
        Couldn'tResolveItsDependencies _version -> Nothing
      where
        goodRange = styleGood (fromString (Cabal.display range))
        latestApplicable mversion =
            case mlatestApplicable of
                Nothing -> ""
                Just la
                    | mlatestApplicable == mversion -> softline <>
                        flow "(latest matching version is specified)"
                    | otherwise -> softline <>
                        flow "(latest matching version is" <+> styleGood (display la) <> ")"
-- | Get the shortest reason for the package to be in the build plan. In
-- other words, trace the parent dependencies back to a 'wanted'
-- package.
getShortestDepsPath
    :: ParentMap
    -> Set PackageName
    -> PackageName
    -> Maybe [PackageIdentifier]
getShortestDepsPath (MonoidMap parentsMap) wanted name =
    if Set.member name wanted
        then Just []
        else case M.lookup name parentsMap of
            Nothing -> Nothing
            Just (_, parents) -> Just $ findShortest 256 paths0
              where
                paths0 = M.fromList $ map (\(ident, _) -> (packageIdentifierName ident, startDepsPath ident)) parents
  where
    -- The 'paths' map is a map from PackageName to the shortest path
    -- found to get there. It is the frontier of our breadth-first
    -- search of dependencies.
    -- The Int is a fuel counter bounding the search depth at 256.
    findShortest :: Int -> Map PackageName DepsPath -> [PackageIdentifier]
    findShortest fuel _ | fuel <= 0 =
        [PackageIdentifier $(mkPackageName "stack-ran-out-of-jet-fuel") $(mkVersion "0")]
    findShortest _ paths | M.null paths = []
    findShortest fuel paths =
        case targets of
            [] -> findShortest (fuel - 1) $ M.fromListWith chooseBest $ concatMap extendPath recurses
            _ -> let (DepsPath _ _ path) = minimum (map snd targets) in path
      where
        (targets, recurses) = partition (\(n, _) -> n `Set.member` wanted) (M.toList paths)
    -- NOTE(review): keeps the Ord-greater of two candidate paths while the
    -- final selection uses 'minimum' -- confirm the intended tie-breaking.
    chooseBest :: DepsPath -> DepsPath -> DepsPath
    chooseBest x y = if x > y then x else y
    -- Extend a path to all its parents.
    extendPath :: (PackageName, DepsPath) -> [(PackageName, DepsPath)]
    extendPath (n, dp) =
        case M.lookup n parentsMap of
            Nothing -> []
            Just (_, parents) -> map (\(pkgId, _) -> (packageIdentifierName pkgId, extendDepsPath pkgId dp)) parents
-- | A candidate dependency chain. The derived Ord compares fields in
-- declaration order, so shorter paths (then shorter combined names)
-- compare smaller.
data DepsPath = DepsPath
    { dpLength :: Int -- ^ Length of dpPath
    , dpNameLength :: Int -- ^ Length of package names combined
    , dpPath :: [PackageIdentifier] -- ^ A path where the packages later
                                    -- in the list depend on those that
                                    -- come earlier
    }
    deriving (Eq, Ord, Show)
-- | A one-element dependency path consisting of just the given package.
startDepsPath :: PackageIdentifier -> DepsPath
startDepsPath ident = DepsPath
    { dpLength = 1
    , dpNameLength = nameLen
    , dpPath = [ident]
    }
  where
    nameLen = T.length (packageNameText (packageIdentifierName ident))
-- | Extend a dependency path with one more (ancestor) package.
--
-- The new package is prepended so that packages later in 'dpPath' depend
-- on those that come earlier, as documented on 'DepsPath'.
extendDepsPath :: PackageIdentifier -> DepsPath -> DepsPath
extendDepsPath ident dp = DepsPath
    { dpLength = dpLength dp + 1
    , dpNameLength = dpNameLength dp + T.length (packageNameText (packageIdentifierName ident))
    -- Bug fix: this previously discarded the accumulated path
    -- (dpPath = [ident]), contradicting the incremented dpLength and
    -- collapsing every reported "needed due to" chain to one element.
    , dpPath = ident : dpPath dp
    }
-- Utility newtype wrapper to make Map's Monoid also use the
-- element's Monoid (unionWith mappend rather than Map's left-biased union).
newtype MonoidMap k a = MonoidMap (Map k a)
    deriving (Eq, Ord, Read, Show, Generic, Functor)
-- NOTE(review): on GHC >= 8.4 (Semigroup as superclass of Monoid) this
-- instance needs an accompanying Semigroup instance to compile --
-- confirm the supported compiler range before changing it.
instance (Ord k, Monoid a) => Monoid (MonoidMap k a) where
    mappend (MonoidMap mp1) (MonoidMap mp2) = MonoidMap (M.unionWith mappend mp1 mp2)
    mempty = MonoidMap mempty
-- | Debug tracing for plan construction. Flip the constant below to
-- 'True' to enable the putStrLn-based output in this module.
planDebug :: MonadIO m => String -> m ()
planDebug
    | debugEnabled = liftIO . putStrLn
    | otherwise = \_ -> return ()
  where
    debugEnabled = False
| MichielDerhaeg/stack | src/Stack/Build/ConstructPlan.hs | bsd-3-clause | 49,007 | 0 | 31 | 15,808 | 10,567 | 5,378 | 5,189 | 846 | 16 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveFunctor #-}
-- |Strict Decoder Types
module Flat.Decoder.Types
(
Get(..)
, S(..)
, GetResult(..)
, Decoded
, DecodeException(..)
, notEnoughSpace
, tooMuchSpace
, badEncoding
) where
import Control.DeepSeq ( NFData(..) )
import Control.Exception ( throwIO, Exception )
import Data.Word ( Word8 )
import Foreign ( Ptr )
#if MIN_VERSION_base(4,9,0)
import qualified Control.Monad.Fail as Fail
#endif
{- |
A decoder.
Given:
* end of input buffer
* current position in input buffer
returns:
* decoded value
* new position in input buffer
The decoder runs in IO; failures are signalled by throwing a
'DecodeException'.
-}
newtype Get a =
  Get
    { runGet ::
           Ptr Word8
        -> S
        -> IO (GetResult a)
    } -- deriving (Functor)
-- Seems to give better performance than the derived version
instance Functor Get where
  fmap f g =
    Get $ \end s -> do
      GetResult s' a <- runGet g end s
      return $ GetResult s' (f a)
  {-# INLINE fmap #-}
-- Is this correct? (A 'Get' wraps a function, so forcing the closure to
-- WHNF is all rnf can usefully do here.)
instance NFData (Get a) where
  rnf !_ = ()
instance Show (Get a) where
  show _ = "Get"
-- Hand-written for performance; each method threads the input pointer
-- through explicitly and is INLINEd.
instance Applicative Get where
  pure x = Get (\_ ptr -> return $ GetResult ptr x)
  {-# INLINE pure #-}
  Get f <*> Get g =
    Get $ \end ptr1 -> do
      GetResult ptr2 f' <- f end ptr1
      GetResult ptr3 g' <- g end ptr2
      return $ GetResult ptr3 (f' g')
  {-# INLINE (<*>) #-}
  Get f *> Get g =
    Get $ \end ptr1 -> do
      GetResult ptr2 _ <- f end ptr1
      g end ptr2
  {-# INLINE (*>) #-}
instance Monad Get where
  return = pure
  {-# INLINE return #-}
  (>>) = (*>)
  {-# INLINE (>>) #-}
  Get x >>= f =
    Get $ \end s -> do
      GetResult s' x' <- x end s
      runGet (f x') end s'
  {-# INLINE (>>=) #-}
-- 'fail' moved from Monad to MonadFail over the course of the
-- MonadFail proposal; the CPP below covers both base generations.
#if !(MIN_VERSION_base(4,13,0))
  fail = failGet
  -- base < 4.13
#endif
#if MIN_VERSION_base(4,9,0)
instance Fail.MonadFail Get where
  fail = failGet
#endif
{-# INLINE failGet #-}
-- | A decoder that always fails, reporting a 'BadEncoding' carrying @msg@.
failGet :: String -> Get a
failGet msg = Get run
  where
    run end s = badEncoding end s msg
-- |Decoder state
data S =
  S
    { currPtr  :: {-# UNPACK #-}!(Ptr Word8) -- ^ current read position in the input buffer
    , usedBits :: {-# UNPACK #-}!Int -- ^ bits already consumed in the byte at 'currPtr' (presumably 0-7 -- confirm)
    }
  deriving (Show, Eq, Ord)
-- |A decoded value paired with the decoder state after reading it
data GetResult a =
  GetResult {-# UNPACK #-}!S !a
  deriving (Functor)
-- |A decoded value
type Decoded a = Either DecodeException a
-- |An exception during decoding
data DecodeException
  = NotEnoughSpace Env
  | TooMuchSpace Env
  | BadEncoding Env String
  deriving (Show, Eq, Ord)
-- |End-of-buffer pointer plus the decoder state at the failure point
type Env = (Ptr Word8, S)
-- | Throw 'NotEnoughSpace' for the given end pointer and decoder state.
notEnoughSpace :: Ptr Word8 -> S -> IO a
notEnoughSpace endPtr s = throwIO (NotEnoughSpace (endPtr, s))

-- | Throw 'TooMuchSpace' for the given end pointer and decoder state.
tooMuchSpace :: Ptr Word8 -> S -> IO a
tooMuchSpace endPtr s = throwIO (TooMuchSpace (endPtr, s))

-- | Throw 'BadEncoding' with an explanatory message.
badEncoding :: Ptr Word8 -> S -> String -> IO a
badEncoding endPtr s msg = throwIO (BadEncoding (endPtr, s) msg)

instance Exception DecodeException
| tittoassini/flat | src/Flat/Decoder/Types.hs | bsd-3-clause | 2,840 | 0 | 13 | 730 | 857 | 459 | 398 | 85 | 1 |
{-|
Specific unit tests for 'ClockTable' queries.
-}
module Database.Export.ClockTableSpec (spec) where
import Data.Default
import TestImport
import Data.Time.Calendar (fromGregorian)
import Data.Time.Clock (secondsToDiffTime, UTCTime(..))
import Database.OrgMode.Types
import qualified Database.OrgMode.Export.ClockTable as ClockTable
-------------------------------------------------------------------------------
spec :: Spec
spec = do
    -- Row-level queries: one ClockTable row per imported document.
    describe "getTable" $ do
        it "1 heading, 1 doc, no filter" $ do
            table <- runDb $ do
                void $ importExample "1_clock_2_hours.org"
                ClockTable.getTable def
            (length (clockTableRows table)) `shouldBe` 1
        it "2 docs, no filter" $ do
            table <- runDb $ do
                void $ importExample "1_clock_2_hours.org"
                void $ importExample "2_sections_2_clocks_each.org"
                ClockTable.getTable def
            (length (clockTableRows table)) `shouldBe` 2
        it "all data, no filter" $ do
            table <- runDb $ do
                void $ importExample "all_data.org"
                ClockTable.getTable def
            let rows = clockTableRows table
            (length rows) `shouldBe` 1
            let (row1:_) = rows
                shorts = clockRowShorts row1
            (length shorts) `shouldBe` 1
    -- Short-level queries: filtering by document id and by clock
    -- start/end timestamps (example files carry clocks around 2015-10).
    describe "getShorts" $ do
        it "1 heading, 1 doc, no filter" $ do
            parts <- runDb $ do
                void $ importExample "1_clock_2_hours.org"
                ClockTable.getShorts def
            (length parts) `shouldBe` 1
        it "2 docs, filter by 1 doc" $ do
            parts <- runDb $ do
                void $ importExample "1_clock_2_hours.org"
                docId2 <- importExample "2_sections_2_clocks_each.org"
                let f = def{ headingFilterDocumentIds = [docId2] }
                ClockTable.getShorts f
            (length parts) `shouldBe` 2
        it "3 docs, filter by 2 docs" $ do
            parts <- runDb $ do
                void $ importExample "1_clock_2_hours.org"
                docId2 <- importExample "2_sections_2_clocks_each.org"
                docId3 <- importExample "2_clocks_45_minutes.org"
                let f = def{ headingFilterDocumentIds = [docId2, docId3] }
                ClockTable.getShorts f
            (length parts) `shouldBe` 3
        it "2 docs, filter by start date after both" $ do
            let startT = UTCTime (fromGregorian 2050 1 1) (secondsToDiffTime 0)
            parts <- runDb $ do
                void $ importExample "1_clock_2_hours.org"
                void $ importExample "2_sections_2_clocks_each.org"
                let f = def{ headingFilterClockStart = Just startT }
                ClockTable.getShorts f
            (length parts) `shouldBe` 0
        it "2 docs, filter by start date before first" $ do
            let startT = UTCTime (fromGregorian 2015 10 16) (secondsToDiffTime 0)
            parts <- runDb $ do
                void $ importExample "1_clock_2_hours.org"
                void $ importExample "2_sections_2_clocks_each.org"
                let f = def{ headingFilterClockStart = Just startT }
                ClockTable.getShorts f
            (length parts) `shouldBe` 2
        it "2 docs, filter by end date before both" $ do
            let endT = UTCTime (fromGregorian 1999 1 1) (secondsToDiffTime 0)
            parts <- runDb $ do
                void $ importExample "1_clock_2_hours.org"
                void $ importExample "2_sections_2_clocks_each.org"
                let f = def{ headingFilterClockEnd = Just endT }
                ClockTable.getShorts f
            (length parts) `shouldBe` 0
        it "2 docs, filter by end date after first" $ do
            let endT = UTCTime (fromGregorian 2015 10 16) (secondsToDiffTime 0)
            parts <- runDb $ do
                void $ importExample "1_clock_2_hours.org"
                void $ importExample "2_sections_2_clocks_each.org"
                let f = def{ headingFilterClockEnd = Just endT }
                ClockTable.getShorts f
            (length parts) `shouldBe` 1
        it "2 docs, filter by start and end date for first" $ do
            let startT = UTCTime (fromGregorian 2015 10 14) (secondsToDiffTime 0)
                endT = UTCTime (fromGregorian 2015 10 16) (secondsToDiffTime 0)
            parts <- runDb $ do
                void $ importExample "1_clock_2_hours.org"
                void $ importExample "2_sections_2_clocks_each.org"
                let f = def{ headingFilterClockStart = Just startT
                           , headingFilterClockEnd = Just endT
                           }
                ClockTable.getShorts f
            (length parts) `shouldBe` 1
        it "2 docs, filter by start and end date for second" $ do
            let startT = UTCTime (fromGregorian 2015 10 16) (secondsToDiffTime 0)
                endT = UTCTime (fromGregorian 2015 10 20) (secondsToDiffTime 0)
            parts <- runDb $ do
                void $ importExample "1_clock_2_hours.org"
                void $ importExample "2_sections_2_clocks_each.org"
                let f = def{ headingFilterClockStart = Just startT
                           , headingFilterClockEnd = Just endT
                           }
                ClockTable.getShorts f
            (length parts) `shouldBe` 1
| rzetterberg/orgmode-sql | test/Database/Export/ClockTableSpec.hs | bsd-3-clause | 5,315 | 0 | 22 | 1,805 | 1,322 | 623 | 699 | 103 | 1 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS -fno-warn-orphans #-}
module BMake.Base
( thenP
, returnP
, happyError
, Parser
, Token(..)
, TokenClass(..)
, AlexState(..)
, parseDCToken
, lexer
, AssignType(..)
, IfCmpType(..)
, MetaVar(..)
, MetaVarModifier(..)
, MakefileF(..)
, Makefile
, StatementF(..), substmts
, Statement
, Expr
, ExprF(..)
, module BMake.Lexer
)
where
--------------------------------------------------------------------------------
import Control.DeepSeq (NFData (..))
import Control.DeepSeq.Generics (genericRnf)
import Data.Aeson
import Data.ByteString.Lazy (ByteString)
import Data.String (IsString)
import Data.Text
import GHC.Generics
----
import BMake.Lexer
--------------------------------------------------------------------------------
-- | The parser monad handed to Happy: just the Alex monad.
type Parser a = Alex a

-- | Monadic bind in the shape Happy expects for its %monad directive.
thenP :: Parser a -> (a -> Parser b) -> Parser b
thenP m k = m >>= k

-- | Monadic return in the shape Happy expects for its %monad directive.
returnP :: a -> Parser a
returnP = pure
-- | Read the lexer's current position out of the Alex state.
alexGetPosition :: Alex (AlexPosn)
alexGetPosition = Alex $ \s@AlexState{alex_pos=pos} -> Right (s, pos)
-- | Report a syntax error at the current lexer position.
happyError :: Parser a
happyError = do
  (AlexPn _ line col) <- alexGetPosition
  alexStructError (line, col, "syntax error" :: String)
-- | The special make metavariables ($@, $<, $^, $|, $*).
data MetaVar
  = FirstOutput
  | FirstInput
  | AllInputs
  | AllOOInputs
  | Stem
  deriving (Eq, Ord, Show, Generic)
instance ToJSON MetaVar where
instance NFData MetaVar where
    rnf = genericRnf
-- | Optional modifier on a metavariable: none, file part, or directory part.
data MetaVarModifier
  = NoMod
  | ModFile
  | ModDir
  deriving (Eq, Ord, Show, Generic)
instance ToJSON MetaVarModifier where
instance NFData MetaVarModifier where
    rnf = genericRnf
-- | One token of an (unevaluated) makefile expression.
-- 'Str' is literal text; braces/comma/spaces are kept so the original
-- spelling can be reproduced; 'VarSpecial' is a metavariable reference
-- and 'VarSimple' a named-variable reference.
data ExprF text
  = Str text
  | OpenBrace
  | CloseBrace
  | Comma
  | Spaces
  | VarSpecial MetaVar MetaVarModifier
  | VarSimple text
  deriving (Eq, Ord, Show, Generic, Functor)
type Expr = ExprF ByteString
-- | Map a metavariable character (the one following @$@) to its 'MetaVar'.
-- Calls 'error' on anything outside the five known characters.
parseMetaVarChar :: Char -> MetaVar
parseMetaVarChar c = case c of
  '@' -> FirstOutput
  '<' -> FirstInput
  '^' -> AllInputs
  '|' -> AllOOInputs
  '*' -> Stem
  _   -> error $ "unknown meta-variable: $" ++ [c]
-- | Map an optional modifier character (from @$(xF)@ / @$(xD)@ syntax)
-- to its 'MetaVarModifier'. Calls 'error' on an unknown modifier.
parseModifier :: Maybe Char -> MetaVarModifier
parseModifier mc = case mc of
  Nothing    -> NoMod
  Just 'F'   -> ModFile
  Just 'D'   -> ModDir
  Just other -> error $ "unknown meta-variable modifier: $(," ++ [other] ++ ")"
-- | Build an expression token from a lexed dollar-token: the pair is the
-- metavariable character and an optional modifier character.
-- @$.@ is special-cased to the simple variable named \".\".
parseDCToken :: IsString text => (Char, Maybe Char) -> ExprF text
parseDCToken ('.', Nothing) = VarSimple "."
parseDCToken (other, modifier) = VarSpecial (parseMetaVarChar other) (parseModifier modifier)
instance NFData text => NFData (ExprF text) where
    rnf = genericRnf
-- | @=@ versus @?=@ (assign only when unset) assignment.
data AssignType = AssignNormal | AssignConditional
    deriving (Show, Generic)
instance NFData AssignType where
instance ToJSON AssignType where
-- | Comparison direction of an @ifeq@/@ifneq@ block.
data IfCmpType = IfEquals | IfNotEquals
    deriving (Show, Generic)
instance NFData IfCmpType where
instance ToJSON IfCmpType where
-- | One makefile statement: assignment, local scope, target rule
-- (outputs, inputs, recipe lines), include, or conditional with
-- then/else branches.
data StatementF text
  = Assign text AssignType [ExprF text]
  | Local [StatementF text]
  | Target [ExprF text] [ExprF text] [[ExprF text]]
  | Include text
  | IfCmp IfCmpType [ExprF text] [ExprF text] [StatementF text] [StatementF text]
  deriving (Show, Generic, Functor)
type Statement = StatementF ByteString
-- | Traversal of the direct child statement lists of a statement.
-- Only 'Local' and 'IfCmp' carry children; everything else is returned
-- unchanged.
substmts ::
    Applicative f =>
    ([StatementF text] -> f [StatementF text]) ->
    StatementF text -> f (StatementF text)
substmts f stmt =
    case stmt of
        Local children -> Local <$> f children
        IfCmp cmp lhs rhs thens elses -> IfCmp cmp lhs rhs <$> f thens <*> f elses
        other -> pure other
instance NFData text => NFData (StatementF text) where
    rnf = genericRnf
-- | A whole makefile: just its list of top-level statements.
data MakefileF text = Makefile
    { unit :: [StatementF text]
    } deriving (Show, Generic, Functor)
type Makefile = MakefileF ByteString
instance NFData text => NFData (MakefileF text) where
-- JSON rendering of expression tokens; punctuation tokens serialize to
-- their literal spelling so the original text can be reconstructed.
instance ToJSON (ExprF Text) where
    toJSON (Str name) = String name
    toJSON Comma = String ","
    toJSON OpenBrace = String "{"
    toJSON CloseBrace = String "}"
    toJSON (Spaces) = String " "
    toJSON (VarSpecial vtype mods) =
        object [ "varSpecial" .= vtype
               , "varMod" .= mods ]
    toJSON (VarSimple name) =
        object [ "var" .= String name ]
-- JSON rendering of statements: one wrapper key per constructor.
instance ToJSON (StatementF Text) where
    toJSON (Assign name isOptional expr) =
        object [ "assign" .= object [
              "name" .= name
            , "isOptional" .= isOptional
            , "expr" .= expr
            ] ]
    toJSON (Local stmts) =
        object [
            "local" .= stmts
        ]
    toJSON (Target outps inps exprs) =
        object [ "target" .= object [
              "outputs" .= outps
            , "inputs" .= inps
            , "recipe" .= exprs
            ] ]
    toJSON (Include name) =
        object [
            "include" .= name
        ]
    toJSON (IfCmp b val_a val_b if_pass if_otherwise) =
        object [ "compare" .= object [
              "is_equal" .= b
            , "val_a" .= val_a
            , "val_b" .= val_b
            , "if_pass" .= if_pass
            , "if_otherwise" .= if_otherwise
            ] ]
instance ToJSON (MakefileF Text) where
-- | Read the two-token pushback buffer kept in the lexer user state.
getPrevTokens :: Alex (Maybe Token, Maybe Token)
getPrevTokens = prevTokens <$> getUserState
-- | Apply a function to the pushback buffer in the lexer user state.
modifyPrevTokens :: ((Maybe Token, Maybe Token) -> (Maybe Token, Maybe Token)) -> Alex ()
modifyPrevTokens f = modifyUserState $ \us -> us { prevTokens = f (prevTokens us) }
-- | Overwrite the pushback buffer.
setPrevTokens :: (Maybe Token, Maybe Token) -> Alex ()
setPrevTokens = modifyPrevTokens . const
-- | Threaded lexer for Happy. Newline handling needs one token of
-- lookahead: a newline is buffered, and only emitted if the *next*
-- token is not a newline-and-tab (which marks a recipe continuation,
-- where the buffered newline must be suppressed). The first slot of
-- the buffer holds a pending newline, the second a pending token to
-- re-deliver.
lexer :: (Token -> Parser a) -> Parser a
lexer f = do
    mPrevTokens <- getPrevTokens
    case mPrevTokens of
        (jx, Just token) ->
            -- A token was pushed back earlier: deliver it first.
            setPrevTokens (jx, Nothing) >> f token
        (jx, Nothing) -> do
            token <- alexMonadScan
            case token of
                Token _ TokenNewLine ->
                    -- Buffer the newline and keep scanning (lookahead).
                    setPrevTokens (Just token, Nothing) >> lexer f
                _ -> case jx of
                    Just prevToken@(Token _ TokenNewLine) -> do
                        case token of
                            Token _ TokenNewLineAndTab -> do
                                -- Continuation line: drop the buffered newline.
                                setPrevTokens (Nothing, Nothing)
                                f token
                            _ -> do
                                -- Emit the buffered newline; re-deliver the
                                -- freshly scanned token on the next call.
                                setPrevTokens (Nothing, Just token)
                                f prevToken
                    _ -> f token
| Peaker/buildsome-tst | app/BMake/Base.hs | bsd-3-clause | 6,543 | 0 | 28 | 1,850 | 1,965 | 1,042 | 923 | 183 | 5 |
{-# LANGUAGE OverloadedStrings #-}
module Metrics where
import Data.Text (Text, pack)
import Data.Text.Encoding (decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
import Prometheus (Counter, Gauge, Info (..), MonadMonitor, Vector, addCounter, counter, decGauge,
gauge, incCounter, incGauge, register, setGauge, vector, withLabel)
import qualified Network.HTTP.Types as Http
-- | Prometheus label value for the HTTP request method (e.g. \"GET\").
type HttpMethodLabel = Text
-- | Prometheus label value for the HTTP status code, rendered as text.
type HttpStatusCode = Text
-- We want to store for each (HTTP method, HTTP status code) pair how many times it has been called
type HttpRequestCounter = Vector (HttpMethodLabel, HttpStatusCode) Counter
-- | Increment the counter cell labelled with this request's method and
-- status code. The method bytes are decoded leniently (invalid UTF-8 is
-- replaced rather than thrown).
countHttpRequest :: MonadMonitor m => Http.Method -> Http.Status -> HttpRequestCounter -> m ()
countHttpRequest method status httpRequestCounter =
  let methodLabel = decodeUtf8With lenientDecode method
      statusLabel = pack (show (Http.statusCode status))
  in withLabel httpRequestCounter (methodLabel, statusLabel) incCounter
-- | All Prometheus collectors Icepeak exposes, created once at startup
-- by 'createAndRegisterIcepeakMetrics'.
data IcepeakMetrics = IcepeakMetrics
  { icepeakMetricsRequestCounter :: HttpRequestCounter
  -- TODO: the following line can be removed after dashboard has been updated to use icepeak_data_size_bytes
  , icepeakMetricsDataSize :: Gauge
  , icepeakMetricsDataSizeBytes :: Gauge
  , icepeakMetricsJournalSize :: Gauge
  , icepeakMetricsDataWritten :: Counter
  , icepeakMetricsDataWrittenTotal :: Counter
  , icepeakMetricsJournalWritten :: Counter
  , icepeakMetricsSubscriberCount :: Gauge
  }
-- | Create every collector and register it with the global Prometheus
-- registry. Must be called exactly once (re-registration would fail).
-- The duplicated data-size/data-written collectors exist only for
-- dashboard backward compatibility (see TODOs).
createAndRegisterIcepeakMetrics :: IO IcepeakMetrics
createAndRegisterIcepeakMetrics = IcepeakMetrics
  <$> register (vector ("method", "status") requestCounter)
  -- TODO: the following line can be removed after dashboard has been updated to use icepeak_data_size_bytes
  <*> register (gauge (Info "icepeak_data_size" "Size of data file in bytes."))
  <*> register (gauge (Info "icepeak_data_size_bytes" "Size of data file in bytes."))
  <*> register (gauge (Info "icepeak_journal_size_bytes"
                            "Size of journal file in bytes."))
  -- TODO: the following line can be removed after dashboard has been updated to use icepeak_data_size_bytes
  <*> register (counter (Info "icepeak_data_written" "Total number of bytes written so far."))
  <*> register (counter (Info "icepeak_data_written_bytes_total" "Total number of bytes written so far."))
  <*> register (counter (Info "icepeak_journal_written_bytes_total"
                              "Total number of bytes written to the journal so far."))
  <*> register (gauge
                (Info "icepeak_subscriber_count" "Number of websocket subscriber connections."))
  where
    requestCounter = counter (Info "icepeak_http_requests"
                                   "Total number of HTTP requests since starting Icepeak.")
-- | Record one served HTTP request in the metrics' request counter.
notifyRequest :: Http.Method -> Http.Status -> IcepeakMetrics -> IO ()
notifyRequest method status metrics =
  countHttpRequest method status (icepeakMetricsRequestCounter metrics)
-- | Publish the current size of the data file, in bytes.
-- Writes both the legacy and the new gauge (see TODO below).
setDataSize :: (MonadMonitor m, Real a) => a -> IcepeakMetrics -> m ()
setDataSize val metrics = do
  -- TODO: the following line can be removed after dashboard has been updated to use icepeak_data_size_bytes
  setGauge (icepeakMetricsDataSize metrics) (realToFrac val)
  setGauge (icepeakMetricsDataSizeBytes metrics) (realToFrac val)
-- | Publish the current size of the journal file, in bytes.
setJournalSize :: (MonadMonitor m, Real a) => a -> IcepeakMetrics -> m ()
setJournalSize val metrics = setGauge (icepeakMetricsJournalSize metrics) (realToFrac val)
-- | Increment the total data written to disk by the given number of bytes.
-- Returns True, when it actually increased the counter and otherwise False.
incrementDataWritten :: (MonadMonitor m, Real a) => a -> IcepeakMetrics -> m Bool
incrementDataWritten num_bytes metrics = do
  -- Ignore the result to silence linter.
  -- TODO: the following line can be removed after dashboard has been updated to use icepeak_data_size_bytes
  _ <- addCounter (icepeakMetricsDataWritten metrics) (realToFrac num_bytes)
  addCounter (icepeakMetricsDataWrittenTotal metrics) (realToFrac num_bytes)
-- | Increment the data written to the journal by the given number of bytes.
-- Returns True, when it actually increased the counter and otherwise False.
incrementJournalWritten :: (MonadMonitor m, Real a) => a -> IcepeakMetrics -> m Bool
incrementJournalWritten num_bytes metrics = addCounter (icepeakMetricsJournalWritten metrics)
                                                       (realToFrac num_bytes)
-- | Record one new websocket subscriber connection.
incrementSubscribers :: MonadMonitor m => IcepeakMetrics -> m ()
incrementSubscribers = incGauge . icepeakMetricsSubscriberCount
-- | Record one closed websocket subscriber connection.
decrementSubscribers :: MonadMonitor m => IcepeakMetrics -> m ()
decrementSubscribers = decGauge . icepeakMetricsSubscriberCount
| channable/icepeak | server/src/Metrics.hs | bsd-3-clause | 4,693 | 0 | 16 | 798 | 884 | 472 | 412 | 59 | 1 |
module Main where
import Codec.Encryption.Historical.XOR.Implementation
import System.Environment (getArgs)
-- | Entry point: hand the command-line arguments to 'encoder', which
-- streams stdin through the XOR codec.
main :: IO ()
main = getArgs >>= encoder
-- | With exactly one argument (the key), XOR-code all of stdin and
-- print the result; otherwise print a usage message. XOR coding is
-- symmetric, so the same function both encodes and decodes.
encoder :: [String] -> IO ()
encoder [key] = getContents >>= putStrLn . xor_decode key
encoder _ = putStrLn "Usage: cat foo.txt | encode_xor <key>"
| beni55/Historical-Cryptography | Codec/Encryption/Historical/XOR/CommandXOR.hs | mit | 344 | 0 | 10 | 63 | 107 | 56 | 51 | 11 | 1 |
{-# OPTIONS_GHC -F -pgmF hspec-discover #-}
module Main where
| aar-lang/aar | test/Spec.hs | gpl-3.0 | 63 | 0 | 2 | 10 | 5 | 4 | 1 | 2 | 0 |
module B
where
-- | A sample string constant exported by module B.
bString :: String
bString = concat ["another", " ", "string"]
| 23Skidoo/ghc-parmake | tests/data/executable/B.hs | bsd-3-clause | 68 | 0 | 4 | 18 | 14 | 9 | 5 | 3 | 1 |
-- Copyright (c) 1998-1999 Chris Okasaki.
-- See COPYRIGHT file for terms and conditions.
module Sequence
{-# DEPRECATED "This module is unmaintained, and will disappear soon" #-}
(
-- class definition + method wrappers
module Sequence,
-- re-export view type from EdisonPrelude for convenience
Maybe2(..)
) where
import Prelude hiding (concat,reverse,map,concatMap,foldr,foldl,foldr1,foldl1,
filter,takeWhile,dropWhile,lookup,take,drop,splitAt,
zip,zip3,zipWith,zipWith3,unzip,unzip3,null)
import Monad
import EdisonPrelude(Maybe2(..))
-- naming convention: instances of Sequence are named Seq whenever possible
-- Each method is followed by its specification, written against the
-- abstract sequence notation <x0,...,xn-1>.
class (Functor s, MonadPlus s) => Sequence s where
  -- in addition to Functor, Monad, and MonadPlus,
  -- sequences should also be instances of Eq and Show
  ----------------------------------------------------------------------
  -- Constructors
  empty     :: s a
  single    :: a -> s a
    -- empty = <>
    -- single x = <x>
  cons      :: a -> s a -> s a
  snoc      :: s a -> a -> s a
  append    :: s a -> s a -> s a
    -- cons x <x0,...,xn-1> = <x,x0,...,xn-1>
    -- snoc <x0,...,xn-1> x = <x0,...,xn-1,x>
    -- append <x0,...,xn-1> <y0,...,ym-1> = <x0,...,xn-1,y0,...,ym-1>
  fromList  :: [a] -> s a
    -- fromList [x0,...,xn-1] = <x0,...,xn-1>
  -- initialize a sequence
  copy      :: Int -> a -> s a          -- returns empty if size is negative
  tabulate  :: Int -> (Int -> a) -> s a -- returns empty if size is negative
    -- copy n x = <x,...,x> -- n copies
    -- tabulate f n = <f 0,...,f n-1>
  ----------------------------------------------------------------------
  -- Destructors
  -- view the left element
  lview     :: s a -> Maybe2 a (s a)
  lhead     :: s a -> a      -- signals error if sequence is empty
  ltail     :: s a -> s a    -- returns empty if sequence is empty
    -- lview <x0,...,xn-1> | n==0 = Nothing2
    --                     | n>0  = Just2 x0 <x1,...,xn-1>
    -- lhead <x0,...,xn-1> | n==0 = error "ModuleName.lhead: empty sequence"
    --                     | n>0  = x0
    -- ltail <x0,...,xn-1> | n==0 = <>
    --                     | n>0  = <x1,...,xn-1>
  -- view the right element
  rview     :: s a -> Maybe2 (s a) a
  rhead     :: s a -> a      -- signals error if sequence is empty
  rtail     :: s a -> s a    -- returns empty if sequence is empty
    -- rview <x0,...,xn-1> | n==0 = Nothing2
    --                     | n>0  = Just2 <x0,...,xn-2> xn-1
    -- rhead <x0,...,xn-1> | n==0 = error "ModuleName.rhead: empty sequence"
    --                     | n>0  = xn-1
    -- rtail <x0,...,xn-1> | n==0 = <>
    --                     | n>0  = <x0,...,xn-2>
  ----------------------------------------------------------------------
  -- Observers
  null      :: s a -> Bool
  size      :: s a -> Int
    -- null <x0,...,xn-1> = (n==0)
    -- size <x0,...,xn-1> = n
  toList    :: s a -> [a]
    -- toList <x0,...,xn-1> = [x0,...,xn-1]
  ----------------------------------------------------------------------
  -- Concat and reverse
  -- flattening a sequence
  concat    :: s (s a) -> s a
    -- concat xss = foldr append empty xss
  -- reversing a sequence
  reverse     :: s a -> s a
  reverseOnto :: s a -> s a -> s a
    -- reverse <x0,...,xn-1> = <xn-1,...,x0>
    -- reverseOnto <x0,...,xn-1> <y0,...,ym-1> = <xn-1,...,x0,y0,...,ym-1>
  ----------------------------------------------------------------------
  -- Maps and folds
  map       :: (a -> b) -> s a -> s b
  concatMap :: (a -> s b) -> s a -> s b
    -- map f <x0,...,xn-1> = <f x0,...,f xn-1>
    -- concatMap f xs = concat (map f xs)
  foldr     :: (a -> b -> b) -> b -> s a -> b
  foldl     :: (b -> a -> b) -> b -> s a -> b
    -- foldr (+) c <x0,...,xn-1> = x0 + (x1 + ... + (xn-1 + c))
    -- foldl (+) c <x0,...,xn-1> = ((c + x0) + x1) + ... + xn-1
  foldr1    :: (a -> a -> a) -> s a -> a  -- signals error if sequence is empty
  foldl1    :: (a -> a -> a) -> s a -> a  -- signals error if sequence is empty
    -- foldr1 (+) <x0,...,xn-1>
    --   | n==0 = error "ModuleName.foldr1: empty sequence"
    --   | n>0  = x0 + (x1 + ... + xn-1)
    -- foldl1 (+) <x0,...,xn-1>
    --   | n==0 = error "ModuleName.foldl1: empty sequence"
    --   | n>0  = (x0 + x1) + ... + xn-1
  reducer   :: (a -> a -> a) -> a -> s a -> a
  reducel   :: (a -> a -> a) -> a -> s a -> a
  reduce1   :: (a -> a -> a) -> s a -> a  -- signals error if sequence is empty
    -- reduce is similar to fold, but combines elements in a balanced fashion
    -- the combining function should usually be associative
    --
    -- reducer (+) x xs = reduce1 (+) (cons x xs)
    -- reducel (+) x xs = reduce1 (+) (snoc xs x)
    --
    -- reduce1 (+) <x> = x
    -- reduce1 (+) <x0,...,xn-1> =
    --   (reduce1 (+) <x0,...,xi>) + (reduce1 (+) <xi+1,...,xn-1>)
    --     for some i such that 0 <= i && i < n-1
    --
    -- Although the exact value of i is unspecified it tends toward n/2
    -- so that the depth of calls to + is at most logarithmic
  ----------------------------------------------------------------------
  -- Subsequences
  take      :: Int -> s a -> s a
  drop      :: Int -> s a -> s a
  splitAt   :: Int -> s a -> (s a, s a)
    -- take i xs = fst (splitAt i xs)
    -- drop i xs = snd (splitAt i xs)
    --
    -- splitAt i xs
    --   | i < 0  = (<>           , <x0,...,xn-1>)
    --   | i < n  = (<x0,...,xi-1>, <xi,...,xn-1>)
    --   | i >= n = (<x0,...,xn-1>, <>           )
  subseq    :: Int -> Int -> s a -> s a
    -- args are index/length rather than start index/end index
    --
    -- subseq i len xs = take len (drop i xs)
  ----------------------------------------------------------------------
  -- Predicate-based operations
  filter    :: (a -> Bool) -> s a -> s a
  partition :: (a -> Bool) -> s a -> (s a, s a)
    -- filter p xs = foldr pcons empty xs
    --   where pcons x xs = if p x then cons x xs else xs
    --
    -- partition p xs = (filter p xs, filter (not . p) xs)
  takeWhile :: (a -> Bool) -> s a -> s a
  dropWhile :: (a -> Bool) -> s a -> s a
  splitWhile :: (a -> Bool) -> s a -> (s a, s a)
    -- takeWhile p xs = fst (splitWhile p xs)
    -- dropWhile p xs = snd (splitWhile p xs)
    --
    -- splitWhile p <x0,...,xn-1> = (<x0,...,xi-1>, <xi,...,xn-1>)
    --   where i = min j such that p xj (or n if no such j)
  ----------------------------------------------------------------------
  -- Index-based operations (zero-based)
  inBounds  :: s a -> Int -> Bool
    -- inBounds <x0,...,xn-1> i = (0 <= i && i < n)
  lookup    :: s a -> Int -> a  -- signals error if index out of bounds
  lookupM   :: s a -> Int -> Maybe a
  lookupWithDefault :: a -> s a -> Int -> a
    -- lookup xs@<x0,...,xn-1> i
    --   | inBounds xs = xi
    --   | otherwise = error "ModuleName.lookup: index out of bounds"
    -- lookupM xs@<x0,...,xn-1> i
    --   | inBounds xs = Just xi
    --   | otherwise = Nothing
    -- lookupWithDefault d xs@<x0,...,xn-1> i
    --   | inBounds xs = xi
    --   | otherwise = d
  update    :: Int -> a -> s a -> s a
  adjust    :: (a -> a) -> Int -> s a -> s a -- map a single element
    -- both return original sequence if index out of bounds
    --
    -- update i y xs@<x0,...,xn-1>
    --   | inBounds xs = <x0,...xi-1,y,xi+1,...,xn-1>
    --   | otherwise = xs
    -- adjust f i xs@<x0,...,xn-1>
    --   | inBounds xs = <x0,...xi-1,f xi,xi+1,...,xn-1>
    --   | otherwise = xs
  mapWithIndex    :: (Int -> a -> b) -> s a -> s b
  foldrWithIndex  :: (Int -> a -> b -> b) -> b -> s a -> b
  foldlWithIndex  :: (b -> Int -> a -> b) -> b -> s a -> b
    -- mapWithIndex f <x0,...,xn-1> = <f 0 x0,...,f (n-1) xn-1>
    -- foldrWithIndex f c <x0,...,xn-1> =
    --   f 0 x0 (f 1 x1 (... (f (n-1) xn-1 c)))
    -- foldlWithIndex f c <x0,...,xn-1> =
    --   f (...(f (f c 0 x0) 1 x1)...) (n-1) xn-1)
  ----------------------------------------------------------------------
  -- Zips and unzips
  zip      :: s a -> s b -> s (a,b)
  zip3     :: s a -> s b -> s c -> s (a,b,c)
    -- zip <x0,...,xn-1> <y0,...,ym-1> = <(x0,y0),...,(xj-1,yj-1)>
    --   where j = min {n,m}
    -- zip3 <x0,...,xn-1> <y0,...,ym-1> <z0,...,zk-1> =
    --     <(x0,y0,z0),...,(xj-1,yj-1,zj-1)>
    --   where j = min {n,m,k}
  zipWith  :: (a -> b -> c) -> s a -> s b -> s c
  zipWith3 :: (a -> b -> c -> d) -> s a -> s b -> s c -> s d
    -- zipWith f xs ys = map (uncurry f) (zip xs ys)
    -- zipWith3 f xs ys zs = map (uncurry f) (zip3 xs ys zs)
  unzip    :: s (a,b) -> (s a, s b)
  unzip3   :: s (a,b,c) -> (s a, s b, s c)
    -- unzip xs = (map fst xs, map snd xs)
    -- unzip3 xs = (map fst3 xs, map snd3 xs, map thd3 xs)
    --   where fst3 (x,y,z) = x
    --         snd3 (x,y,z) = y
    --         thd3 (x,y,z) = z
  unzipWith  :: (a -> b) -> (a -> c) -> s a -> (s b, s c)
  unzipWith3 :: (a -> b) -> (a -> c) -> (a -> d) -> s a -> (s b, s c, s d)
    -- unzipWith f g xs = (map f xs, map g xs)
    -- unzipWith3 f g h xs = (map f xs, map g xs, map h xs)
  ----------------------------------------------------------------------
  -- Documentation
  instanceName :: s a -> String
    -- The name of the module implementing s.
----------------------------------------------------------------------
-- Other possible operations not currently included
{-
insertAt :: Int -> a -> s a -> s a
-- adds to front or rear if index out of bounds
--
-- insertAt i y xs@<x0,...,xn-1>
-- | i < 0 = cons y xs
-- | i >= n = snoc xs y
-- | otherwise = <x0,...,xi-1,y,xi,...,xn-1>
deleteAt :: Int -> s a -> s a
-- returns original sequence if index out of bounds
--
-- deleteAt i xs@<x0,...,xn-1>
-- | i < 0 = xs
-- | i >= n = xs
-- | otherwise = <x0,...,xi-1,xi+1,...,xn-1>
insertAt i x s = append before (cons x after)
where (before, after) = splitAt i s
deleteAt i s = if i < 0 then s else append before (ltail after)
where (before, after) = splitAt i s
-}
| alekar/hugs | fptools/hslibs/data/edison/Seq/Sequence.hs | bsd-3-clause | 10,061 | 2 | 12 | 2,979 | 1,676 | 934 | 742 | 67 | 0 |
{-# LANGUAGE RankNTypes #-}
module Lets.OpticPolyLens (
Lens(..)
, getsetLaw
, setgetLaw
, setsetLaw
, get
, set
, modify
, (%~)
, fmodify
, (|=)
, fstL
, sndL
, mapL
, setL
, compose
, (|.)
, identity
, product
, (***)
, choice
, (|||)
, cityL
, countryL
, streetL
, suburbL
, localityL
, ageL
, nameL
, addressL
, intAndIntL
, intAndL
, getSuburb
, setStreet
, getAgeAndCountry
, setCityAndLocality
, getSuburbOrCity
, setStreetOrState
, modifyCityUppercase
, modifyIntandLengthEven
) where
import Data.Char(toUpper)
import Data.Map(Map)
import qualified Data.Map as Map(insert, delete, lookup)
import Data.Set(Set)
import qualified Data.Set as Set(insert, delete, member)
import Lets.Data(AlongsideLeft(AlongsideLeft, getAlongsideLeft), AlongsideRight(AlongsideRight, getAlongsideRight), Identity(Identity, getIdentity), Const(Const, getConst), IntAnd(IntAnd), Person(Person), Locality(Locality), Address(Address), bool)
import Prelude hiding (product)
-- $setup
-- >>> import qualified Data.Map as Map(fromList)
-- >>> import qualified Data.Set as Set(fromList)
-- >>> import Data.Char(ord)
-- >>> import Lets.Data
data Lens s t a b =
Lens
(forall f. Functor f => (a -> f b) -> s -> f t)
-- | Read the focus of a lens out of a structure, by running the lens
-- with the 'Const' functor (the update is discarded).
get ::
  Lens s t a b
  -> s
  -> a
get (Lens r) =
  getConst . r Const
-- | Replace the focus of a lens in a structure, by running the lens
-- with the 'Identity' functor.
set ::
  Lens s t a b
  -> s
  -> b
  -> t
set (Lens r) a b =
  getIdentity (r (const (Identity b)) a)
-- | The get/set law of lenses. This function should always return @True@.
getsetLaw ::
  Eq s =>
  Lens s s a a
  -> s
  -> Bool
getsetLaw l =
  \a -> set l a (get l a) == a
-- | The set/get law of lenses. This function should always return @True@.
setgetLaw ::
  Eq a =>
  Lens s s a a
  -> s
  -> a
  -> Bool
setgetLaw l a b =
  get l (set l a b) == b
-- | The set/set law of lenses. This function should always return @True@.
setsetLaw ::
  Eq s =>
  Lens s s a b
  -> s
  -> b
  -> b
  -> Bool
setsetLaw l a b1 b2 =
  set l (set l a b1) b2 == set l a b2
----
-- |
--
-- >>> modify fstL (+1) (0 :: Int, "abc")
-- (1,"abc")
--
-- >>> modify sndL (+1) ("abc", 0 :: Int)
-- ("abc",1)
--
-- prop> let types = (x :: Int, y :: String) in modify fstL id (x, y) == (x, y)
--
-- prop> let types = (x :: Int, y :: String) in modify sndL id (x, y) == (x, y)
-- | Modify the focus of a lens with the given function, by running the
-- lens with the 'Identity' functor.
--
-- >>> modify fstL (+1) (0 :: Int, "abc")
-- (1,"abc")
--
-- >>> modify sndL (+1) ("abc", 0 :: Int)
-- ("abc",1)
modify ::
  Lens s t a b
  -> (a -> b)
  -> s
  -> t
modify (Lens r) f =
  getIdentity . r (Identity . f)
-- | An alias for @modify@.
(%~) ::
  Lens s t a b
  -> (a -> b)
  -> s
  -> t
(%~) =
  modify

infixr 4 %~
-- |
--
-- >>> fstL .~ 1 $ (0 :: Int, "abc")
-- (1,"abc")
--
-- >>> sndL .~ 1 $ ("abc", 0 :: Int)
-- ("abc",1)
--
-- prop> let types = (x :: Int, y :: String) in set fstL (x, y) z == (fstL .~ z $ (x, y))
--
-- prop> let types = (x :: Int, y :: String) in set sndL (x, y) z == (sndL .~ z $ (x, y))
-- | Set the focus of a lens to a constant value: @l .~ b@ is 'set'
-- with the arguments arranged for infix use.
--
-- >>> fstL .~ 1 $ (0 :: Int, "abc")
-- (1,"abc")
--
-- >>> sndL .~ 1 $ ("abc", 0 :: Int)
-- ("abc",1)
(.~) ::
  Lens s t a b
  -> b
  -> s
  -> t
(.~) l b s =
  set l s b

infixl 5 .~
-- |
--
-- >>> fmodify fstL (+) (5 :: Int, "abc") 8
-- (13,"abc")
--
-- >>> fmodify fstL (\n -> bool Nothing (Just (n * 2)) (even n)) (10, "abc")
-- Just (20,"abc")
--
-- >>> fmodify fstL (\n -> bool Nothing (Just (n * 2)) (even n)) (11, "abc")
-- Nothing
-- | Modify the focus of a lens with an effectful function. This is the
-- lens's underlying van Laarhoven function applied directly.
--
-- >>> fmodify fstL (+) (5 :: Int, "abc") 8
-- (13,"abc")
--
-- >>> fmodify fstL (\n -> bool Nothing (Just (n * 2)) (even n)) (10, "abc")
-- Just (20,"abc")
fmodify ::
  Functor f =>
  Lens s t a b
  -> (a -> f b)
  -> s
  -> f t
fmodify (Lens r) =
  r
-- |
--
-- >>> fstL |= Just 3 $ (7, "abc")
-- Just (3,"abc")
--
-- >>> (fstL |= (+1) $ (3, "abc")) 17
-- (18,"abc")
-- | Set the focus of a lens to an effectful value (ignores the old focus).
--
-- >>> fstL |= Just 3 $ (7, "abc")
-- Just (3,"abc")
--
-- >>> (fstL |= (+1) $ (3, "abc")) 17
-- (18,"abc")
(|=) ::
  Functor f =>
  Lens s t a b
  -> f b
  -> s
  -> f t
(|=) l fb =
  fmodify l (const fb)

infixl 5 |=
-- |
--
-- >>> modify fstL (*10) (3, "abc")
-- (30,"abc")
--
-- prop> let types = (x :: Int, y :: String) in getsetLaw fstL (x, y)
--
-- prop> let types = (x :: Int, y :: String) in setgetLaw fstL (x, y) z
--
-- prop> let types = (x :: Int, y :: String) in setsetLaw fstL (x, y) z
-- | A lens on the first element of a pair.
--
-- >>> modify fstL (*10) (3, "abc")
-- (30,"abc")
fstL ::
  Lens (a, x) (b, x) a b
fstL =
  Lens (\p (a, x) -> fmap (\b -> (b, x)) (p a))
-- |
--
-- >>> modify sndL (++ "def") (13, "abc")
-- (13,"abcdef")
--
-- prop> let types = (x :: Int, y :: String) in getsetLaw sndL (x, y)
--
-- prop> let types = (x :: Int, y :: String) in setgetLaw sndL (x, y) z
--
-- prop> let types = (x :: Int, y :: String) in setsetLaw sndL (x, y) z
-- | A lens on the second element of a pair.
--
-- >>> modify sndL (++ "def") (13, "abc")
-- (13,"abcdef")
sndL ::
  Lens (x, a) (x, b) a b
sndL =
  Lens (\p (x, a) -> fmap (\b -> (x, b)) (p a))
-- |
--
-- >>> get (mapL 3) (Map.fromList (map (\c -> (ord c - 96, c)) ['a'..'d']))
-- Just 'c'
--
-- >>> get (mapL 33) (Map.fromList (map (\c -> (ord c - 96, c)) ['a'..'d']))
-- Nothing
--
-- >>> set (mapL 3) (Map.fromList (map (\c -> (ord c - 96, c)) ['a'..'d'])) (Just 'X')
-- fromList [(1,'a'),(2,'b'),(3,'X'),(4,'d')]
--
-- >>> set (mapL 33) (Map.fromList (map (\c -> (ord c - 96, c)) ['a'..'d'])) (Just 'X')
-- fromList [(1,'a'),(2,'b'),(3,'c'),(4,'d'),(33,'X')]
--
-- >>> set (mapL 3) (Map.fromList (map (\c -> (ord c - 96, c)) ['a'..'d'])) Nothing
-- fromList [(1,'a'),(2,'b'),(4,'d')]
--
-- >>> set (mapL 33) (Map.fromList (map (\c -> (ord c - 96, c)) ['a'..'d'])) Nothing
-- fromList [(1,'a'),(2,'b'),(3,'c'),(4,'d')]
-- | A lens on the value at a key in a 'Map'. Getting yields
-- @Maybe v@; setting @Just v@ inserts and setting @Nothing@ deletes.
--
-- >>> get (mapL 3) (Map.fromList (map (\c -> (ord c - 96, c)) ['a'..'d']))
-- Just 'c'
--
-- >>> set (mapL 33) (Map.fromList (map (\c -> (ord c - 96, c)) ['a'..'d'])) Nothing
-- fromList [(1,'a'),(2,'b'),(3,'c'),(4,'d')]
mapL ::
  Ord k =>
  k
  -> Lens (Map k v) (Map k v) (Maybe v) (Maybe v)
mapL k =
  Lens (\p m ->
    fmap (maybe (Map.delete k m) (\v -> Map.insert k v m)) (p (Map.lookup k m)))
-- |
--
-- >>> get (setL 3) (Set.fromList [1..5])
-- True
--
-- >>> get (setL 33) (Set.fromList [1..5])
-- False
--
-- >>> set (setL 3) (Set.fromList [1..5]) True
-- fromList [1,2,3,4,5]
--
-- >>> set (setL 3) (Set.fromList [1..5]) False
-- fromList [1,2,4,5]
--
-- >>> set (setL 33) (Set.fromList [1..5]) True
-- fromList [1,2,3,4,5,33]
--
-- >>> set (setL 33) (Set.fromList [1..5]) False
-- fromList [1,2,3,4,5]
-- | A lens on membership of an element in a 'Set'. Getting yields
-- whether the element is present; setting 'True' inserts it and
-- setting 'False' deletes it.
--
-- >>> get (setL 3) (Set.fromList [1..5])
-- True
--
-- >>> set (setL 3) (Set.fromList [1..5]) False
-- fromList [1,2,4,5]
setL ::
  Ord k =>
  k
  -> Lens (Set k) (Set k) Bool Bool
setL k =
  Lens (\p s ->
    fmap (\b -> if b then Set.insert k s else Set.delete k s) (p (Set.member k s)))
-- |
--
-- >>> get (compose fstL sndL) ("abc", (7, "def"))
-- 7
--
-- >>> set (compose fstL sndL) ("abc", (7, "def")) 8
-- ("abc",(8,"def"))
-- | Compose two lenses: focus on @a@ inside @s@, which is itself the
-- focus inside @q@. For van Laarhoven lenses this is plain function
-- composition of the underlying traversal functions.
--
-- >>> get (compose fstL sndL) ("abc", (7, "def"))
-- 7
--
-- >>> set (compose fstL sndL) ("abc", (7, "def")) 8
-- ("abc",(8,"def"))
compose ::
  Lens s t a b
  -> Lens q r s t
  -> Lens q r a b
compose (Lens f) (Lens g) =
  Lens (g . f)
-- | An alias for @compose@.
(|.) ::
  Lens s t a b
  -> Lens q r s t
  -> Lens q r a b
(|.) =
  compose

infixr 9 |.
-- |
--
-- >>> get identity 3
-- 3
--
-- >>> set identity 3 4
-- 4
-- | The identity lens: the whole value is the focus.
--
-- >>> get identity 3
-- 3
--
-- >>> set identity 3 4
-- 4
identity ::
  Lens a b a b
identity =
  Lens id
-- |
--
-- >>> get (product fstL sndL) (("abc", 3), (4, "def"))
-- ("abc","def")
--
-- >>> set (product fstL sndL) (("abc", 3), (4, "def")) ("ghi", "jkl")
-- (("ghi",3),(4,"jkl"))
-- | A lens on both halves of a pair of structures, built from a lens
-- on each half. The 'AlongsideLeft'/'AlongsideRight' functors carry the
-- untouched half through each inner traversal.
--
-- >>> get (product fstL sndL) (("abc", 3), (4, "def"))
-- ("abc","def")
--
-- >>> set (product fstL sndL) (("abc", 3), (4, "def")) ("ghi", "jkl")
-- (("ghi",3),(4,"jkl"))
product ::
  Lens s t a b
  -> Lens q r c d
  -> Lens (s, q) (t, r) (a, c) (b, d)
product (Lens f) (Lens g) =
  Lens (\p (s, q) ->
    getAlongsideRight
      (g (\c -> AlongsideRight
                  (getAlongsideLeft (f (\a -> AlongsideLeft (p (a, c))) s)))
         q))
-- | An alias for @product@.
(***) ::
  Lens s t a b
  -> Lens q r c d
  -> Lens (s, q) (t, r) (a, c) (b, d)
(***) =
  product

infixr 3 ***
-- |
--
-- >>> get (choice fstL sndL) (Left ("abc", 7))
-- "abc"
--
-- >>> get (choice fstL sndL) (Right ("abc", 7))
-- 7
--
-- >>> set (choice fstL sndL) (Left ("abc", 7)) "def"
-- Left ("def",7)
--
-- >>> set (choice fstL sndL) (Right ("abc", 7)) 8
-- Right ("abc",8)
-- | A lens that focuses through either of two structures, dispatching
-- on the 'Either' constructor and re-tagging the result.
--
-- >>> get (choice fstL sndL) (Left ("abc", 7))
-- "abc"
--
-- >>> set (choice fstL sndL) (Right ("abc", 7)) 8
-- Right ("abc",8)
choice ::
  Lens s t a b
  -> Lens q r a b
  -> Lens (Either s q) (Either t r) a b
choice (Lens f) (Lens g) =
  Lens (\p -> either (fmap Left . f p) (fmap Right . g p))
-- | An alias for @choice@.
(|||) ::
  Lens s t a b
  -> Lens q r a b
  -> Lens (Either s q) (Either t r) a b
(|||) =
  choice

infixr 2 |||
----
-- | A simple (monomorphic) lens: the structure and focus types do not
-- change on update.
type Lens' a b =
  Lens a a b b
-- | Lens on the city field of a 'Locality'.
cityL ::
  Lens' Locality String
cityL =
  Lens
    (\p (Locality c t y) -> fmap (\c' -> Locality c' t y) (p c))
-- | Lens on the state field of a 'Locality'.
stateL ::
  Lens' Locality String
stateL =
  Lens
    (\p (Locality c t y) -> fmap (\t' -> Locality c t' y) (p t))
-- | Lens on the country field of a 'Locality'.
countryL ::
  Lens' Locality String
countryL =
  Lens
    (\p (Locality c t y) -> fmap (\y' -> Locality c t y') (p y))
-- | Lens on the street field of an 'Address'.
streetL ::
  Lens' Address String
streetL =
  Lens
    (\p (Address t s l) -> fmap (\t' -> Address t' s l) (p t))
-- | Lens on the suburb field of an 'Address'.
suburbL ::
  Lens' Address String
suburbL =
  Lens
    (\p (Address t s l) -> fmap (\s' -> Address t s' l) (p s))
-- | Lens on the locality field of an 'Address'.
localityL ::
  Lens' Address Locality
localityL =
  Lens
    (\p (Address t s l) -> fmap (\l' -> Address t s l') (p l))
-- | Lens on the age field of a 'Person'.
ageL ::
  Lens' Person Int
ageL =
  Lens
    (\p (Person a n d) -> fmap (\a' -> Person a' n d) (p a))
-- | Lens on the name field of a 'Person'.
nameL ::
  Lens' Person String
nameL =
  Lens
    (\p (Person a n d) -> fmap (\n' -> Person a n' d) (p n))
-- | Lens on the address field of a 'Person'.
addressL ::
  Lens' Person Address
addressL =
  Lens
    (\p (Person a n d) -> fmap (\d' -> Person a n d') (p d))
-- | Lens on the 'Int' component of an 'IntAnd'.
intAndIntL ::
  Lens' (IntAnd a) Int
intAndIntL =
  Lens
    (\p (IntAnd n a) -> fmap (\n' -> IntAnd n' a) (p n))
-- lens for polymorphic update
intAndL ::
  Lens (IntAnd a) (IntAnd b) a b
intAndL =
  Lens
    (\p (IntAnd n a) -> fmap (\a' -> IntAnd n a') (p a))
-- |
--
-- >>> getSuburb fred
-- "Fredville"
--
-- >>> getSuburb mary
-- "Maryland"
-- | Read a person's suburb by composing the suburb and address lenses.
--
-- >>> getSuburb fred
-- "Fredville"
--
-- >>> getSuburb mary
-- "Maryland"
getSuburb ::
  Person
  -> String
getSuburb =
  get (suburbL |. addressL)
-- |
--
-- >>> setStreet fred "Some Other St"
-- Person 24 "Fred" (Address "Some Other St" "Fredville" (Locality "Fredmania" "New South Fred" "Fredalia"))
--
-- >>> setStreet mary "Some Other St"
-- Person 28 "Mary" (Address "Some Other St" "Maryland" (Locality "Mary Mary" "Western Mary" "Maristan"))
-- | Replace a person's street by composing the street and address lenses.
--
-- >>> setStreet fred "Some Other St"
-- Person 24 "Fred" (Address "Some Other St" "Fredville" (Locality "Fredmania" "New South Fred" "Fredalia"))
setStreet ::
  Person
  -> String
  -> Person
setStreet =
  set (streetL |. addressL)
-- |
--
-- >>> getAgeAndCountry (fred, maryLocality)
-- (24,"Maristan")
--
-- >>> getAgeAndCountry (mary, fredLocality)
-- (28,"Fredalia")
-- | Read a person's age and a locality's country in one pass, using the
-- product of the two lenses.
--
-- >>> getAgeAndCountry (fred, maryLocality)
-- (24,"Maristan")
getAgeAndCountry ::
  (Person, Locality)
  -> (Int, String)
getAgeAndCountry =
  get (ageL *** countryL)
-- |
--
-- >>> setCityAndLocality (fred, maryAddress) ("Some Other City", fredLocality)
-- (Person 24 "Fred" (Address "15 Fred St" "Fredville" (Locality "Some Other City" "New South Fred" "Fredalia")),Address "83 Mary Ln" "Maryland" (Locality "Fredmania" "New South Fred" "Fredalia"))
--
-- >>> setCityAndLocality (mary, fredAddress) ("Some Other City", maryLocality)
-- (Person 28 "Mary" (Address "83 Mary Ln" "Maryland" (Locality "Some Other City" "Western Mary" "Maristan")),Address "15 Fred St" "Fredville" (Locality "Mary Mary" "Western Mary" "Maristan"))
-- | Set a person's city (reached through their address) and an
-- address's locality in one pass, using a product of composed lenses.
-- ('|.' binds tighter than '***', so no extra parentheses are needed.)
setCityAndLocality ::
  (Person, Address) -> (String, Locality) -> (Person, Address)
setCityAndLocality =
  set (cityL |. localityL |. addressL *** localityL)
-- |
--
-- >>> getSuburbOrCity (Left maryAddress)
-- "Maryland"
--
-- >>> getSuburbOrCity (Right fredLocality)
-- "Fredmania"
-- | Read the suburb of an address or the city of a locality, whichever
-- side of the 'Either' is present.
getSuburbOrCity ::
  Either Address Locality
  -> String
getSuburbOrCity =
  get (suburbL ||| cityL)
-- |
--
-- >>> setStreetOrState (Right maryLocality) "Some Other State"
-- Right (Locality "Mary Mary" "Some Other State" "Maristan")
--
-- >>> setStreetOrState (Left fred) "Some Other St"
-- Left (Person 24 "Fred" (Address "Some Other St" "Fredville" (Locality "Fredmania" "New South Fred" "Fredalia")))
-- | Set the street of a person (reached through their address) or the
-- state of a locality, whichever side of the 'Either' is present.
setStreetOrState ::
  Either Person Locality
  -> String
  -> Either Person Locality
setStreetOrState =
  set (streetL |. addressL ||| stateL)
-- |
--
-- >>> modifyCityUppercase fred
-- Person 24 "Fred" (Address "15 Fred St" "Fredville" (Locality "FREDMANIA" "New South Fred" "Fredalia"))
--
-- >>> modifyCityUppercase mary
-- Person 28 "Mary" (Address "83 Mary Ln" "Maryland" (Locality "MARY MARY" "Western Mary" "Maristan"))
-- | Upper-case the city buried three levels deep in a person's address.
modifyCityUppercase ::
  Person
  -> Person
modifyCityUppercase =
  cityL |. localityL |. addressL %~ map toUpper
-- |
--
-- >>> modify intAndL (even . length) (IntAnd 10 "abc")
-- IntAnd 10 False
--
-- >>> modify intAndL (even . length) (IntAnd 10 "abcd")
-- IntAnd 10 True
-- | Replace the payload of an 'IntAnd' with whether its length is even,
-- demonstrating the polymorphic-update lens 'intAndL'.
modifyIntandLengthEven ::
  IntAnd [a]
  -> IntAnd Bool
modifyIntandLengthEven =
  intAndL %~ even . length
| bitemyapp/lets-lens | src/Lets/OpticPolyLens.hs | bsd-3-clause | 11,062 | 0 | 13 | 2,471 | 2,651 | 1,522 | 1,129 | 289 | 1 |
{-# LANGUAGE ForeignFunctionInterface #-}
--------------------------------------------------------------------
-- |
-- Module : Network.Curl.Easy
-- Copyright : (c) Galois Inc 2007-2009
-- License :
--
-- Maintainer: Sigbjorn Finne <[email protected]>
-- Stability : provisional
-- Portability: portable
--
-- Haskell binding to the libcurl <http://curl.haxx.se/> \"easy\" API.
-- The \"easy\" API provides a higher-level, easy-to-get-started calling
-- interface to the library's wide range of features for interacting
-- with HTTP\/FTP\/etc servers.
--
--------------------------------------------------------------------
module Network.Curl.Easy
( initialize -- :: IO Curl
, perform -- :: Curl -> IO CurlCode
, setopt -- :: Curl -> CurlOption -> IO CurlCode
, duphandle -- :: Curl -> IO Curl
, reset -- :: Curl -> IO ()
, curl_global_init -- :: CInt -> IO CurlCode
, curl_global_cleanup -- :: IO ()
, curl_version_number -- :: IO Int
, curl_version_string -- :: IO String
) where
import Network.Curl.Types
import Network.Curl.Opts
import Network.Curl.Code
import Network.Curl.Post
import Network.Curl.Debug
import Data.IORef(IORef)
import Foreign.Ptr
import Foreign.Marshal.Alloc(free)
import Foreign.C.Types
import Foreign.C.String
import Control.Monad
import Data.Maybe
-- | Initialise a curl instance
initialize :: IO Curl
initialize = do
  h <- easy_initialize
  mkCurl h
-- XXX: Is running cleanup here OK?
-- | Reset the handle to its library-default state and run the cleanup
-- actions registered against it (freeing option allocations).
reset :: Curl -> IO ()
reset hh = curlPrim hh $ \r h -> easy_reset h >> runCleanup r
-- | Duplicate a handle; the cleanup actions are shared with the
-- original handle rather than copied.
duphandle :: Curl -> IO Curl
duphandle hh = curlPrim hh $ \r h ->
  do h1 <- easy_duphandle h
     cleanup <- shareCleanup r
     mkCurlWithCleanup h1 cleanup
-- | Set a 'CurlOption' on a curl handle.
--
-- Resources allocated while marshalling an option value (C strings,
-- curl slists, callback 'FunPtr's, POST data) are registered in the
-- handle's cleanup map via 'updateCleanup', so they are released when
-- the handle is cleaned up or when the same option is set again.
setopt :: Curl
       -> CurlOption
       -> IO CurlCode
setopt hh o = curlPrim hh $ \ r h -> unmarshallOption (easy_um r h) o
 where
  -- Per-handle unmarshaller: each field marshals one option-value
  -- shape to the corresponding C setter.
  easy_um :: IORef OptionMap -> CurlH -> Unmarshaller CurlCode
  easy_um r h =
    Unmarshaller
      { u_long -- :: Int -> Long -> IO CurlCode
         = \ i x -> liftM toCode $ easy_setopt_long h i x
      , u_llong -- :: Int -> LLong -> IO CurlCode
         = \ i x -> liftM toCode $ easy_setopt_llong h i x
      , u_string -- :: Int -> String -> IO CurlCode
         = \ i x -> do debug $ "ALLOC: " ++ x
                       c_x <- newCString x
                       updateCleanup r i $ debug ("FREE: "++ x) >> free c_x
                       liftM toCode $ easy_setopt_string h i c_x
      , u_strings -- :: Int -> [String] -> IO CurlCode
         = \ i x ->
            do debug ("ALLOC: " ++ show x)
               -- curl_slist_append will copy its string argument
               let addOne ip s = withCString s $ curl_slist_append ip
               ip <- foldM addOne nullPtr x
               updateCleanup r i $
                 debug ("FREE: " ++ show x) >> curl_slist_free ip
               liftM toCode $ easy_setopt_string h i (castPtr ip)
      , u_ptr -- :: Int -> Ptr () -> IO a
         = \ i x -> liftM toCode $ easy_setopt_ptr h i x
      , u_writeFun -- :: Int -> WriteFunction -> IO a
         = \ i x -> do
             debug "ALLOC: WRITER"
             fp <- mkWriter x
             updateCleanup r i $ debug "FREE: WRITER" >> freeHaskellFunPtr fp
             liftM toCode $ easy_setopt_wfun h i fp
      , u_readFun -- :: Int -> ReadFunction -> IO a
         = \ i x -> do
             -- a 'Nothing' result from the Haskell callback maps to
             -- curl's abort sentinel
             let wrapResult f a b c d = do
                   mb <- f a b c d
                   return (fromMaybe curl_readfunc_abort mb)
             debug "ALLOC: READER"
             fp <- mkReader (wrapResult x)
             updateCleanup r i $ debug "FREE: READER" >> freeHaskellFunPtr fp
             liftM toCode $ easy_setopt_rfun h i fp
      , u_progressFun -- :: Int -> ProgressFunction -> IO a
         = \ i x -> do
             debug "ALLOC: PROGRESS"
             fp <- mkProgress x
             updateCleanup r i $ debug "FREE: PROGRESS" >> freeHaskellFunPtr fp
             liftM toCode $ easy_setopt_fptr h i fp
      , u_debugFun -- :: Int -> DebugFunction -> IO a
         = \ i debFun -> do
             -- the raw-handle argument is ignored and replaced by the
             -- wrapped 'Curl' value; the C callback must return 0
             let wrapFun fun _a b c d e =
                   fun hh (toEnum (fromIntegral b)) c d e >> return 0
             debug "ALLOC: DEBUG"
             fp <- mkDebugFun (wrapFun debFun)
             updateCleanup r i $ debug "FREE: DEBUG" >> freeHaskellFunPtr fp
             liftM toCode $ easy_setopt_fptr h i fp
      , u_posts -- :: Int -> [HttpPost] -> IO a
         = \ i x -> do
             debug "ALLOC: POSTS"
             p <- marshallPosts x
             updateCleanup r i $ debug "FREE: POSTS" >> curl_formfree p
             liftM toCode $ easy_setopt_ptr h i p
      , u_sslctxt -- :: Int -> SSLCtxtFunction -> IO a
         = \ i x -> do
             debug "ALLOC: SSL_FUN"
             p <- mkSslCtxtFun x
             updateCleanup r i $ debug "FREE: SSL_FUN" >> freeHaskellFunPtr p
             liftM toCode $ easy_setopt_fptr h i p
      , u_ioctl_fun -- :: Int -> Ptr () -> IO a
         = \ i x -> liftM toCode $ easy_setopt_ptr h i x
      , u_convFromNetwork -- :: Int -> Ptr () -> IO a
         = \ i x -> liftM toCode $ easy_setopt_ptr h i x
      , u_convToNetwork -- :: Int -> Ptr () -> IO a
         = \ i x -> liftM toCode $ easy_setopt_ptr h i x
      , u_convFromUtf8 -- :: Int -> Ptr () -> IO a
         = \ i x -> liftM toCode $ easy_setopt_ptr h i x
      , u_sockoptFun -- :: Int -> Ptr () -> IO a
         = \ i x -> liftM toCode $ easy_setopt_ptr h i x
      }
-- | Run the transfer described by the handle's current options and
-- translate the C result code to a 'CurlCode'.
perform :: Curl -> IO CurlCode
perform hh = fmap toCode (curlPrim hh (\_ h -> easy_perform_prim h))
-- | Perform libcurl's global initialisation with the given flags.
curl_global_init :: CInt -> IO CurlCode
curl_global_init = fmap toCode . curl_global_init_prim
-- | The linked libcurl version, as a packed integer.
curl_version_number :: IO Int
curl_version_number = fmap fromIntegral curl_version_num
-- | The linked libcurl version, as a human-readable string.
curl_version_string :: IO String
curl_version_string = curl_version_str >>= peekCString
-- FFI decls
--
-- Raw bindings to the C shim and to libcurl's easy API. The
-- @easy_setopt_*@ variants all funnel into curl_easy_setopt with the
-- appropriately typed third argument.
foreign import ccall
  "curl_version_num" curl_version_num :: IO CInt
foreign import ccall
  "curl_version_str" curl_version_str :: IO CString
foreign import ccall
  "curl/easy.h curl_global_init" curl_global_init_prim :: CInt -> IO CInt
foreign import ccall
  "curl/easy.h curl_global_cleanup" curl_global_cleanup :: IO ()
foreign import ccall
  "curl/easy.h curl_easy_init" easy_initialize :: IO CurlH
foreign import ccall
  "curl/easy.h curl_easy_perform" easy_perform_prim :: CurlH -> IO CInt
foreign import ccall
  "curl_easy_duphandle" easy_duphandle :: CurlH -> IO CurlH
foreign import ccall
  "curl_easy_reset" easy_reset :: CurlH -> IO ()
foreign import ccall
  "curl_easy_setopt_long" easy_setopt_long :: CurlH -> Int -> Long -> IO CInt
foreign import ccall
  "curl_easy_setopt_longlong" easy_setopt_llong :: CurlH -> Int -> LLong -> IO CInt
foreign import ccall
  "curl_easy_setopt_string" easy_setopt_string :: CurlH -> Int -> Ptr CChar -> IO CInt
foreign import ccall
  "curl_easy_setopt_ptr" easy_setopt_ptr :: CurlH -> Int -> Ptr a -> IO CInt
foreign import ccall
  "curl_easy_setopt_ptr" easy_setopt_fptr :: CurlH -> Int -> FunPtr a -> IO CInt
foreign import ccall
  "curl_easy_setopt_ptr" easy_setopt_wfun :: CurlH -> Int -> FunPtr WriteFunction -> IO CInt
foreign import ccall
  "curl_easy_setopt_ptr" easy_setopt_rfun :: CurlH -> Int -> FunPtr ReadFunctionPrim -> IO CInt
-- "wrapper" imports turn Haskell callbacks into C function pointers.
-- Each allocated FunPtr is later released with freeHaskellFunPtr,
-- registered via updateCleanup in 'setopt'.
foreign import ccall "wrapper"
   mkWriter :: WriteFunction -> IO (FunPtr WriteFunction)
foreign import ccall "wrapper"
   mkReader :: ReadFunctionPrim -> IO (FunPtr ReadFunctionPrim)
foreign import ccall "wrapper"
   mkProgress :: ProgressFunction -> IO (FunPtr ProgressFunction)
foreign import ccall "wrapper"
   mkDebugFun :: DebugFunctionPrim -> IO (FunPtr DebugFunctionPrim)
foreign import ccall "wrapper"
   mkSslCtxtFun :: SSLCtxtFunction -> IO (FunPtr SSLCtxtFunction)
| GaloisInc/curl | Network/Curl/Easy.hs | bsd-3-clause | 7,789 | 0 | 20 | 2,140 | 1,921 | 964 | 957 | 163 | 1 |
{-|
Module : System.GPIO.Monad
Description : A monad for GPIO computations
Copyright : (c) 2019, Drew Hess
License : BSD3
Maintainer : Drew Hess <[email protected]>
Stability : experimental
Portability : non-portable
A monadic context for GPIO computations.
-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE Safe #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
module System.GPIO.Monad
( -- * GPIO types
--
-- | For your convenience, the following types are
-- re-exported from the "System.GPIO.Types" module.
Pin(..)
, pinNumber
, PinInputMode(..)
, PinOutputMode(..)
, PinCapabilities(..)
, PinDirection(..)
, PinActiveLevel(..)
, PinValue(..)
, PinInterruptMode(..)
-- * Some convenient constraint synonyms for 'MonadGpio' signatures.
, MaskGpioM
, ThrowGpioM
-- * MonadGpio class
, MonadGpio(..)
, withPin
-- * Safer types
--
-- | If you can restrict your use of a particular pin to just
-- one mode of operation (input, interrupt-driven input, or
-- output), you can achieve better type-safety than is
-- possible with the fully-general 'Pin' type by using the
-- one of the following more limited types and its
-- corresponding actions.
--
-- == A caveat
--
-- On some GPIO platforms (e.g., Linux @sysfs@), no provision
-- is made for opening pins in "exclusive mode," and as such,
-- pins can be opened and configured by any number of
-- processes on the system other than our own programs.
-- Therefore, even when using these safer types, a robust
-- @hpio@ program should still be prepared to deal with
-- configuration-related errors in case another process
-- re-configures a pin while the @hpio@ program is using it.
--
-- In other words, even when using these safer types, you
-- should still be prepared to handle the full range of
-- 'System.GPIO.Types.SomeGpioException's.
, InputPin
, withInputPin
, readInputPin
, getInputPinInputMode
, getInputPinActiveLevel
, setInputPinActiveLevel
, toggleInputPinActiveLevel
, InterruptPin
, withInterruptPin
, readInterruptPin
, pollInterruptPin
, pollInterruptPinTimeout
, getInterruptPinInputMode
, getInterruptPinInterruptMode
, setInterruptPinInterruptMode
, getInterruptPinActiveLevel
, setInterruptPinActiveLevel
, toggleInterruptPinActiveLevel
, OutputPin
, withOutputPin
, writeOutputPin
, toggleOutputPin
, readOutputPin
, getOutputPinOutputMode
, getOutputPinActiveLevel
, setOutputPinActiveLevel
, toggleOutputPinActiveLevel
-- * The GPIO exception hierarchy
--
-- | Re-exported from "System.GPIO.Types".
, SomeGpioException(..)
, gpioExceptionToException
, gpioExceptionFromException
) where
import Protolude hiding (bracket)
import Control.Monad.Catch (MonadMask, MonadThrow, bracket)
import Control.Monad.Catch.Pure (CatchT)
import Control.Monad.Logger (LoggingT, NoLoggingT)
import Control.Monad.Trans.Cont (ContT)
import Control.Monad.Trans.Class (MonadTrans)
import Control.Monad.Trans.Identity (IdentityT)
import "transformers" Control.Monad.Trans.List (ListT)
import Control.Monad.Trans.Maybe (MaybeT)
import qualified Control.Monad.Trans.RWS.Lazy as LazyRWS (RWST)
import qualified Control.Monad.Trans.RWS.Strict as StrictRWS (RWST)
import qualified Control.Monad.Trans.State.Lazy as LazyState (StateT)
import qualified Control.Monad.Trans.State.Strict as StrictState (StateT)
import qualified Control.Monad.Trans.Writer.Lazy as LazyWriter (WriterT)
import qualified Control.Monad.Trans.Writer.Strict as StrictWriter (WriterT)
import System.GPIO.Types
(Pin(..), PinInputMode(..), PinOutputMode(..), PinCapabilities(..),
PinActiveLevel(..), PinDirection(..), PinInterruptMode(..),
PinValue(..), SomeGpioException(..), gpioExceptionToException,
gpioExceptionFromException, pinNumber)
-- | A monad type class for GPIO computations. The type class
-- specifies a DSL for writing portable GPIO programs, and instances
-- of the type class provide the interpreter needed to run these
-- programs on a particular GPIO platform.
--
-- In the type signature, 'h' represents a (platform-dependent)
-- abstract pin handle for operating on opened pins. It is analogous
-- to a file handle.
--
-- == Active-high versus active-low logic
--
-- The DSL supports both /active-high/ and /active-low/ logic. That
-- is, the /active level/ of a GPIO pin can be configured as
-- 'ActiveHigh' or 'ActiveLow'. If a pin's active level is
-- 'ActiveHigh', then for that pin, a 'PinValue' of 'High' corresponds
-- to a "high" physical signal level, and a 'PinValue' of 'Low'
-- corresponds to a "low" physical signal level. The converse is true
-- when the pin's active level is 'ActiveLow'.
--
-- Despite the potential confusion, the advantage of supporting
-- active-low logic is that you can, if you choose, write your program
-- in terms of "positive" logic (where 'High' always means "on" and
-- 'Low' always means "off"), and, with the same program, interface
-- with either positive (active-high) or negative (active-low) logic
-- simply by setting the pin's active level before running the
-- program.
--
-- In the documentation for this package, whenever you see a reference
-- to a "pin value" or "signal level," unless otherwise noted, we mean
-- the /logical/ value or level, not the /physical/ value or level;
-- that is, we mean the abstract notion of the pin being "on" or
-- "off," independent of the voltage level seen on the physical pin.
-- If the pin is configured as active-high, then the logical and
-- physical values are one and the same; if not, they are the inverse
-- of each other.
--
-- Note that the active-high/active-low setting is per-pin; each pin's
-- active level is independent of the others.
--
-- Not all platforms natively support active-low logic. On platforms
-- without native support, the platform interpreter will invert values
-- (both read and written) in software when a pin is configured as
-- active-low.
class Monad m => MonadGpio h m | m -> h where

  -- | Get a list of available GPIO pins on the system.
  --
  -- This command makes a best-effort attempt to find the available
  -- pins, but some systems may not make the complete list available at
  -- runtime. Therefore, there may be more pins available than are
  -- returned by this action.
  pins :: m [Pin]

  -- | Query the pin's capabilities.
  pinCapabilities :: Pin -> m PinCapabilities

  -- | Open a pin for use and return a handle to it.
  --
  -- Note that on some platforms (notably Linux), pin handles are
  -- global resources and it is, strictly speaking, an error to
  -- attempt to open a pin which has already been opened. However,
  -- because there is generally no way to perform an atomic "only open
  -- the pin if it hasn't already been opened" operation on such
  -- platforms, this action will squash that particular error on those
  -- platforms and return the global handle anyway, without making any
  -- other state changes to the already-opened pin.
  --
  -- Keep in mind, however, that on these platforms where pin handles
  -- are global resources, closing one pin handle will effectively
  -- invalidate all other handles for the same pin. Be very careful to
  -- coordinate the opening and closing of pins if you are operating
  -- on the same pin in multiple threads.
  openPin :: Pin -> m h

  -- | Close the pin; i.e., indicate to the system that you no longer
  -- intend to use the pin via the given handle.
  --
  -- Note that on some platforms (notably Linux), pin handles are
  -- global resources and it is, strictly speaking, an error to
  -- attempt to close a pin which has already been closed via another
  -- handle to the same pin. However, this action will squash that
  -- error on those platforms and will simply return without making
  -- any changes to the GPIO environment.
  --
  -- Keep in mind, however, that on these platforms where pin handles
  -- are global resources, opening multiple handles for the same pin
  -- and then closing one of those handles will render all other
  -- handles for the same pin invalid. Be very careful to coordinate
  -- the opening and closing of pins if you are operating on the same
  -- pin in multiple threads.
  --
  -- Note that there are also platforms (again, notably certain Linux
  -- systems) where some pins are effectively always open and cannot
  -- be closed. Invoking this action on such a pin will squash any
  -- error that occurs when attempting to close the pin, and the
  -- action will simply return without making any changes to the GPIO
  -- environment.
  closePin :: h -> m ()

  -- | Get the pin's currently configured direction.
  --
  -- Note that there is no @setPinDirection@ action. You set the pin's
  -- direction indirectly by setting its input mode or output mode via
  -- 'setPinInputMode' and 'setPinOutputMode', respectively.
  --
  -- Rarely, a particular pin's direction may not be available in a
  -- cross-platform way. In these cases, calling this action is an
  -- error. In general, though, if the pin's capabilities indicate
  -- that it supports at least one 'PinInputMode' or 'PinOutputMode',
  -- it's safe to call this action.
  getPinDirection :: h -> m PinDirection

  -- | Get the pin's input mode.
  --
  -- If the pin is not currently configured for input, it's an error
  -- to call this action.
  getPinInputMode :: h -> m PinInputMode

  -- | Set the pin's input mode. This action will also set the pin's
  -- direction to 'In'.
  --
  -- It is an error to call this action if the given pin does not
  -- support the given input mode.
  setPinInputMode :: h -> PinInputMode -> m ()

  -- | Get the pin's output mode.
  --
  -- If the pin is not currently configured for output, it's an error
  -- to call this action.
  getPinOutputMode :: h -> m PinOutputMode

  -- | Set the pin's output mode and value. This action will also set
  -- the pin's direction to 'Out'
  --
  -- If the pin is already in output mode and you only want to change
  -- its value, use 'writePin'.
  --
  -- It is an error to call this action if the given pin does not
  -- support the given output mode.
  setPinOutputMode :: h -> PinOutputMode -> PinValue -> m ()

  -- | Read the pin's value.
  --
  -- Note that this action never blocks.
  readPin :: h -> m PinValue

  -- | Block the current thread until an event occurs on the pin which
  -- corresponds to the pin's current interrupt mode. Upon detection
  -- of the event, return the pin's value.
  --
  -- If the pin does not support interrupts, then this action's
  -- behavior is platform-dependent.
  --
  -- It is an error to call this action when the pin is not configured
  -- for input.
  --
  -- Note: due to its interaction with the threading system, this
  -- action may behave differently across different implementations of
  -- Haskell. It has only been tested with GHC. (On GHC, you should
  -- compile any program that uses this action with the @-threaded@
  -- option.)
  pollPin :: h -> m PinValue

  -- | Same as 'pollPin', except with a timeout, specified in
  -- microseconds. If no event occurs before the timeout expires, this
  -- action returns 'Nothing'; otherwise, it returns the pin's signal
  -- level wrapped in a 'Just'.
  --
  -- If the timeout value is negative, this action behaves just like
  -- 'pollPin'.
  --
  -- If the pin does not support interrupts, then this action's
  -- behavior is platform-dependent.
  --
  -- It is an error to call this action when the pin is not configured
  -- for input.
  --
  -- Note: due to its interaction with the threading system, this
  -- action may behave differently across different implementations of
  -- Haskell. It has only been tested with GHC. (On GHC, you should
  -- compile any program that uses this action with the @-threaded@
  -- option.)
  pollPinTimeout :: h -> Int -> m (Maybe PinValue)

  -- | Set the pin's output value.
  --
  -- It is an error to call this action when the pin is not configured
  -- for output.
  writePin :: h -> PinValue -> m ()

  -- | Toggle the pin's output value and return the pin's new output
  -- value.
  --
  -- It is an error to call this action when the pin is not configured
  -- for output.
  togglePin :: h -> m PinValue

  -- | Get the pin's interrupt mode.
  --
  -- If the pin does not support interrupts, it is an error to call
  -- this action.
  --
  -- (Note that 'RisingEdge' and 'FallingEdge' are relative to the
  -- pin's active level; i.e., they refer to the pin's /logical/
  -- signal edges, not its physical signal edges.)
  getPinInterruptMode :: h -> m PinInterruptMode

  -- | Set the pin's interrupt mode (only when the pin is configured
  -- for input).
  --
  -- A pin's interrupt mode determines the behavior of the 'pollPin'
  -- and 'pollPinTimeout' actions. Those actions will block the
  -- current thread on an input pin until a particular event occurs on
  -- that pin's signal waveform: a low-to-high transition
  -- ('RisingEdge'), a high-to-low transition ('FallingEdge'), or any
  -- change of level ('Level').
  --
  -- You can also disable interrupts on the pin so that 'pollPin' will
  -- block the current thread indefinitely (or until a timer expires,
  -- in the case of 'pollPinTimeout'). This functionality is useful
  -- when, for example, one thread is dedicated to servicing
  -- interrupts on a pin, and another thread wants to mask interrupts
  -- on that pin for some period of time.
  --
  -- Some pins (or even some GPIO platforms) may not support
  -- interrupts. In such cases, it is an error to call this action.
  --
  -- It is an error to use this action on a pin configured for output.
  setPinInterruptMode :: h -> PinInterruptMode -> m ()

  -- | Get the pin's active level.
  getPinActiveLevel :: h -> m PinActiveLevel

  -- | Set the pin's active level.
  setPinActiveLevel :: h -> PinActiveLevel -> m ()

  -- | Toggle the pin's active level. Returns the pin's new level.
  togglePinActiveLevel :: h -> m PinActiveLevel

  -- Default implementations: when the monad is a transformer over
  -- another 'MonadGpio', every operation simply lifts through.
  default pins :: (MonadTrans t, MonadGpio h m', t m' ~ m) => m [Pin]
  default pinCapabilities :: (MonadTrans t, MonadGpio h m', t m' ~ m) => Pin -> m PinCapabilities
  default openPin :: (MonadTrans t, MonadGpio h m', t m' ~ m) => Pin -> m h
  default closePin :: (MonadTrans t, MonadGpio h m', t m' ~ m) => h -> m ()
  default getPinDirection :: (MonadTrans t, MonadGpio h m', t m' ~ m) => h -> m PinDirection
  default getPinInputMode :: (MonadTrans t, MonadGpio h m', t m' ~ m) => h -> m PinInputMode
  default setPinInputMode :: (MonadTrans t, MonadGpio h m', t m' ~ m) => h -> PinInputMode -> m ()
  default getPinOutputMode :: (MonadTrans t, MonadGpio h m', t m' ~ m) => h -> m PinOutputMode
  default setPinOutputMode :: (MonadTrans t, MonadGpio h m', t m' ~ m) => h -> PinOutputMode -> PinValue -> m ()
  default readPin :: (MonadTrans t, MonadGpio h m', t m' ~ m) => h -> m PinValue
  default pollPin :: (MonadTrans t, MonadGpio h m', t m' ~ m) => h -> m PinValue
  default pollPinTimeout :: (MonadTrans t, MonadGpio h m', t m' ~ m) => h -> Int -> m (Maybe PinValue)
  default writePin :: (MonadTrans t, MonadGpio h m', t m' ~ m) => h -> PinValue -> m ()
  default togglePin :: (MonadTrans t, MonadGpio h m', t m' ~ m) => h -> m PinValue
  default getPinInterruptMode :: (MonadTrans t, MonadGpio h m', t m' ~ m) => h -> m PinInterruptMode
  default setPinInterruptMode :: (MonadTrans t, MonadGpio h m', t m' ~ m) => h -> PinInterruptMode -> m ()
  default getPinActiveLevel :: (MonadTrans t, MonadGpio h m', t m' ~ m) => h -> m PinActiveLevel
  default setPinActiveLevel :: (MonadTrans t, MonadGpio h m', t m' ~ m) => h -> PinActiveLevel -> m ()
  default togglePinActiveLevel :: (MonadTrans t, MonadGpio h m', t m' ~ m) => h -> m PinActiveLevel

  pins = lift pins
  {-# INLINE pins #-}
  pinCapabilities = lift . pinCapabilities
  {-# INLINE pinCapabilities #-}
  openPin = lift . openPin
  {-# INLINE openPin #-}
  closePin = lift . closePin
  {-# INLINE closePin #-}
  getPinDirection = lift . getPinDirection
  {-# INLINE getPinDirection #-}
  getPinInputMode = lift . getPinInputMode
  {-# INLINE getPinInputMode #-}
  setPinInputMode h mode = lift $ setPinInputMode h mode
  {-# INLINE setPinInputMode #-}
  getPinOutputMode = lift . getPinOutputMode
  {-# INLINE getPinOutputMode #-}
  setPinOutputMode h mode v = lift $ setPinOutputMode h mode v
  {-# INLINE setPinOutputMode #-}
  readPin = lift . readPin
  {-# INLINE readPin #-}
  -- BUGFIX: this default previously delegated to 'readPin', which made
  -- lifted instances perform a non-blocking read instead of a blocking
  -- interrupt wait.
  pollPin = lift . pollPin
  {-# INLINE pollPin #-}
  pollPinTimeout h timeout = lift $ pollPinTimeout h timeout
  {-# INLINE pollPinTimeout #-}
  writePin h v = lift $ writePin h v
  {-# INLINE writePin #-}
  togglePin = lift . togglePin
  {-# INLINE togglePin #-}
  getPinInterruptMode = lift . getPinInterruptMode
  {-# INLINE getPinInterruptMode #-}
  setPinInterruptMode h mode = lift $ setPinInterruptMode h mode
  {-# INLINE setPinInterruptMode #-}
  getPinActiveLevel = lift . getPinActiveLevel
  {-# INLINE getPinActiveLevel #-}
  setPinActiveLevel h v = lift $ setPinActiveLevel h v
  {-# INLINE setPinActiveLevel #-}
  togglePinActiveLevel = lift . togglePinActiveLevel
  {-# INLINE togglePinActiveLevel #-}
-- Pass-through instances: each monad transformer delegates every
-- 'MonadGpio' operation to the underlying monad via the class's
-- default ('lift'-based) implementations.
instance (MonadGpio h m) => MonadGpio h (IdentityT m)
instance (MonadGpio h m) => MonadGpio h (ContT r m)
instance (MonadGpio h m) => MonadGpio h (CatchT m)
instance (MonadGpio h m) => MonadGpio h (ExceptT e m)
instance (MonadGpio h m) => MonadGpio h (ListT m)
instance (MonadGpio h m) => MonadGpio h (MaybeT m)
instance (MonadGpio h m) => MonadGpio h (ReaderT r m)
instance (MonadGpio h m, Monoid w) => MonadGpio h (LazyRWS.RWST r w s m)
instance (MonadGpio h m, Monoid w) => MonadGpio h (StrictRWS.RWST r w s m)
instance (MonadGpio h m) => MonadGpio h (LazyState.StateT s m)
instance (MonadGpio h m) => MonadGpio h (StrictState.StateT s m)
instance (MonadGpio h m, Monoid w) => MonadGpio h (LazyWriter.WriterT w m)
instance (MonadGpio h m, Monoid w) => MonadGpio h (StrictWriter.WriterT w m)
instance (MonadGpio h m) => MonadGpio h (LoggingT m)
instance (MonadGpio h m) => MonadGpio h (NoLoggingT m)
-- | Constraint synonym: a 'MonadGpio' that can also mask exceptions.
type MaskGpioM h m = (MonadMask m, MonadGpio h m)
-- | Constraint synonym: a 'MonadGpio' that can also throw exceptions.
type ThrowGpioM h m = (MonadThrow m, MonadGpio h m)
-- | Exception-safe pin management.
--
-- Opens the pin with 'openPin', hands the handle to the given GPIO
-- computation, and guarantees the handle is released with 'closePin'
-- afterwards -- whether the computation returns normally or throws.
-- The computation's result (or exception) is propagated to the
-- caller.
withPin :: (MaskGpioM h m) => Pin -> (h -> m a) -> m a
withPin pin action = bracket (openPin pin) closePin action
-- | A handle to a pin that's been configured for non-blocking reads
-- only.
--
-- You cannot poll an 'InputPin' for interrupts. See 'InterruptPin'.
--
-- The constructor and accessor are not exported; obtain a value via
-- 'withInputPin'.
newtype InputPin h =
  InputPin {_inputHandle :: h}
  deriving (Eq,Show)
-- | Apply the given active level to the handle, or leave the pin's
-- level untouched when no level is supplied.
maybeSetPinActiveLevel :: (MonadGpio h m) => h -> Maybe PinActiveLevel -> m ()
maybeSetPinActiveLevel h = maybe (return ()) (setPinActiveLevel h)
-- | Like 'withPin', but for 'InputPin's. Sets the pin's input mode to
-- the specified 'PinInputMode' value.
--
-- If the optional active level argument is 'Nothing', then the pin's
-- active level is unchanged from its current state. Otherwise, the
-- pin's active level is set to the specified level.
--
-- It is an error to call this action if the pin cannot be configured
-- for input, or if it does not support the specified input mode.
withInputPin :: (MaskGpioM h m) => Pin -> PinInputMode -> Maybe PinActiveLevel -> (InputPin h -> m a) -> m a
withInputPin pin mode level body =
  withPin pin $ \h -> do
    setPinInputMode h mode
    maybeSetPinActiveLevel h level
    body (InputPin h)
-- | Like 'readPin': non-blocking read of the pin's logical value.
readInputPin :: (MonadGpio h m) => InputPin h -> m PinValue
readInputPin = readPin . _inputHandle
-- | Like 'getPinInputMode': query the pin's configured input mode.
getInputPinInputMode :: (MonadGpio h m) => InputPin h -> m PinInputMode
getInputPinInputMode = getPinInputMode . _inputHandle
-- | Like 'getPinActiveLevel': query the pin's active level.
getInputPinActiveLevel :: (MonadGpio h m) => InputPin h -> m PinActiveLevel
getInputPinActiveLevel = getPinActiveLevel . _inputHandle
-- | Like 'setPinActiveLevel': set the pin's active level.
setInputPinActiveLevel :: (MonadGpio h m) => InputPin h -> PinActiveLevel -> m ()
setInputPinActiveLevel = setPinActiveLevel . _inputHandle
-- | Like 'togglePinActiveLevel': flip the pin's active level and
-- return the new level.
toggleInputPinActiveLevel :: (MonadGpio h m) => InputPin h -> m PinActiveLevel
toggleInputPinActiveLevel = togglePinActiveLevel . _inputHandle
-- | A handle to a pin that's been configured both for non-blocking
-- reads and for interrupt-driven polling reads.
--
-- The constructor and accessor are not exported; obtain a value via
-- 'withInterruptPin'.
newtype InterruptPin h =
  InterruptPin {_interruptHandle :: h}
  deriving (Eq,Show)
-- | Like 'withPin', but for 'InterruptPin's. The pin is opened for
-- input, its input mode is set to the specified 'PinInputMode' value,
-- and its interrupt mode is set to the specified 'PinInterruptMode'
-- value.
--
-- If the optional active level argument is 'Nothing', then the pin's
-- active level is unchanged from its current state. Otherwise, the
-- pin's active level is set to the specified level.
--
-- It is an error to call this action if any of the following are true:
--
-- * The pin cannot be configured for input.
--
-- * The pin does not support the specified input mode.
--
-- * The pin does not support interrupts.
withInterruptPin :: (MaskGpioM h m) => Pin -> PinInputMode -> PinInterruptMode -> Maybe PinActiveLevel -> (InterruptPin h -> m a) -> m a
withInterruptPin pin inMode intMode level body =
  withPin pin $ \h -> do
    setPinInputMode h inMode
    setPinInterruptMode h intMode
    maybeSetPinActiveLevel h level
    body (InterruptPin h)
-- | Like 'readPin': non-blocking read of the pin's logical value.
readInterruptPin :: (MonadGpio h m) => InterruptPin h -> m PinValue
readInterruptPin = readPin . _interruptHandle
-- | Like 'pollPin': block until the pin's configured interrupt event
-- fires, then return the pin's value.
pollInterruptPin :: (MonadGpio h m) => InterruptPin h -> m PinValue
pollInterruptPin = pollPin . _interruptHandle
-- | Like 'pollPinTimeout': block until the interrupt event fires or
-- the timeout (in microseconds) expires.
pollInterruptPinTimeout :: (MonadGpio h m) => InterruptPin h -> Int -> m (Maybe PinValue)
pollInterruptPinTimeout = pollPinTimeout . _interruptHandle
-- | Like 'getPinInputMode': query the pin's configured input mode.
getInterruptPinInputMode :: (MonadGpio h m) => InterruptPin h -> m PinInputMode
getInterruptPinInputMode = getPinInputMode . _interruptHandle
-- | Like 'getPinInterruptMode': query the pin's interrupt mode.
getInterruptPinInterruptMode :: (ThrowGpioM h m) => InterruptPin h -> m PinInterruptMode
getInterruptPinInterruptMode = getPinInterruptMode . _interruptHandle
-- | Like 'setPinInterruptMode': set the pin's interrupt mode.
setInterruptPinInterruptMode :: (MonadGpio h m) => InterruptPin h -> PinInterruptMode -> m ()
setInterruptPinInterruptMode = setPinInterruptMode . _interruptHandle
-- | Like 'getPinActiveLevel': query the pin's active level.
getInterruptPinActiveLevel :: (MonadGpio h m) => InterruptPin h -> m PinActiveLevel
getInterruptPinActiveLevel = getPinActiveLevel . _interruptHandle
-- | Like 'setPinActiveLevel': set the pin's active level.
setInterruptPinActiveLevel :: (MonadGpio h m) => InterruptPin h -> PinActiveLevel -> m ()
setInterruptPinActiveLevel = setPinActiveLevel . _interruptHandle
-- | Like 'togglePinActiveLevel': flip the pin's active level and
-- return the new level.
toggleInterruptPinActiveLevel :: (MonadGpio h m) => InterruptPin h -> m PinActiveLevel
toggleInterruptPinActiveLevel = togglePinActiveLevel . _interruptHandle
-- | A handle to a pin that's been configured for output only.
--
-- Note that output pins can be both read and written. However, they
-- only support non-blocking reads, not interrupt-driven polling
-- reads.
--
-- The constructor and accessor are not exported; obtain a value via
-- 'withOutputPin'.
newtype OutputPin h =
  OutputPin {_outputHandle :: h}
  deriving (Eq,Show)
-- | Like 'withPin', but for 'OutputPin's. Sets the pin's output mode
-- to the specified 'PinOutputMode' value.
--
-- The 'PinValue' argument specifies the pin's initial output value.
-- It is relative to the active level argument, or to the pin's
-- current active level if the active level argument is 'Nothing'.
--
-- It is an error to call this action if the pin cannot be configured
-- for output, or if it does not support the specified output mode.
withOutputPin :: (MaskGpioM h m) => Pin -> PinOutputMode -> Maybe PinActiveLevel -> PinValue -> (OutputPin h -> m a) -> m a
withOutputPin pin mode level v body =
  withPin pin $ \h -> do
    -- the active level must be applied first, because the initial
    -- output value 'v' is interpreted relative to it
    maybeSetPinActiveLevel h level
    setPinOutputMode h mode v
    body (OutputPin h)
-- | Like 'writePin': set the pin's output value.
writeOutputPin :: (MonadGpio h m) => OutputPin h -> PinValue -> m ()
writeOutputPin = writePin . _outputHandle
-- | Like 'togglePin': flip the pin's output value and return the new
-- value.
toggleOutputPin :: (MonadGpio h m) => OutputPin h -> m PinValue
toggleOutputPin = togglePin . _outputHandle
-- | Like 'readPin': non-blocking read of the pin's logical value.
readOutputPin :: (MonadGpio h m) => OutputPin h -> m PinValue
readOutputPin = readPin . _outputHandle
-- | Like 'getPinOutputMode': query the pin's configured output mode.
getOutputPinOutputMode :: (MonadGpio h m) => OutputPin h -> m PinOutputMode
getOutputPinOutputMode = getPinOutputMode . _outputHandle
-- | Like 'getPinActiveLevel': query the pin's active level.
getOutputPinActiveLevel :: (MonadGpio h m) => OutputPin h -> m PinActiveLevel
getOutputPinActiveLevel = getPinActiveLevel . _outputHandle
-- | Like 'setPinActiveLevel': set the pin's active level.
setOutputPinActiveLevel :: (MonadGpio h m) => OutputPin h -> PinActiveLevel -> m ()
setOutputPinActiveLevel = setPinActiveLevel . _outputHandle
-- | Like 'togglePinActiveLevel': flip the pin's active level and
-- return the new level.
toggleOutputPinActiveLevel :: (MonadGpio h m) => OutputPin h -> m PinActiveLevel
toggleOutputPinActiveLevel = togglePinActiveLevel . _outputHandle
| dhess/gpio | src/System/GPIO/Monad.hs | bsd-3-clause | 26,309 | 0 | 13 | 5,405 | 4,143 | 2,325 | 1,818 | -1 | -1 |
{-# LANGUAGE ConstraintKinds #-}
module Database.Persist.Class.PersistStore
( HasPersistBackend (..)
, IsPersistBackend (..)
, PersistRecordBackend
, liftPersist
, PersistCore (..)
, PersistStoreRead (..)
, PersistStoreWrite (..)
, getEntity
, getJust
, getJustEntity
, belongsTo
, belongsToJust
, insertEntity
, insertRecord
, ToBackendKey(..)
, BackendCompatible(..)
) where
import Control.Exception (throwIO)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Reader (MonadReader (ask), runReaderT)
import Control.Monad.Trans.Reader (ReaderT)
import qualified Data.Aeson as A
import Data.Map (Map)
import qualified Data.Map as Map
import qualified Data.Maybe as Maybe
import qualified Data.Text as T
import Database.Persist.Class.PersistEntity
import Database.Persist.Class.PersistField
import Database.Persist.Types
-- | Class which allows the plucking of a @BaseBackend backend@ from some larger type.
-- For example,
-- @
-- instance HasPersistBackend (SqlReadBackend, Int) where
-- type BaseBackend (SqlReadBackend, Int) = SqlBackend
-- persistBackend = unSqlReadBackend . fst
-- @
class HasPersistBackend backend where
  -- | The underlying base backend wrapped by @backend@.
  type BaseBackend backend
  -- | Project the base backend out of the (possibly larger) @backend@.
  persistBackend :: backend -> BaseBackend backend
-- | Class which witnesses that @backend@ is essentially the same as @BaseBackend backend@.
-- That is, they're isomorphic and @backend@ is just some wrapper over @BaseBackend backend@.
class (HasPersistBackend backend) => IsPersistBackend backend where
  -- | This function is how we actually construct and tag a backend as having read or write capabilities.
  -- It should be used carefully and only when actually constructing a @backend@. Careless use allows us
  -- to accidentally run a write query against a read-only database.
  mkPersistBackend :: BaseBackend backend -> backend
-- | This class witnesses that two backend are compatible, and that you can
-- convert from the @sub@ backend into the @sup@ backend. This is similar
-- to the 'HasPersistBackend' and 'IsPersistBackend' classes, but where you
-- don't want to fix the type associated with the 'PersistEntityBackend' of
-- a record.
--
-- Generally speaking, where you might have:
--
-- @
-- foo ::
-- ( 'PersistEntity' record
-- , 'PeristEntityBackend' record ~ 'BaseBackend' backend
-- , 'IsSqlBackend' backend
-- )
-- @
--
-- this can be replaced with:
--
-- @
-- foo ::
-- ( 'PersistEntity' record,
-- , 'PersistEntityBackend' record ~ backend
-- , 'BackendCompatible' 'SqlBackend' backend
-- )
-- @
--
-- This works for 'SqlReadBackend' because of the @instance 'BackendCompatible' 'SqlBackend' 'SqlReadBackend'@, without needing to go through the 'BaseBackend' type family.
--
-- Likewise, functions that are currently hardcoded to use 'SqlBackend' can be generalized:
--
-- @
-- -- before:
-- asdf :: 'ReaderT' 'SqlBackend' m ()
-- asdf = pure ()
--
-- -- after:
-- asdf' :: 'BackendCompatible' SqlBackend backend => ReaderT backend m ()
-- asdf' = withReaderT 'projectBackend' asdf
-- @
--
-- @since 2.7.1
class BackendCompatible sup sub where
    -- | Convert from the @sub@ backend into the compatible @sup@ backend.
    projectBackend :: sub -> sup
-- | A convenient alias for common type signatures
--
-- Constrains @record@ to be a 'PersistEntity' whose backend matches the
-- 'BaseBackend' of @backend@.
type PersistRecordBackend record backend = (PersistEntity record, PersistEntityBackend record ~ BaseBackend backend)
-- | Run a 'ReaderT' persistence action using the backend obtained from the
-- ambient 'MonadReader', lifting the 'IO' result into @m@.
liftPersist
    :: (MonadIO m, MonadReader backend m)
    => ReaderT backend IO b -> m b
liftPersist action = ask >>= liftIO . runReaderT action
-- | 'ToBackendKey' converts a 'PersistEntity' 'Key' into a 'BackendKey'
-- This can be used by each backend to convert between a 'Key' and a plain
-- Haskell type. For Sql, that is done with 'toSqlKey' and 'fromSqlKey'.
--
-- By default, a 'PersistEntity' uses the default 'BackendKey' for its Key
-- and is an instance of ToBackendKey
--
-- A 'Key' that instead uses a custom type will not be an instance of
-- 'ToBackendKey'.
class ( PersistEntity record
      , PersistEntityBackend record ~ backend
      , PersistCore backend
      ) => ToBackendKey backend record where
    -- | Convert a typed entity 'Key' into the backend's plain key representation.
    toBackendKey :: Key record -> BackendKey backend
    -- | Convert a backend key back into a typed entity 'Key'.
    fromBackendKey :: BackendKey backend -> Key record
class PersistCore backend where
    -- | The backend-specific key type (e.g. an auto-increment id for SQL,
    -- per the documentation on 'insert' below).
    data BackendKey backend
-- | Backends supporting read operations: fetching records by key.
class
    ( Show (BackendKey backend), Read (BackendKey backend)
    , Eq (BackendKey backend), Ord (BackendKey backend)
    , PersistCore backend
    , PersistField (BackendKey backend), A.ToJSON (BackendKey backend), A.FromJSON (BackendKey backend)
    ) => PersistStoreRead backend where

    -- | Get a record by identifier, if available.
    --
    -- === __Example usage__
    --
    -- With <#schema-persist-store-1 schema-1> and <#dataset-persist-store-1 dataset-1>,
    --
    -- > getSpj :: MonadIO m => ReaderT SqlBackend m (Maybe User)
    -- > getSpj = get spjId
    --
    -- > mspj <- getSpj
    --
    -- The above query when applied on <#dataset-persist-store-1 dataset-1>, will get this:
    --
    -- > +------+-----+
    -- > | name | age |
    -- > +------+-----+
    -- > | SPJ  | 40  |
    -- > +------+-----+
    get :: (MonadIO m, PersistRecordBackend record backend)
        => Key record -> ReaderT backend m (Maybe record)

    -- | Get many records by their respective identifiers, if available.
    --
    -- Keys that have no matching record are simply absent from the result map.
    --
    -- @since 2.8.1
    --
    -- === __Example usage__
    --
    -- With <#schema-persist-store-1 schema-1> and <#dataset-persist-store-1 dataset-1>:
    --
    -- > getUsers :: MonadIO m => ReaderT SqlBackend m (Map (Key User) User)
    -- > getUsers = getMany allkeys
    --
    -- > musers <- getUsers
    getMany
        :: (MonadIO m, PersistRecordBackend record backend)
        => [Key record] -> ReaderT backend m (Map (Key record) record)
    getMany [] = return Map.empty
    getMany ks = do
        vs <- mapM get ks
        -- Keep only the keys that resolved to a record. The pattern match
        -- inside the comprehension replaces the previous partial
        -- 'Maybe.fromJust' / 'Maybe.isJust' combination with a total
        -- formulation that produces the same map.
        return $ Map.fromList [ (k, v) | (k, Just v) <- zip ks vs ]
-- | Backends supporting write operations in addition to reads
-- (note the 'PersistStoreRead' superclass constraint below).
class
    ( Show (BackendKey backend), Read (BackendKey backend)
    , Eq (BackendKey backend), Ord (BackendKey backend)
    , PersistStoreRead backend
    , PersistField (BackendKey backend), A.ToJSON (BackendKey backend), A.FromJSON (BackendKey backend)
    ) => PersistStoreWrite backend where
-- | Create a new record in the database, returning an automatically created
-- key (in SQL an auto-increment id).
--
-- === __Example usage__
--
-- Using <#schema-persist-store-1 schema-1> and <#dataset-persist-store-1 dataset-1>, let's insert a new user 'John'.
--
-- > insertJohn :: MonadIO m => ReaderT SqlBackend m (Key User)
-- > insertJohn = insert $ User "John" 30
--
-- > johnId <- insertJohn
--
-- The above query when applied on <#dataset-persist-store-1 dataset-1>, will produce this:
--
-- > +-----+------+-----+
-- > |id |name |age |
-- > +-----+------+-----+
-- > |1 |SPJ |40 |
-- > +-----+------+-----+
-- > |2 |Simon |41 |
-- > +-----+------+-----+
-- > |3 |John |30 |
-- > +-----+------+-----+
    -- Backend-specific primitive: no default implementation.
    insert :: (MonadIO m, PersistRecordBackend record backend)
        => record -> ReaderT backend m (Key record)
-- | Same as 'insert', but doesn't return a @Key@.
--
-- === __Example usage__
--
-- with <#schema-persist-store-1 schema-1> and <#dataset-persist-store-1 dataset-1>,
--
-- > insertJohn :: MonadIO m => ReaderT SqlBackend m (Key User)
-- > insertJohn = insert_ $ User "John" 30
--
-- The above query when applied on <#dataset-persist-store-1 dataset-1>, will produce this:
--
-- > +-----+------+-----+
-- > |id |name |age |
-- > +-----+------+-----+
-- > |1 |SPJ |40 |
-- > +-----+------+-----+
-- > |2 |Simon |41 |
-- > +-----+------+-----+
-- > |3 |John |30 |
-- > +-----+------+-----+
insert_ :: (MonadIO m, PersistRecordBackend record backend)
=> record -> ReaderT backend m ()
insert_ record = insert record >> return ()
-- | Create multiple records in the database and return their 'Key's.
--
-- If you don't need the inserted 'Key's, use 'insertMany_'.
--
-- The MongoDB and PostgreSQL backends insert all records and
-- retrieve their keys in one database query.
--
-- The SQLite and MySQL backends use the slow, default implementation of
-- @mapM insert@.
--
-- === __Example usage__
--
-- with <#schema-persist-store-1 schema-1> and <#dataset-persist-store-1 dataset-1>,
--
-- > insertUsers :: MonadIO m => ReaderT SqlBackend m [Key User]
-- > insertUsers = insertMany [User "John" 30, User "Nick" 32, User "Jane" 20]
--
-- > userIds <- insertUsers
--
-- The above query when applied on <#dataset-persist-store-1 dataset-1>, will produce this:
--
-- > +-----+------+-----+
-- > |id |name |age |
-- > +-----+------+-----+
-- > |1 |SPJ |40 |
-- > +-----+------+-----+
-- > |2 |Simon |41 |
-- > +-----+------+-----+
-- > |3 |John |30 |
-- > +-----+------+-----+
-- > |4 |Nick |32 |
-- > +-----+------+-----+
-- > |5 |Jane |20 |
-- > +-----+------+-----+
insertMany :: (MonadIO m, PersistRecordBackend record backend)
=> [record] -> ReaderT backend m [Key record]
insertMany = mapM insert
-- | Same as 'insertMany', but doesn't return any 'Key's.
--
-- The MongoDB, PostgreSQL, SQLite and MySQL backends insert all records in
-- one database query.
--
-- === __Example usage__
--
-- With <#schema-persist-store-1 schema-1> and <#dataset-persist-store-1 dataset-1>,
--
-- > insertUsers_ :: MonadIO m => ReaderT SqlBackend m ()
-- > insertUsers_ = insertMany_ [User "John" 30, User "Nick" 32, User "Jane" 20]
--
-- The above query when applied on <#dataset-persist-store-1 dataset-1>, will produce this:
--
-- > +-----+------+-----+
-- > |id |name |age |
-- > +-----+------+-----+
-- > |1 |SPJ |40 |
-- > +-----+------+-----+
-- > |2 |Simon |41 |
-- > +-----+------+-----+
-- > |3 |John |30 |
-- > +-----+------+-----+
-- > |4 |Nick |32 |
-- > +-----+------+-----+
-- > |5 |Jane |20 |
-- > +-----+------+-----+
insertMany_ :: (MonadIO m, PersistRecordBackend record backend)
=> [record] -> ReaderT backend m ()
insertMany_ x = insertMany x >> return ()
-- | Same as 'insertMany_', but takes an 'Entity' instead of just a record.
--
-- Useful when migrating data from one entity to another
-- and want to preserve ids.
--
-- The MongoDB, PostgreSQL, SQLite and MySQL backends insert all records in
-- one database query.
--
-- === __Example usage__
--
-- With <#schema-persist-store-1 schema-1> and <#dataset-persist-store-1 dataset-1>,
--
-- > insertUserEntityMany :: MonadIO m => ReaderT SqlBackend m ()
-- > insertUserEntityMany = insertEntityMany [SnakeEntity, EvaEntity]
--
-- The above query when applied on <#dataset-persist-store-1 dataset-1>, will produce this:
--
-- > +-----+------+-----+
-- > |id |name |age |
-- > +-----+------+-----+
-- > |1 |SPJ |40 |
-- > +-----+------+-----+
-- > |2 |Simon |41 |
-- > +-----+------+-----+
-- > |3 |Snake |38 |
-- > +-----+------+-----+
-- > |4 |Eva |38 |
-- > +-----+------+-----+
insertEntityMany :: (MonadIO m, PersistRecordBackend record backend)
=> [Entity record] -> ReaderT backend m ()
insertEntityMany = mapM_ (\(Entity k record) -> insertKey k record)
-- | Create a new record in the database using the given key.
--
-- === __Example usage__
-- With <#schema-persist-store-1 schema-1> and <#dataset-persist-store-1 dataset-1>,
--
-- > insertAliceKey :: MonadIO m => Key User -> ReaderT SqlBackend m ()
-- > insertAliceKey key = insertKey key $ User "Alice" 20
--
-- > insertAliceKey $ UserKey {unUserKey = SqlBackendKey {unSqlBackendKey = 3}}
--
-- The above query when applied on <#dataset-persist-store-1 dataset-1>, will produce this:
--
-- > +-----+------+-----+
-- > |id |name |age |
-- > +-----+------+-----+
-- > |1 |SPJ |40 |
-- > +-----+------+-----+
-- > |2 |Simon |41 |
-- > +-----+------+-----+
-- > |3 |Alice |20 |
-- > +-----+------+-----+
    -- Backend-specific primitive: writes a record under a caller-chosen key.
    insertKey :: (MonadIO m, PersistRecordBackend record backend)
        => Key record -> record -> ReaderT backend m ()
-- | Put the record in the database with the given key.
-- Unlike 'replace', if a record with the given key does not
-- exist then a new record will be inserted.
--
-- === __Example usage__
--
-- We try to explain 'upsertBy' using <#schema-persist-store-1 schema-1> and <#dataset-persist-store-1 dataset-1>.
--
-- First, we insert Philip to <#dataset-persist-store-1 dataset-1>.
--
-- > insertPhilip :: MonadIO m => ReaderT SqlBackend m (Key User)
-- > insertPhilip = insert $ User "Philip" 42
--
-- > philipId <- insertPhilip
--
-- This query will produce:
--
-- > +-----+------+-----+
-- > |id |name |age |
-- > +-----+------+-----+
-- > |1 |SPJ |40 |
-- > +-----+------+-----+
-- > |2 |Simon |41 |
-- > +-----+------+-----+
-- > |3 |Philip|42 |
-- > +-----+------+-----+
--
-- > repsertHaskell :: MonadIO m => Key record -> ReaderT SqlBackend m ()
-- > repsertHaskell id = repsert id $ User "Haskell" 81
--
-- > repsertHaskell philipId
--
-- This query will replace Philip's record with Haskell's one:
--
-- > +-----+-----------------+--------+
-- > |id |name |age |
-- > +-----+-----------------+--------+
-- > |1 |SPJ |40 |
-- > +-----+-----------------+--------+
-- > |2 |Simon |41 |
-- > +-----+-----------------+--------+
-- > |3 |Philip -> Haskell|42 -> 81|
-- > +-----+-----------------+--------+
--
-- 'repsert' inserts the given record if the key doesn't exist.
--
-- > repsertXToUnknown :: MonadIO m => ReaderT SqlBackend m ()
-- > repsertXToUnknown = repsert unknownId $ User "X" 999
--
-- For example, applying the above query to <#dataset-persist-store-1 dataset-1> will produce this:
--
-- > +-----+------+-----+
-- > |id |name |age |
-- > +-----+------+-----+
-- > |1 |SPJ |40 |
-- > +-----+------+-----+
-- > |2 |Simon |41 |
-- > +-----+------+-----+
-- > |3 |X |999 |
-- > +-----+------+-----+
    -- Backend-specific primitive: insert-or-replace under the given key
    -- (semantics documented in the haddock above).
    repsert :: (MonadIO m, PersistRecordBackend record backend)
        => Key record -> record -> ReaderT backend m ()
-- | Put many entities into the database.
--
-- Batch version of 'repsert' for SQL backends.
--
-- Useful when migrating data from one entity to another
-- and want to preserve ids.
--
-- @since 2.8.1
--
-- === __Example usage__
--
-- With <#schema-persist-store-1 schema-1> and <#dataset-persist-store-1 dataset-1>,
--
-- > repsertManyUsers :: MonadIO m =>ReaderT SqlBackend m ()
-- > repsertManyusers = repsertMany [(simonId, User "Philip" 20), (unknownId999, User "Mr. X" 999)]
--
-- The above query when applied on <#dataset-persist-store-1 dataset-1>, will produce this:
--
-- > +-----+----------------+---------+
-- > |id |name |age |
-- > +-----+----------------+---------+
-- > |1 |SPJ |40 |
-- > +-----+----------------+---------+
-- > |2 |Simon -> Philip |41 -> 20 |
-- > +-----+----------------+---------+
-- > |999 |Mr. X |999 |
-- > +-----+----------------+---------+
repsertMany
:: (MonadIO m, PersistRecordBackend record backend)
=> [(Key record, record)] -> ReaderT backend m ()
repsertMany = mapM_ (uncurry repsert)
-- | Replace the record in the database with the given
-- key. Note that the result is undefined if such record does
-- not exist, so you must use 'insertKey' or 'repsert' in
-- these cases.
--
-- === __Example usage__
--
-- With <#schema-persist-store-1 schema-1 schama-1> and <#dataset-persist-store-1 dataset-1>,
--
-- > replaceSpj :: MonadIO m => User -> ReaderT SqlBackend m ()
-- > replaceSpj record = replace spjId record
--
-- The above query when applied on <#dataset-persist-store-1 dataset-1>, will produce this:
--
-- > +-----+------+-----+
-- > |id |name |age |
-- > +-----+------+-----+
-- > |1 |Mike |45 |
-- > +-----+------+-----+
-- > |2 |Simon |41 |
-- > +-----+------+-----+
    -- Backend-specific primitive. Result is undefined when no row exists for
    -- the key (see haddock above); use 'insertKey' or 'repsert' in that case.
    replace :: (MonadIO m, PersistRecordBackend record backend)
        => Key record -> record -> ReaderT backend m ()
-- | Delete a specific record by identifier. Does nothing if record does
-- not exist.
--
-- === __Example usage__
--
-- With <#schema-persist-store-1 schema-1> and <#dataset-persist-store-1 dataset-1>,
--
-- > deleteSpj :: MonadIO m => ReaderT SqlBackend m ()
-- > deleteSpj = delete spjId
--
-- The above query when applied on <#dataset-persist-store-1 dataset-1>, will produce this:
--
-- > +-----+------+-----+
-- > |id |name |age |
-- > +-----+------+-----+
-- > |2 |Simon |41 |
-- > +-----+------+-----+
    -- Backend-specific primitive: silently succeeds when the key is absent
    -- (per the haddock above).
    delete :: (MonadIO m, PersistRecordBackend record backend)
        => Key record -> ReaderT backend m ()
-- | Update individual fields on a specific record.
--
-- === __Example usage__
--
-- With <#schema-persist-store-1 schema-1> and <#dataset-persist-store-1 dataset-1>,
--
-- > updateSpj :: MonadIO m => [Update User] -> ReaderT SqlBackend m ()
-- > updateSpj updates = update spjId updates
--
-- > updateSpj [UserAge +=. 100]
--
-- The above query when applied on <#dataset-persist-store-1 dataset-1>, will produce this:
--
-- > +-----+------+-----+
-- > |id |name |age |
-- > +-----+------+-----+
-- > |1 |SPJ |140 |
-- > +-----+------+-----+
-- > |2 |Simon |41 |
-- > +-----+------+-----+
    -- Backend-specific primitive: applies field-level 'Update's to the row
    -- identified by the key.
    update :: (MonadIO m, PersistRecordBackend record backend)
        => Key record -> [Update record] -> ReaderT backend m ()
-- | Update individual fields on a specific record, and retrieve the
-- updated value from the database.
--
-- Note that this function will throw an exception if the given key is not
-- found in the database.
--
-- === __Example usage__
--
-- With <#schema-persist-store-1 schema-1> and <#dataset-persist-store-1 dataset-1>,
--
-- > updateGetSpj :: MonadIO m => [Update User] -> ReaderT SqlBackend m User
-- > updateGetSpj updates = updateGet spjId updates
--
-- > spj <- updateGetSpj [UserAge +=. 100]
--
-- The above query when applied on <#dataset-persist-store-1 dataset-1>, will produce this:
--
-- > +-----+------+-----+
-- > |id |name |age |
-- > +-----+------+-----+
-- > |1 |SPJ |140 |
-- > +-----+------+-----+
-- > |2 |Simon |41 |
-- > +-----+------+-----+
updateGet :: (MonadIO m, PersistRecordBackend record backend)
=> Key record -> [Update record] -> ReaderT backend m record
updateGet key ups = do
update key ups
get key >>= maybe (liftIO $ throwIO $ KeyNotFound $ show key) return
-- | Same as 'get', but for a non-null (not Maybe) foreign key.
-- Unsafe unless your database is enforcing that the foreign key is valid.
--
-- === __Example usage__
--
-- With <#schema-persist-store-1 schema-1> and <#dataset-persist-store-1 dataset-1>,
--
-- > getJustSpj :: MonadIO m => ReaderT SqlBackend m User
-- > getJustSpj = getJust spjId
--
-- > spj <- getJust spjId
--
-- The above query when applied on <#dataset-persist-store-1 dataset-1>, will get this record:
--
-- > +----+------+-----+
-- > | id | name | age |
-- > +----+------+-----+
-- > | 1 | SPJ | 40 |
-- > +----+------+-----+
--
-- > getJustUnknown :: MonadIO m => ReaderT SqlBackend m User
-- > getJustUnknown = getJust unknownId
--
-- mrx <- getJustUnknown
--
-- This just throws an error.
-- | Fetch the record behind a non-nullable foreign key, throwing
-- 'PersistForeignConstraintUnmet' when the row is missing.
getJust :: ( PersistStoreRead backend
           , PersistRecordBackend record backend
           , MonadIO m
           ) => Key record -> ReaderT backend m record
getJust key = do
    found <- get key
    case found of
        Just record -> return record
        Nothing     -> liftIO $ throwIO $ PersistForeignConstraintUnmet $ T.pack $ show key
-- | Same as 'getJust', but returns an 'Entity' instead of just the record.
--
-- @since 2.6.1
--
-- === __Example usage__
--
-- With <#schema-persist-store-1 schema-1> and <#dataset-persist-store-1 dataset-1>,
--
-- > getJustEntitySpj :: MonadIO m => ReaderT SqlBackend m (Entity User)
-- > getJustEntitySpj = getJustEntity spjId
--
-- > spjEnt <- getJustEntitySpj
--
-- The above query when applied on <#dataset-persist-store-1 dataset-1>, will get this entity:
--
-- > +----+------+-----+
-- > | id | name | age |
-- > +----+------+-----+
-- > | 1 | SPJ | 40 |
-- > +----+------+-----+
-- | Like 'getJust', but pairs the fetched record with its key as an 'Entity'.
--
-- @since 2.6.1
getJustEntity
    :: (PersistEntityBackend record ~ BaseBackend backend
       ,MonadIO m
       ,PersistEntity record
       ,PersistStoreRead backend)
    => Key record -> ReaderT backend m (Entity record)
getJustEntity key = fmap (Entity key) (getJust key)
-- | Curry this to make a convenience function that loads an associated model.
--
-- > foreign = belongsTo foreignId
-- | Curry this to make a convenience function that loads an associated model.
--
-- > foreign = belongsTo foreignId
--
-- A 'Nothing' foreign key short-circuits to 'Nothing' without touching the store.
belongsTo ::
    ( PersistStoreRead backend
    , PersistEntity ent1
    , PersistRecordBackend ent2 backend
    , MonadIO m
    ) => (ent1 -> Maybe (Key ent2)) -> ent1 -> ReaderT backend m (Maybe ent2)
belongsTo foreignKeyField model =
    maybe (return Nothing) get (foreignKeyField model)
-- | Same as 'belongsTo', but uses @getJust@ and therefore is similarly unsafe.
-- | Same as 'belongsTo', but uses 'getJust' and therefore is similarly unsafe
-- when the referenced row is missing.
belongsToJust ::
    ( PersistStoreRead backend
    , PersistEntity ent1
    , PersistRecordBackend ent2 backend
    , MonadIO m
    )
    => (ent1 -> Key ent2) -> ent1 -> ReaderT backend m ent2
belongsToJust keyOf = getJust . keyOf
-- | Like @insert@, but returns the complete @Entity@.
--
-- === __Example usage__
--
-- With <#schema-persist-store-1 schema-1> and <#dataset-persist-store-1 dataset-1>,
--
-- > insertHaskellEntity :: MonadIO m => ReaderT SqlBackend m (Entity User)
-- > insertHaskellEntity = insertEntity $ User "Haskell" 81
--
-- > haskellEnt <- insertHaskellEntity
--
-- The above query when applied on <#dataset-persist-store-1 dataset-1>, will produce this:
--
-- > +----+---------+-----+
-- > | id | name | age |
-- > +----+---------+-----+
-- > | 1 | SPJ | 40 |
-- > +----+---------+-----+
-- > | 2 | Simon | 41 |
-- > +----+---------+-----+
-- > | 3 | Haskell | 81 |
-- > +----+---------+-----+
-- | Like 'insert', but returns the complete 'Entity' (key plus record).
insertEntity ::
    ( PersistStoreWrite backend
    , PersistRecordBackend e backend
    , MonadIO m
    ) => e -> ReaderT backend m (Entity e)
insertEntity record = do
    key <- insert record
    pure (Entity key record)
-- | Like @get@, but returns the complete @Entity@.
--
-- === __Example usage__
--
-- With <#schema-persist-store-1 schema-1> and <#dataset-persist-store-1 dataset-1>,
--
-- > getSpjEntity :: MonadIO m => ReaderT SqlBackend m (Maybe (Entity User))
-- > getSpjEntity = getEntity spjId
--
-- > mSpjEnt <- getSpjEntity
--
-- The above query when applied on <#dataset-persist-store-1 dataset-1>, will get this entity:
--
-- > +----+------+-----+
-- > | id | name | age |
-- > +----+------+-----+
-- > | 1 | SPJ | 40 |
-- > +----+------+-----+
-- | Like 'get', but wraps a found record together with its key in an 'Entity'.
getEntity ::
    ( PersistStoreRead backend
    , PersistRecordBackend e backend
    , MonadIO m
    ) => Key e -> ReaderT backend m (Maybe (Entity e))
getEntity key = do
    found <- get key
    case found of
        Nothing     -> return Nothing
        Just record -> return (Just (Entity key record))
-- | Like 'insertEntity' but just returns the record instead of 'Entity'.
--
-- @since 2.6.1
--
-- === __Example usage__
--
-- With <#schema-persist-store-1 schema-1> and <#dataset-persist-store-1 dataset-1>,
--
-- > insertDaveRecord :: MonadIO m => ReaderT SqlBackend m User
-- > insertDaveRecord = insertRecord $ User "Dave" 50
--
-- > dave <- insertDaveRecord
--
-- The above query when applied on <#dataset-persist-store-1 dataset-1>, will produce this:
--
-- > +-----+------+-----+
-- > |id |name |age |
-- > +-----+------+-----+
-- > |1 |SPJ |40 |
-- > +-----+------+-----+
-- > |2 |Simon |41 |
-- > +-----+------+-----+
-- > |3 |Dave |50 |
-- > +-----+------+-----+
-- | Like 'insertEntity', but just returns the record instead of an 'Entity'.
--
-- @since 2.6.1
insertRecord
    :: (PersistEntityBackend record ~ BaseBackend backend
       ,PersistEntity record
       ,MonadIO m
       ,PersistStoreWrite backend)
    => record -> ReaderT backend m record
insertRecord value = insert_ value >> return value
| creichert/persistent | persistent/Database/Persist/Class/PersistStore.hs | mit | 25,467 | 0 | 17 | 6,276 | 2,652 | 1,627 | 1,025 | -1 | -1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE UndecidableInstances #-} -- FIXME
module PersistentTestModels where
import Data.Aeson
import Database.Persist.Sql
import Database.Persist.TH
import Init
import PersistTestPetType
import PersistTestPetCollarType
-- Entity definitions for the test suite. 'share' applies each TH action to
-- the parsed block: generic entities, a migration, delete-cascade instances,
-- and a saved (ignored) definition. NOTE: whitespace inside the quasiquote is
-- significant to the persistent DSL; the stray comments below deliberately
-- exercise comment handling at several indentation levels.
share [mkPersist persistSettings { mpsGeneric = True }, mkMigrate "testMigrate", mkDeleteCascade persistSettings, mkSave "_ignoredSave"] [persistUpperCase|
-- Dedented comment
  -- Header-level comment
    -- Indented comment
Person json
    name Text
    age Int "some ignored -- \" attribute"
    color Text Maybe -- this is a comment sql=foobarbaz
    PersonNameKey name -- this is a comment sql=foobarbaz
    deriving Show Eq
Person1
    -- Dedented comment
    -- Header-level comment
    -- Indented comment
    name Text
    age Int
    deriving Show Eq
PersonMaybeAge
    name Text
    age Int Maybe
PersonMay json
    name Text Maybe
    color Text Maybe
    deriving Show Eq
Pet
    ownerId PersonId
    name Text
    -- deriving Show Eq
    -- Dedented comment
    -- Header-level comment
    -- Indented comment
    type PetType
MaybeOwnedPet
    ownerId PersonId Maybe
    name Text
    type PetType
-- Dedented comment
  -- Header-level comment
    -- Indented comment
NeedsPet
    petKey PetId
OutdoorPet
    ownerId PersonId
    collar PetCollar
    type PetType
-- From the scaffold
UserPT
    ident Text
    password Text Maybe
    UniqueUserPT ident
EmailPT
    email Text
    user UserPTId Maybe
    verkey Text Maybe
    UniqueEmailPT email
Upsert
    email Text
    attr Text
    extra Text
    age Int
    UniqueUpsert email
    deriving Eq Show
UpsertBy
    email Text
    city Text
    attr Text
    UniqueUpsertBy email
    UniqueUpsertByCity city
    deriving Eq Show
Strict
    !yes Int
    ~no Int
    def Int
|]
-- 'Pet' has its @deriving Show Eq@ line commented out in the block above, so
-- the instances are supplied here, conditional on the backend's key type.
deriving instance Show (BackendKey backend) => Show (PetGeneric backend)
deriving instance Eq (BackendKey backend) => Eq (PetGeneric backend)
-- Entities generated without field prefixes ('mpsPrefixFields = False'),
-- including a sum entity (the '+' syntax below).
share [mkPersist persistSettings { mpsPrefixFields = False, mpsGeneric = True }
      , mkMigrate "noPrefixMigrate"
      ] [persistLowerCase|
NoPrefix1
    someFieldName Int
NoPrefix2
    someOtherFieldName Int
    unprefixedRef NoPrefix1Id
+NoPrefixSum
    unprefixedLeft Int
    unprefixedRight String
    deriving Show Eq
|]
-- Standalone instances for the generic no-prefix entities, conditional on
-- the backend key type supporting them.
deriving instance Show (BackendKey backend) => Show (NoPrefix1Generic backend)
deriving instance Eq (BackendKey backend) => Eq (NoPrefix1Generic backend)
deriving instance Show (BackendKey backend) => Show (NoPrefix2Generic backend)
deriving instance Eq (BackendKey backend) => Eq (NoPrefix2Generic backend)
-- | Reverses the order of the fields of an entity. Used to test
-- @??@ placeholders of 'rawSql'.
newtype ReverseFieldOrder a = RFO {unRFO :: a} deriving (Eq, Show)
-- The JSON instances are deliberately unimplemented: forcing either
-- 'toJSON' or 'parseJSON' raises an error.
instance ToJSON (Key (ReverseFieldOrder a)) where toJSON = error "ReverseFieldOrder"
instance FromJSON (Key (ReverseFieldOrder a)) where parseJSON = error "ReverseFieldOrder"
instance (PersistEntity a) => PersistEntity (ReverseFieldOrder a) where
    type PersistEntityBackend (ReverseFieldOrder a) = PersistEntityBackend a
    -- Keys are stored as plain SQL backend keys.
    newtype Key (ReverseFieldOrder a) = RFOKey { unRFOKey :: BackendKey SqlBackend } deriving (Show, Read, Eq, Ord, PersistField, PersistFieldSql)
    -- NOTE(review): 'head' is partial — assumes at least one value is supplied.
    keyFromValues = fmap RFOKey . fromPersistValue . head
    keyToValues = (:[]) . toPersistValue . unRFOKey
    -- The entity definition is the wrapped entity's, with its fields reversed.
    entityDef = revFields . entityDef . liftM unRFO
      where
        revFields ed = ed { entityFields = reverse (entityFields ed) }
    -- Field values are serialised and deserialised in reversed order too.
    toPersistFields = reverse . toPersistFields . unRFO
    newtype EntityField (ReverseFieldOrder a) b = EFRFO {unEFRFO :: EntityField a b}
    persistFieldDef = persistFieldDef . unEFRFO
    fromPersistValues = fmap RFO . fromPersistValues . reverse
    newtype Unique (ReverseFieldOrder a) = URFO {unURFO :: Unique a }
    -- Unique constraints mirror the wrapped type's, reversed.
    persistUniqueToFieldNames = reverse . persistUniqueToFieldNames . unURFO
    persistUniqueToValues = reverse . persistUniqueToValues . unURFO
    persistUniqueKeys = map URFO . reverse . persistUniqueKeys . unRFO
    -- Unsupported members fail loudly if ever used.
    persistIdField = error "ReverseFieldOrder.persistIdField"
    fieldLens = error "ReverseFieldOrder.fieldLens"
-- | Delete every row of every test entity defined above. Each 'deleteWhere'
-- is given an empty filter list, which matches all rows of that table.
cleanDB
  :: (MonadIO m, PersistQuery backend, PersistStoreWrite (BaseBackend backend))
  => ReaderT backend m ()
cleanDB = do
  deleteWhere ([] :: [Filter (PersonGeneric backend)])
  deleteWhere ([] :: [Filter (Person1Generic backend)])
  deleteWhere ([] :: [Filter (PetGeneric backend)])
  deleteWhere ([] :: [Filter (MaybeOwnedPetGeneric backend)])
  deleteWhere ([] :: [Filter (NeedsPetGeneric backend)])
  deleteWhere ([] :: [Filter (OutdoorPetGeneric backend)])
  deleteWhere ([] :: [Filter (UserPTGeneric backend)])
  deleteWhere ([] :: [Filter (EmailPTGeneric backend)])
| creichert/persistent | persistent-test/src/PersistentTestModels.hs | mit | 4,803 | 0 | 12 | 984 | 943 | 494 | 449 | -1 | -1 |
-- | This module should provide all that is required to write further
-- refactorings.
-- NOTE: it is currently unstable, and may change without notice on minor version number bumps
module Language.Haskell.Refact.API
(
-- * from `Language.Haskell.Refact.Utils.Monad`
ParseResult
, VerboseLevel(..)
, RefactSettings(..)
, TargetModule
, RefactFlags(..)
, StateStorage(..)
-- ** The GHC Monad
, RefactGhc
, runRefactGhc
, getRefacSettings
, defaultSettings
, logSettings
, logm
, logDataWithAnns
, logExactprint
, logParsedSource
-- * from `Language.Haskell.Refact.Utils.Utils`
-- ** Managing the GHC / project environment
, parseSourceFileGhc
, getTargetGhc
-- ** The bits that do the work
, runRefacSession
, runMultRefacSession
, applyRefac
, applyRefac'
, refactDone
, ApplyRefacResult
, RefacSource(..)
, nameSybQuery
, fileNameFromModSummary
, getModuleName
, clientModsAndFiles
, serverModsAndFiles
-- , lookupAnns
, stripCallStack
-- * from `Language.Haskell.Refact.Utils.MonadFunctions`
-- ** Conveniences for state access
, getTypecheckedModule
, RefacResult(..)
, getRefactStreamModified
, setRefactStreamModified
, getRefactInscopes
, getRefactRenamed
, putRefactRenamed
, getRefactParsed
, putRefactParsed
, putParsedModule
, clearParsedModule
, getRefactFileName
, getRefactTargetModule
, getRefactNameMap
, getRefactModule
, getRefactModuleName
-- * New ghc-exactprint interfacing
-- , refactRunTransform
, liftT
-- ** State flags for managing generic traversals
, getRefactDone
, setRefactDone
, clearRefactDone
, setStateStorage
, getStateStorage
, fetchAnnsFinal
-- * Parsing source
, parseDeclWithAnns
-- , logm
-- * from `Language.Haskell.Refact.Utils.LocUtils`
, SimpPos
, getGhcLoc
, getGhcLocEnd
, getLocatedStart
, getLocatedEnd
, getStartEndLoc
, startEndLocGhc
, emptyList
, nonEmptyList
-- * from `Language.Haskell.Refact.Utils.TypeSyn`
, InScopes
, ghead
, glast
, gtail
, gfromJust
-- * from `Language.Haskell.Refact.Utils.TypeUtils`
-- ** Program Analysis
-- ** Imports and exports
, inScopeInfo, isInScopeAndUnqualified, isInScopeAndUnqualifiedGhc, inScopeNames
, isExported, isExplicitlyExported, modIsExported
, equivalentNameInNewMod
, hsQualifier
-- *** Variable analysis
, isFieldName
, isClassName
, isInstanceName
, hsTypeVbls
, hsNamessRdr
, isDeclaredInRdr
, FreeNames(..),DeclaredNames(..)
, hsFreeAndDeclaredNameStrings
, hsFreeAndDeclaredRdr
, hsFreeAndDeclaredPNs
, getDeclaredVarsRdr
, hsVisibleNamesRdr
, hsFDsFromInsideRdr, hsFDNamesFromInsideRdr, hsFDNamesFromInsideRdrPure
, hsVisibleDsRdr
, rdrName2Name, rdrName2NamePure
, eqRdrNamePure
, sameNameSpace
-- *** Property checking
,isVarId,isConId,isOperator,isTopLevelPN,isLocalPN,isNonLibraryName -- ,isTopLevelPNT
,isQualifiedPN, isFunOrPatName,isTypeSig
,isFunBindP,isFunBindR,isPatBindP,isPatBindR,isSimplePatBind,isSimplePatDecl
,isComplexPatBind,isComplexPatDecl,isFunOrPatBindP,isFunOrPatBindR
,usedWithoutQualR
,findNameInRdr
,findNamesRdr, findEntity, findEntity'
,sameOccurrence
, findIdForName
, getTypeForName
, definesTypeSigRdr,definesSigDRdr
, sameBindRdr
, UsedByRhs(..)
-- *** Modules and files
, isMainModule
, getModule
-- *** Locations
,defineLoc, useLoc, locToExp
,findLRdrName
,locToNameRdr
,locToNameRdrPure
,locToRdrName
,getName
-- * Program transformation
-- *** Adding
,addDecl, addItemsToImport, addItemsToExport, addHiding
,addParamsToDecls, addParamsToSigs, addActualParamsToRhs, addImportDecl, duplicateDecl
-- *** Removing
,rmDecl, rmTypeSig, rmTypeSigs
-- *** Updating
-- ,Update(update)
,rmQualifier,qualifyToplevelName,renamePN, HowToQual(..), autoRenameLocalVar
-- *** Identifiers, expressions, patterns and declarations
, expToNameRdr
, patToNameRdr
, nameToString
, pNtoPat
, definedPNsRdr,definedNamesRdr
, definingDeclsRdrNames, definingDeclsRdrNames', definingSigsRdrNames
, definingTyClDeclsNames
-- *** Others
, divideDecls
, mkRdrName,mkQualifiedRdrName,mkNewGhcName,mkNewName,mkNewToplevelName
, registerRdrName
-- The following functions are not in the the API yet.
, causeNameClashInExports
, declsSybTransform
-- ** Typed AST traversals (added by CMB)
-- ** Miscellous
-- ** from `Language.Haskell.Refact.Utils.GhcUtils`
-- ** SYB versions
, everywhereMStaged'
, everywhereStaged
, everywhereStaged'
, listifyStaged
, everywhereButM
-- ** Scrap Your Zipper versions
, zeverywhereStaged
, zopenStaged
, zsomewhereStaged
, transZ
, transZM
, zopenStaged'
, ztransformStagedM
-- *** SYZ utilities
, upUntil
, findAbove
-- * from `Language.Haskell.Refact.Utils.GhcVersionSpecific`
, showGhc
, showGhcQual
, prettyprint
, prettyprint2
, ppType
, setGhcContext
-- * from `Language.Haskell.Refact.Utils.TokenUtils`
, NameMap
-- * from `Language.Haskell.Refact.Utils.ExactPrint'`
, replace
, setRefactAnns
, mergeRefactAnns
, setAnnKeywordDP
, copyAnn
, clearPriorComments
, balanceAllComments
, exactPrintParsed
, exactPrintExpr
, zeroDP
, setDP
, handleParseResult
, removeAnns
, synthesizeAnns
, addNewKeyword
, addNewKeywords
, addEmptyAnn
, addAnnVal
, addAnn
-- from Language.Haskell.Refact.Utils.Synonyms
, UnlocParsedHsBind
, ParsedGRHSs
, ParsedMatchGroup
, ParsedLMatch
, ParsedExpr
, ParsedLStmt
, ParsedLExpr
, ParsedBind
-- from Language.Haskell.Refact.Utils.Transform
, addSimpleImportDecl
, wrapInLambda
, wrapInPars
, removePars
, addNewLines
, wrapInParsWithDPs
, locate
-- from Language.Haskell.Refact.Utils.Query
, getVarAndRHS
, getHsBind
, isHsVar
) where
import Language.Haskell.Refact.Utils.ExactPrint
import Language.Haskell.Refact.Utils.GhcUtils
import Language.Haskell.Refact.Utils.GhcVersionSpecific
import Language.Haskell.Refact.Utils.LocUtils
import Language.Haskell.Refact.Utils.Monad
import Language.Haskell.Refact.Utils.MonadFunctions
import Language.Haskell.Refact.Utils.TypeSyn
import Language.Haskell.Refact.Utils.TypeUtils
import Language.Haskell.Refact.Utils.Types
import Language.Haskell.Refact.Utils.Utils
import Language.Haskell.Refact.Utils.Variables
import Language.Haskell.Refact.Utils.Transform
import Language.Haskell.GHC.ExactPrint.Utils
import Language.Haskell.Refact.Utils.Synonyms
import Language.Haskell.Refact.Utils.Query
| RefactoringTools/HaRe | src/Language/Haskell/Refact/API.hs | bsd-3-clause | 7,267 | 0 | 5 | 1,787 | 915 | 639 | 276 | 201 | 0 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# OPTIONS -fno-warn-orphans #-}
module Lib.Parsec (showErr, showPos) where
import Prelude.Compat hiding (FilePath)
import Control.DeepSeq (NFData(..))
import Data.Binary (Binary(..))
import qualified Text.Parsec.Error as ParseError
import qualified Text.Parsec.Pos as ParsecPos
-- Orphan instance (the module enables -fno-warn-orphans): serialise a
-- 'SourcePos' as its name, line, and column, in that order.
instance Binary ParsecPos.SourcePos where
  put pos = do
    put $ ParsecPos.sourceName pos
    put $ ParsecPos.sourceLine pos
    put $ ParsecPos.sourceColumn pos
  -- Construct the position directly with 'newPos' instead of the previous
  -- roundabout 'initialPos' + 'setSourceLine' + 'setSourceColumn' chain;
  -- 'newPos name line column' builds the identical value in one step.
  get = ParsecPos.newPos <$> get <*> get <*> get
  {-# INLINE get #-}
  {-# INLINE put #-}
-- Orphan instance: fully evaluate all three components of a 'SourcePos'.
-- Forcing the triple forces each component with 'rnf', exactly as chaining
-- the three with 'seq' would.
instance NFData ParsecPos.SourcePos where
  rnf pos =
    rnf ( ParsecPos.sourceName pos
        , ParsecPos.sourceLine pos
        , ParsecPos.sourceColumn pos
        )
  {-# INLINE rnf #-}
-- | Render a source position as @file:line:column@.
showPos :: ParsecPos.SourcePos -> String
showPos pos =
  ParsecPos.sourceName pos
    ++ ":" ++ show (ParsecPos.sourceLine pos)
    ++ ":" ++ show (ParsecPos.sourceColumn pos)
-- | Render a parse error as its position followed by the formatted messages.
showErr :: ParseError.ParseError -> String
showErr err = showPos (ParseError.errorPos err) ++ ":" ++ rendered
  where
    rendered =
      ParseError.showErrorMessages
        "or" "unknown parse error"
        "expecting" "unexpected" "end of input"
        (ParseError.errorMessages err)
| da-x/buildsome-tst | src/Lib/Parsec.hs | bsd-3-clause | 1,455 | 0 | 11 | 307 | 402 | 215 | 187 | 39 | 1 |
{-# OPTIONS_GHC -Wall -fno-warn-name-shadowing #-}
{-# LANGUAGE RankNTypes, ScopedTypeVariables, GADTs, EmptyDataDecls, PatternGuards, TypeFamilies, NamedFieldPuns , FlexibleInstances, MultiParamTypeClasses, TypeSynonymInstances #-}
module Eval (evalProg, ErrorM) where
import Control.Monad.Error
import qualified Data.Map as M
import Prelude hiding (succ)
import EvalMonad
import Compiler.Hoopl
import IR
-- Evaluation functions
evalProg :: EvalTarget v => [Proc] -> [v] -> String -> [v] -> ErrorM (State v, [v])
evalProg procs vs main args = runProg procs vs $ evalProc main args
evalProc :: EvalTarget v => String -> [v] -> EvalM v [v]
evalProc proc_name actuals =
do event $ CallEvt proc_name actuals
proc <- get_proc proc_name
evalProc' proc actuals
evalProc' :: EvalTarget v => Proc -> [v] -> EvalM v [v]
evalProc' (Proc {name=_, args, body, entry}) actuals =
if length args == length actuals then
evalBody (M.fromList $ zip args actuals) body entry
else throwError $ "Param/actual mismatch: " ++ show args ++ " = " ++ show actuals
-- Responsible for allocating and deallocating its own stack frame.
evalBody :: EvalTarget v => VarEnv v -> Graph Insn C C -> Label -> EvalM v [v]
evalBody vars graph entry = inNewFrame vars graph $ get_block entry >>= evalB
-- | Evaluate one basic block by folding its first/middle/last nodes in
-- order, threading the monadic effects; the block's result is the result
-- of its closing control-flow instruction.
evalB :: forall v . EvalTarget v => Block Insn C C -> EvalM v [v]
evalB b = foldBlockNodesF3 (lift evalF, lift evalM, lift evalL) b $ return ()
  where
    -- Sequence the effects accumulated so far (z) before evaluating node n.
    lift :: forall e x y . (Insn e x -> EvalM v y) -> Insn e x -> EvalM v () -> EvalM v y
    lift f n z = z >> f n
-- | First (closed-entry) node: a label is a no-op at run time.
evalF :: EvalTarget v => Insn C O -> EvalM v ()
evalF (Label _) = return ()
-- | Middle nodes: variable assignments and heap stores.
evalM :: EvalTarget v => Insn O O -> EvalM v ()
evalM (Assign var e) =
  do v_e <- eval e
     set_var var v_e
evalM (Store addr e) =
  do v_addr <- eval addr >>= toAddr
     v_e <- eval e
     -- StoreEvt recorded in set_heap
     set_heap v_addr v_e
-- | Last (closed-exit) nodes: transfers of control. Each ultimately
-- produces the procedure's result values.
evalL :: EvalTarget v => Insn O C -> EvalM v [v]
evalL (Branch bid) =
  do b <- get_block bid
     evalB b
evalL (Cond e t f) =
  do v_e <- eval e >>= toBool
     evalL $ Branch $ if v_e then t else f
evalL (Call ress f args succ) =
  do v_args <- mapM eval args
     -- event is recorded in evalProc
     f_ress <- evalProc f v_args
     -- Arity-check the callee's results before binding them.
     if length ress == length f_ress then return ()
      else throwError $ "function " ++ f ++ " returned unexpected # of args"
     _ <- mapM (uncurry set_var) $ zip ress f_ress
     evalL $ Branch succ
evalL (Return es) =
  do vs <- mapM eval es
     event $ RetEvt vs
     return vs
-- | Operations a value domain must provide to drive the evaluator:
-- conversion to heap addresses and booleans, plus expression evaluation.
class Show v => EvalTarget v where
  toAddr :: v -> EvalM v Integer
  toBool :: v -> EvalM v Bool
  eval :: Expr -> EvalM v v
-- | Concrete interpreter over 'Value' (tagged ints and bools);
-- conversions fail with 'throwError' on the wrong constructor.
instance EvalTarget Value where
  toAddr (I i) = return i
  toAddr (B _) = throwError "conversion to address failed"
  toBool (B b) = return b
  toBool (I _) = throwError "conversion to bool failed"
  eval (Lit (Int i)) = return $ I i
  eval (Lit (Bool b)) = return $ B b
  eval (Var var) = get_var var
  eval (Load addr) =
    do v_addr <- eval addr >>= toAddr
       get_heap v_addr
  eval (Binop bop e1 e2) =
    do v1 <- eval e1
       v2 <- eval e2
       liftBinOp bop v1 v2
    where
      -- Map each syntactic operator to an arithmetic (i) or comparison (b)
      -- lifting of the corresponding Haskell operator.
      liftBinOp = liftOp
        where liftOp Add = i (+)
              liftOp Sub = i (-)
              liftOp Mul = i (*)
              liftOp Div = i div
              liftOp Eq = b (==)
              liftOp Ne = b (/=)
              liftOp Gt = b (>)
              liftOp Lt = b (<)
              liftOp Gte = b (>=)
              liftOp Lte = b (<=)
      i = liftX I fromI
      b = liftX B fromB
      -- Lift a pure binary operator into EvalM: unwrap both operands with
      -- dwn, apply op, rewrap with up.
      liftX :: Monad m => (a -> b) -> (b -> m a) -> (a -> a -> a) -> b -> b -> m b
      liftX up dwn = \ op x y -> do v_x <- dwn x
                                    v_y <- dwn y
                                    return $ up $ op v_x v_y
      fromI (I x) = return x
      fromI (B _) = throwError "fromI: got a B"
      fromB (I _) = throwError "fromB: got an I"
      fromB (B x) = return x
-- I'm under no delusion that the following example is useful,
-- but it demonstrates how the evaluator can use a new kind
-- of evaluator.
-- | Everything is an 'Integer': booleans are encoded C-style (0 is false,
-- anything else true; comparisons yield 0/1) and any integer is accepted
-- as an address.
instance EvalTarget Integer where
  toAddr i = return i
  toBool i = return $ i /= 0
  eval (Lit (Int i)) = return i
  eval (Lit (Bool True)) = return 1
  eval (Lit (Bool False)) = return 0
  eval (Var var) = get_var var
  eval (Load addr) =
    do v_addr <- eval addr >>= toAddr
       get_heap v_addr
  eval (Binop bop e1 e2) =
    do v1 <- eval e1
       v2 <- eval e2
       return $ liftBinOp bop v1 v2
    where
      -- Comparisons return 1 for true and 0 for false.
      liftBinOp = liftOp
        where liftOp Add = i (+)
              liftOp Sub = i (-)
              liftOp Mul = i (*)
              liftOp Div = i div
              liftOp Eq = b (==)
              liftOp Ne = b (/=)
              liftOp Gt = b (>)
              liftOp Lt = b (<)
              liftOp Gte = b (>=)
              liftOp Lte = b (<=)
      i = id
      b opr x y = if opr x y then 1 else 0
-- Symbolic evaluation.
-- Hard questions:
-- - how do we get heap addresses?
-- - how do we get conditionals?
-- - how do we compare symbolic expressions?
-- | A symbolic value: a literal, a program input, a heap load, or an
-- uninterpreted binary operation over symbolic operands.
data Sym = L Lit
         | In Integer -- In x indicates a value on entry to the program
         | Ld Sym
         | BO BinOp Sym Sym
  deriving Show
-- sym_vsupply :: [Sym]
-- sym_vsupply = [In n | n <- [0..]]
-- | Symbolic interpreter: builds expression trees instead of computing.
-- NOTE(review): 'toAddr'/'toBool' are 'undefined', so evaluating any
-- Load or conditional under this instance will crash.
instance EvalTarget Sym where
  toAddr _ = undefined
  toBool _ = undefined
  eval (Lit l) = return $ L l
  eval (Var var) = get_var var
  eval (Load addr) =
    do v_addr <- eval addr >>= toAddr
       get_heap v_addr
  eval (Binop bop e1 e2) =
    do v1 <- eval e1
       v2 <- eval e2
       return $ BO bop v1 v2
| ezyang/hoopl | testing/Eval.hs | bsd-3-clause | 5,748 | 0 | 16 | 1,898 | 2,171 | 1,065 | 1,106 | 139 | 3 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
-----------------------------------------------------------------------------
-- | This module provides access to internal garbage collection and
-- memory usage statistics. These statistics are not available unless
-- a program is run with the @-T@ RTS flag.
--
-- This module is GHC-only and should not be considered portable.
--
-- @since 4.5.0.0
-----------------------------------------------------------------------------
-- TODO: Implement
-- NOTE(review): every export below is commented out, so this module
-- currently exposes nothing; the former GCStats API further down is
-- likewise entirely commented out.
module GHC.Stats
    (
    -- GCStats(..)
    -- , getGCStats
    -- , getGCStatsEnabled
) where
-- import Control.Monad
-- import Data.Int
-- import GHC.Base
-- import GHC.Read ( Read )
-- import GHC.Show ( Show )
-- import GHC.IO.Exception
-- import Foreign.Marshal.Alloc
-- import Foreign.Storable
-- import Foreign.Ptr
-- #include "Rts.h"
-- foreign import ccall "getGCStats" getGCStats_ :: Ptr () -> IO ()
-- -- | Returns whether GC stats have been enabled (with @+RTS -T@, for example).
-- --
-- -- @since 4.6.0.0
-- foreign import ccall "getGCStatsEnabled" getGCStatsEnabled :: IO Bool
-- -- I'm probably violating a bucket of constraints here... oops.
-- -- | Global garbage collection and memory statistics.
-- --
-- -- @since 4.5.0.0
-- data GCStats = GCStats
-- { bytesAllocated :: !Int64 -- ^ Total number of bytes allocated
-- , numGcs :: !Int64 -- ^ Number of garbage collections performed
-- , maxBytesUsed :: !Int64 -- ^ Maximum number of live bytes seen so far
-- , numByteUsageSamples :: !Int64 -- ^ Number of byte usage samples taken
-- -- | Sum of all byte usage samples, can be used with
-- -- 'numByteUsageSamples' to calculate averages with
-- -- arbitrary weighting (if you are sampling this record multiple
-- -- times).
-- , cumulativeBytesUsed :: !Int64
-- , bytesCopied :: !Int64 -- ^ Number of bytes copied during GC
-- , currentBytesUsed :: !Int64 -- ^ Current number of live bytes
-- , currentBytesSlop :: !Int64 -- ^ Current number of bytes lost to slop
-- , maxBytesSlop :: !Int64 -- ^ Maximum number of bytes lost to slop at any one time so far
-- , peakMegabytesAllocated :: !Int64 -- ^ Maximum number of megabytes allocated
-- -- | CPU time spent running mutator threads. This does not include
-- -- any profiling overhead or initialization.
-- , mutatorCpuSeconds :: !Double
-- -- | Wall clock time spent running mutator threads. This does not
-- -- include initialization.
-- , mutatorWallSeconds :: !Double
-- , gcCpuSeconds :: !Double -- ^ CPU time spent running GC
-- , gcWallSeconds :: !Double -- ^ Wall clock time spent running GC
-- , cpuSeconds :: !Double -- ^ Total CPU time elapsed since program start
-- , wallSeconds :: !Double -- ^ Total wall clock time elapsed since start
-- -- | Number of bytes copied during GC, minus space held by mutable
-- -- lists held by the capabilities. Can be used with
-- -- 'parMaxBytesCopied' to determine how well parallel GC utilized
-- -- all cores.
-- , parTotBytesCopied :: !Int64
-- -- | Sum of number of bytes copied each GC by the most active GC
-- -- thread each GC. The ratio of 'parTotBytesCopied' divided by
-- -- 'parMaxBytesCopied' approaches 1 for a maximally sequential
-- -- run and approaches the number of threads (set by the RTS flag
-- -- @-N@) for a maximally parallel run.
-- , parMaxBytesCopied :: !Int64
-- } deriving (Show, Read)
-- {-
-- , initCpuSeconds :: !Double
-- , initWallSeconds :: !Double
-- -}
-- -- | Retrieves garbage collection and memory statistics as of the last
-- -- garbage collection. If you would like your statistics as recent as
-- -- possible, first run a 'System.Mem.performGC'.
-- --
-- -- @since 4.5.0.0
-- getGCStats :: IO GCStats
-- getGCStats = do
-- statsEnabled <- getGCStatsEnabled
-- unless statsEnabled . ioError $ IOError
-- Nothing
-- UnsupportedOperation
-- ""
-- "getGCStats: GC stats not enabled. Use `+RTS -T -RTS' to enable them."
-- Nothing
-- Nothing
-- allocaBytes (#size GCStats) $ \p -> do
-- getGCStats_ p
-- bytesAllocated <- (# peek GCStats, bytes_allocated) p
-- numGcs <- (# peek GCStats, num_gcs ) p
-- numByteUsageSamples <- (# peek GCStats, num_byte_usage_samples ) p
-- maxBytesUsed <- (# peek GCStats, max_bytes_used ) p
-- cumulativeBytesUsed <- (# peek GCStats, cumulative_bytes_used ) p
-- bytesCopied <- (# peek GCStats, bytes_copied ) p
-- currentBytesUsed <- (# peek GCStats, current_bytes_used ) p
-- currentBytesSlop <- (# peek GCStats, current_bytes_slop) p
-- maxBytesSlop <- (# peek GCStats, max_bytes_slop) p
-- peakMegabytesAllocated <- (# peek GCStats, peak_megabytes_allocated ) p
-- {-
-- initCpuSeconds <- (# peek GCStats, init_cpu_seconds) p
-- initWallSeconds <- (# peek GCStats, init_wall_seconds) p
-- -}
-- mutatorCpuSeconds <- (# peek GCStats, mutator_cpu_seconds) p
-- mutatorWallSeconds <- (# peek GCStats, mutator_wall_seconds) p
-- gcCpuSeconds <- (# peek GCStats, gc_cpu_seconds) p
-- gcWallSeconds <- (# peek GCStats, gc_wall_seconds) p
-- cpuSeconds <- (# peek GCStats, cpu_seconds) p
-- wallSeconds <- (# peek GCStats, wall_seconds) p
-- parTotBytesCopied <- (# peek GCStats, par_tot_bytes_copied) p
-- parMaxBytesCopied <- (# peek GCStats, par_max_bytes_copied) p
-- return GCStats { .. }
{-
-- Nontrivial to implement: TaskStats needs arbitrarily large
-- amounts of memory, spark stats wants to use SparkCounters
-- but that needs a new rts/ header.
data TaskStats = TaskStats
{ taskMutCpuSeconds :: Int64
, taskMutWallSeconds :: Int64
, taskGcCpuSeconds :: Int64
, taskGcWallSeconds :: Int64
} deriving (Show, Read)
data SparkStats = SparkStats
{ sparksCreated :: Int64
, sparksDud :: Int64
, sparksOverflowed :: Int64
, sparksConverted :: Int64
, sparksGcd :: Int64
, sparksFizzled :: Int64
} deriving (Show, Read)
-- We also could get per-generation stats, which requires a
-- non-constant but at runtime known about of memory.
-}
| pparkkin/eta | libraries/base/GHC/Stats.hs | bsd-3-clause | 6,290 | 0 | 3 | 1,338 | 127 | 124 | 3 | 6 | 0 |
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE UnboxedTuples #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE ScopedTypeVariables #-}
import GHC.Exts
import GHC.IO
import ClosureSizeUtils
-- Lifted wrappers around unlifted primitive heap objects so they can be
-- bound with ordinary pattern matches and passed to the size assertions.
data A = A (Array# Int)                       -- immutable boxed array
data MA = MA (MutableArray# RealWorld Int)    -- mutable boxed array
data BA = BA ByteArray#                       -- immutable byte array
data MBA = MBA (MutableByteArray# RealWorld)  -- mutable byte array
data APC a = APC a  -- NOTE(review): unused in the code visible here
-- | Checks closure sizes for a range of heap objects, from simple lifted
-- constructors to primitive arrays (expected sizes per ClosureSizeUtils —
-- presumably measured in words; confirm against that module).
main :: IO ()
main = do
  assertSize 'a' 2
  assertSize (Just ()) 2
  assertSize (Nothing :: Maybe ()) 2
  assertSize ((1,2) :: (Int,Int)) 3
  assertSize ((1,2,3) :: (Int,Int,Int)) 4
  -- Allocate an empty MutableArray#; primops run in the raw State# world,
  -- so each result is rewrapped in a lifted constructor inside IO.
  MA ma <- IO $ \s ->
    case newArray# 0# 0 s of
      (# s1, x #) -> (# s1, MA x #)
  -- Freeze a copy of the mutable array into an immutable Array#.
  A a <- IO $ \s ->
    case freezeArray# ma 0# 0# s of
      (# s1, x #) -> (# s1, A x #)
  MBA mba <- IO $ \s ->
    case newByteArray# 0# s of
      (# s1, x #) -> (# s1, MBA x #)
  -- The frozen ByteArray# is obtained by unsafe in-place freezing.
  BA ba <- IO $ \s ->
    case newByteArray# 0# s of
      (# s1, x #) ->
        case unsafeFreezeByteArray# x s1 of
          (# s2, y #) -> (# s2, BA y #)
  assertSizeUnlifted ma 3
  assertSizeUnlifted a 3
  assertSizeUnlifted mba 2
  assertSizeUnlifted ba 2
| sdiehl/ghc | libraries/ghc-heap/tests/closure_size.hs | bsd-3-clause | 1,126 | 0 | 17 | 330 | 447 | 227 | 220 | 38 | 1 |
{-# LANGUAGE MagicHash #-}
module T15460 where
import GHC.Int
-- | Prints an 'Int' built from a magic-hash literal that is far wider
-- than 64 bits. NOTE(review): presumably a deliberate fixture for GHC's
-- overflowed-literal warning (T15460) — confirm before "fixing" it.
main :: IO ()
main = do
  let x = I# (0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff#)
  print x
| sdiehl/ghc | testsuite/tests/warnings/should_compile/T15460.hs | bsd-3-clause | 182 | 0 | 11 | 31 | 46 | 24 | 22 | 7 | 1 |
module LiftToToplevel.PatBindIn3 where
--A definition can be lifted from a where or let to the top level binding group.
--Lifting a definition widens the scope of the definition.
--In this example, lift 'sq' defined in 'sumSquares'
--This example aims to test changing a constant to a function.
-- 'sumSquares' adds x^2 to itself; note 'sq' is a *pattern binding*
-- (a constant, no arguments) computed from the enclosing x.
sumSquares x = sq + sq
       where
          sq = x^pow
          pow =2
-- 'anotherFun' is defined only for a first argument of 0; its 'sq' is a
-- one-argument *function binding*, in contrast to the constant above.
anotherFun 0 y = sq y
     where sq x = x^2
| kmate/HaRe | test/testdata/LiftToToplevel/PatBindIn3.hs | bsd-3-clause | 447 | 0 | 7 | 136 | 65 | 36 | 29 | 6 | 1 |
module Tests.Ratio where
import Data.Ratio
-- | Prints @1 % 4@ through several wrappings of 'Ratio'; the trailing
-- Pass/Fail notes are the original author's expected outcomes
-- (presumably under the Haste compiler — confirm).
runTest = do
  print $ (1 % 4) -- Pass
  print $ ((1 % 4) :: Ratio Int) -- Fail
  print $ ((1 % 4) :: TypeT) -- Fail
  print $ DataT {d1 = 1 % 4} -- Fail
  print $ DataT (1 % 4) -- Fail
  print $ d3 $ NewtypeT (1 % 4) -- Fail
-- Record wrapping a @Ratio Int@; derives 'Show'.
data DataT = DataT {d1 :: Ratio Int} deriving (Show)
-- Newtype wrapping a @Ratio Int@; no 'Show' instance derived here.
newtype NewtypeT = NewtypeT { d3 :: Ratio Int }
-- Plain synonym for @Ratio Int@.
type TypeT = Ratio Int
| beni55/haste-compiler | Tests/Ratio.hs | bsd-3-clause | 438 | 0 | 10 | 144 | 186 | 103 | 83 | 12 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Package
-- Copyright : Isaac Jones 2003-2004
-- License : BSD3
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- Defines a package identifier along with a parser and pretty printer for it.
-- 'PackageIdentifier's consist of a name and an exact version. It also defines
-- a 'Dependency' data type. A dependency is a package name and a version
-- range, like @\"foo >= 1.2 && < 2\"@.
module Distribution.Package (
-- * Package ids
PackageName(..),
PackageIdentifier(..),
PackageId,
-- * Installed package identifiers
InstalledPackageId(..),
-- * Package keys (used for linker symbols)
PackageKey(..),
mkPackageKey,
packageKeyHash,
packageKeyLibraryName,
-- * Library name (used for install path, package key)
LibraryName(..),
emptyLibraryName,
getHSLibraryName,
-- * Package source dependencies
Dependency(..),
thisPackageVersion,
notThisPackageVersion,
simplifyDependency,
-- * Package classes
Package(..), packageName, packageVersion,
HasInstalledPackageId(..),
PackageInstalled(..),
) where
import Distribution.Version
( Version(..), VersionRange, anyVersion, thisVersion
, notThisVersion, simplifyVersionRange )
import Distribution.Text (Text(..), display)
import qualified Distribution.Compat.ReadP as Parse
import Distribution.Compat.ReadP ((<++))
import qualified Text.PrettyPrint as Disp
import Control.DeepSeq (NFData(..))
import Distribution.Compat.Binary (Binary)
import qualified Data.Char as Char
( isDigit, isAlphaNum, isUpper, isLower, ord, chr )
import Data.Data ( Data )
import Data.List ( intercalate, foldl', sort )
import Data.Typeable ( Typeable )
import Data.Word ( Word64 )
import GHC.Fingerprint ( Fingerprint(..), fingerprintString )
import GHC.Generics (Generic)
import Numeric ( showIntAtBase )
import Text.PrettyPrint ((<>), (<+>), text)
-- | The name of a package, e.g. @foo-bar@: dash-separated alphanumeric
-- components, each containing at least one non-digit.
newtype PackageName = PackageName { unPackageName :: String }
    deriving (Generic, Read, Show, Eq, Ord, Typeable, Data)
instance Binary PackageName
instance Text PackageName where
  disp (PackageName n) = Disp.text n
  parse = do
    ns <- Parse.sepBy1 component (Parse.char '-')
    return (PackageName (intercalate "-" ns))
    where
      component = do
        cs <- Parse.munch1 Char.isAlphaNum
        if all Char.isDigit cs then Parse.pfail else return cs
      -- each component must contain an alphabetic character, to avoid
      -- ambiguity in identifiers like foo-1 (the 1 is the version number).
instance NFData PackageName where
    rnf (PackageName pkg) = rnf pkg
-- | Type alias so we can use the shorter name PackageId.
type PackageId = PackageIdentifier
-- | The name and version of a package.
data PackageIdentifier
    = PackageIdentifier {
        pkgName :: PackageName, -- ^The name of this package, eg. foo
        pkgVersion :: Version -- ^the version of this package, eg 1.2
     }
    deriving (Generic, Read, Show, Eq, Ord, Typeable, Data)
instance Binary PackageIdentifier
-- | Rendered as @name-version@; a null version prints (and parses) as
-- just the bare name.
instance Text PackageIdentifier where
  disp (PackageIdentifier n v) = case v of
    Version [] _ -> disp n -- if no version, don't show version.
    _ -> disp n <> Disp.char '-' <> disp v
  parse = do
    n <- parse
    v <- (Parse.char '-' >> parse) <++ return (Version [] [])
    return (PackageIdentifier n v)
instance NFData PackageIdentifier where
    rnf (PackageIdentifier name version) = rnf name `seq` rnf version
-- ------------------------------------------------------------
-- * Installed Package Ids
-- ------------------------------------------------------------
-- | An InstalledPackageId uniquely identifies an instance of an installed
-- package. There can be at most one package with a given 'InstalledPackageId'
-- in a package database, or overlay of databases.
--
newtype InstalledPackageId = InstalledPackageId String
 deriving (Generic, Read,Show,Eq,Ord,Typeable,Data)
instance Binary InstalledPackageId
-- | Parses a non-empty run of alphanumerics plus @-@, @_@ and @.@.
instance Text InstalledPackageId where
  disp (InstalledPackageId str) = text str
  parse = InstalledPackageId `fmap` Parse.munch1 abi_char
    where abi_char c = Char.isAlphaNum c || c `elem` "-_."
-- ------------------------------------------------------------
-- * Package Keys
-- ------------------------------------------------------------
-- | A 'PackageKey' is the notion of "package ID" which is visible to the
-- compiler. Why is this not a 'PackageId'? The 'PackageId' is a user-visible
-- concept written explicity in Cabal files; on the other hand, a 'PackageKey'
-- may contain, for example, information about the transitive dependency
-- tree of a package. Why is this not an 'InstalledPackageId'? A 'PackageKey'
-- should be stable so that we can incrementally recompile after a source edit;
-- however, an 'InstalledPackageId' may change even with source.
--
-- Package keys may be generated either by Cabal or GHC. In particular,
-- ordinary, "old-style" packages which don't use Backpack features can
-- have their package keys generated directly by Cabal and coincide with
-- 'LibraryName's. However, Backpack keys are generated by GHC may exhibit
-- more variation than a 'LibraryName'.
--
data PackageKey
    -- | Modern package key which is a hash of the PackageId and the transitive
    -- dependency key. It's manually inlined here so we can get the instances
    -- we need. There's an optional prefix for compatibility with GHC 7.10.
    = PackageKey (Maybe String) {-# UNPACK #-} !Word64 {-# UNPACK #-} !Word64
    -- | Old-style package key which is just a 'PackageId'. Required because
    -- old versions of GHC assume that the 'sourcePackageId' recorded for an
    -- installed package coincides with the package key it was compiled with.
    | OldPackageKey !PackageId
  deriving (Generic, Read, Show, Eq, Ord, Typeable, Data)
-- NOTE(review): the derived 'Ord' sorts every new-style 'PackageKey'
-- before every 'OldPackageKey' (constructor order).
instance Binary PackageKey
-- | Convenience function which converts a fingerprint into a new-style package
-- key.
fingerprintPackageKey :: Fingerprint -> PackageKey
fingerprintPackageKey (Fingerprint a b) = PackageKey Nothing a b
-- | Generates a 'PackageKey' from a 'PackageId', sorted package keys of the
-- immediate dependencies.
mkPackageKey :: Bool -- are modern style package keys supported?
             -> PackageId
             -> [LibraryName] -- dependencies
             -> PackageKey
-- Hash "<pkgid>\n" followed by one "<dep>\n" line per dependency; the
-- dependencies are sorted so the key is independent of their order.
mkPackageKey True pid deps =
    fingerprintPackageKey . fingerprintString $
        display pid ++ "\n" ++
        concat [ display dep ++ "\n" | dep <- sort deps ]
mkPackageKey False pid _ = OldPackageKey pid
-- The base-62 code is based off of 'locators'
-- ((c) Operational Dynamics Consulting, BSD3 licensed)
-- Note: Instead of base-62 encoding a single 128-bit integer
-- (ceil(21.49) characters), we'll base-62 a pair of 64-bit integers
-- (2 * ceil(10.75) characters). Luckily for us, it's the same number of
-- characters! In the long term, this should go in GHC.Fingerprint,
-- but not now...
-- | Width of a 'Word64' rendered in base 62: ceil(64 / lg 62) = 11.
word64Base62Len :: Int
word64Base62Len = 11

-- | Render a 64-bit word as a fixed-width base-62 string, left-padded
-- with @'0'@ to 'word64Base62Len' characters. Digits are @0-9A-Za-z@.
toBase62 :: Word64 -> String
toBase62 w = replicate (word64Base62Len - length digits) '0' ++ digits
  where
    digits = showIntAtBase 62 toDigit w ""
    -- Map a digit value in [0, 62) to its character.
    toDigit :: Int -> Char
    toDigit d
      | d < 10 = Char.chr (48 + d)
      | d < 36 = Char.chr (65 + d - 10)
      | d < 62 = Char.chr (97 + d - 36)
      | otherwise = error ("represent (base 62): impossible!")
-- | Parse a base-62 string (digits @0-9A-Za-z@) into a 64-bit word.
-- The empty string yields 0; overflow wraps silently in 'Word64'.
fromBase62 :: String -> Word64
fromBase62 = foldl' step 0
  where
    -- Horner's rule: shift the accumulator by one base-62 digit.
    step :: Word64 -> Char -> Word64
    step acc c = acc * 62 + fromIntegral (digitValue c)
    digitValue :: Char -> Int
    digitValue c
      | Char.isDigit c = Char.ord c - 48
      | Char.isUpper c = Char.ord c - 65 + 10
      | Char.isLower c = Char.ord c - 97 + 36
      | otherwise = error ("value (base 62): impossible!")
-- | Parses a base-62 string into a fingerprint.
-- The first 'word64Base62Len' characters form the first word, the next
-- 'word64Base62Len' the second. NOTE(review): shorter input is not
-- rejected — missing digits silently decode as 0 (fromBase62 "" == 0).
readBase62Fingerprint :: String -> Fingerprint
readBase62Fingerprint s = Fingerprint w1 w2
  where (s1,s2) = splitAt word64Base62Len s
        w1 = fromBase62 s1
        w2 = fromBase62 (take word64Base62Len s2)
-- | Render just the hash portion of a package key (no prefix). In GHC
-- 7.12 this is equivalent to 'display'.
packageKeyHash :: PackageKey -> String
packageKeyHash key =
    case key of
      PackageKey _ w1 w2 -> toBase62 w1 ++ toBase62 w2
      OldPackageKey pid -> display pid
-- | Legacy helper for GHC 7.10: derive a 'LibraryName' from a package id
-- and its package key (@pkgid-hash@ for new-style keys).
packageKeyLibraryName :: PackageId -> PackageKey -> LibraryName
packageKeyLibraryName pid key = LibraryName (render key)
  where
    render (PackageKey _ w1 w2) = display pid ++ "-" ++ toBase62 w1 ++ toBase62 w2
    render (OldPackageKey pid') = display pid'
-- | New-style keys print as an optional @prefix_@ followed by the two
-- base-62 words; old-style keys print as the package id. Parsing tries
-- the annotated form first, then the bare hash, then the old style.
instance Text PackageKey where
  disp (PackageKey mb_prefix w1 w2)
    = maybe Disp.empty (\r -> Disp.text r <> Disp.char '_') mb_prefix <>
      Disp.text (toBase62 w1) <> Disp.text (toBase62 w2)
  disp (OldPackageKey pid) = disp pid
  parse = parseNewWithAnnot <++ parseNew <++ parseOld
    where parseNew = do
            -- Exactly 22 alphanumerics: two base-62-encoded Word64s.
            fmap (fingerprintPackageKey . readBase62Fingerprint)
                . Parse.count (word64Base62Len * 2)
                $ Parse.satisfy Char.isAlphaNum
          parseNewWithAnnot = do
            -- this is ignored
            prefix <- Parse.munch1 (\c -> Char.isAlphaNum c || c `elem` "-")
            _ <- Parse.char '_' -- if we use '-' it's ambiguous
            PackageKey _ w1 w2 <- parseNew
            return (PackageKey (Just prefix) w1 w2)
          parseOld = do pid <- parse
                        return (OldPackageKey pid)
instance NFData PackageKey where
  rnf (PackageKey mb _ _) = rnf mb
  rnf (OldPackageKey pid) = rnf pid
-- ------------------------------------------------------------
-- * Library names
-- ------------------------------------------------------------
-- | A library name consists of not only a source package
-- id ('PackageId') but also the library names of all textual
-- dependencies; thus, a library name uniquely identifies an
-- installed package up to the dependency resolution done by Cabal.
-- Create using 'packageKeyLibraryName'. Library names are opaque,
-- Cabal-defined strings.
-- Opaque, Cabal-defined library-name string (see module comments above).
newtype LibraryName
    = LibraryName String
    deriving (Generic, Read, Show, Eq, Ord, Typeable, Data)
instance Binary LibraryName
-- | Default library name for when it is not known.
emptyLibraryName :: LibraryName
emptyLibraryName = LibraryName ""
-- | Returns library name prefixed with HS, suitable for filenames
getHSLibraryName :: LibraryName -> String
getHSLibraryName (LibraryName s) = "HS" ++ s
-- | Parses a non-empty run of alphanumerics plus @-@, @_@ and @.@.
instance Text LibraryName where
  disp (LibraryName s) = Disp.text s
  parse = LibraryName `fmap` Parse.munch1 hash_char
    where hash_char c = Char.isAlphaNum c || c `elem` "-_."
instance NFData LibraryName where
    rnf (LibraryName s) = rnf s
-- ------------------------------------------------------------
-- * Package source dependencies
-- ------------------------------------------------------------
-- | Describes a dependency on a source package (API)
--
data Dependency = Dependency PackageName VersionRange
                  deriving (Generic, Read, Show, Eq, Typeable, Data)
instance Binary Dependency
-- | Rendered as e.g. @foo >= 1.2@; a missing range parses as 'anyVersion'.
instance Text Dependency where
  disp (Dependency name ver) =
    disp name <+> disp ver
  parse = do name <- parse
             Parse.skipSpaces
             ver <- parse <++ return anyVersion
             Parse.skipSpaces
             return (Dependency name ver)
-- | Dependency on exactly the given package's version.
thisPackageVersion :: PackageIdentifier -> Dependency
thisPackageVersion (PackageIdentifier n v) =
  Dependency n (thisVersion v)
-- | Dependency on any version of the package except the given one.
notThisPackageVersion :: PackageIdentifier -> Dependency
notThisPackageVersion (PackageIdentifier n v) =
  Dependency n (notThisVersion v)
-- | Simplify the 'VersionRange' expression in a 'Dependency'.
-- See 'simplifyVersionRange'.
--
simplifyDependency :: Dependency -> Dependency
simplifyDependency (Dependency name range) =
  Dependency name (simplifyVersionRange range)
-- | Class of things that have a 'PackageIdentifier'
--
-- Types in this class are all notions of a package. This allows us to have
-- different types for the different phases that packages go though, from
-- simple name\/id, package description, configured or installed packages.
--
-- Not all kinds of packages can be uniquely identified by a
-- 'PackageIdentifier'. In particular, installed packages cannot, there may be
-- many installed instances of the same source package.
--
class Package pkg where
  packageId :: pkg -> PackageIdentifier
-- | The name component of a package's identifier.
packageName :: Package pkg => pkg -> PackageName
packageName = pkgName . packageId
-- | The version component of a package's identifier.
packageVersion :: Package pkg => pkg -> Version
packageVersion = pkgVersion . packageId
instance Package PackageIdentifier where
  packageId = id
-- | Packages that have an installed package ID
class Package pkg => HasInstalledPackageId pkg where
  installedPackageId :: pkg -> InstalledPackageId
-- | Class of installed packages.
--
-- The primary data type which is an instance of this package is
-- 'InstalledPackageInfo', but when we are doing install plans in Cabal install
-- we may have other, installed package-like things which contain more metadata.
-- Installed packages have exact dependencies 'installedDepends'.
class HasInstalledPackageId pkg => PackageInstalled pkg where
  installedDepends :: pkg -> [InstalledPackageId]
| rimmington/cabal | Cabal/Distribution/Package.hs | bsd-3-clause | 13,824 | 0 | 17 | 2,861 | 2,612 | 1,408 | 1,204 | 196 | 1 |
module ConstructorIn3 where
-- | A binary search tree: strictly-greater elements to the right,
-- everything else (including duplicates) to the left.
data BTree b
  = Empty | T b (BTree b) (BTree b) deriving Show

-- | Build a tree by inserting the list's elements from right to left.
buildtree :: Ord a => [a] -> BTree a
buildtree = foldr insert Empty

-- | Insert one value, preserving the search-tree ordering.
insert :: Ord a => a -> (BTree a) -> BTree a
insert val Empty = T val Empty Empty
insert val (T tval left right)
  | val > tval = T tval left (insert val right)
  | otherwise = T tval (insert val left) right
-- | Example tree built from [3, 1, 2].
-- NOTE(review): this 'main' is a plain value, not an IO action.
main :: BTree Int
main = buildtree [3, 1, 2]
| kmate/HaRe | old/testing/renaming/ConstructorIn3_AstOut.hs | bsd-3-clause | 473 | 0 | 9 | 120 | 240 | 122 | 118 | 13 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE StaticPointers #-}
-- | A test to try the static form in splices, which should fail.
--
module Main(main) where
import GHC.Fingerprint
import GHC.StaticPtr
-- | Splices the key of @static 'a'@ into an expression that looks the
-- pointer back up at run time. Per the module comment above, using the
-- static form inside a splice like this is expected to be rejected.
main = print $ $(case staticKey (static 'a') of
                Fingerprint w0 w1 ->
                  let w0i = fromIntegral w0 :: Integer
                      w1i = fromIntegral w1 :: Integer
                  in
                   [| fmap (\p -> deRefStaticPtr p :: Char) $ unsafeLookupStaticPtr $
                        Fingerprint (fromIntegral w0i) (fromIntegral w1i)
                    |]
                )
| siddhanathan/ghc | testsuite/tests/th/TH_StaticPointers02.hs | bsd-3-clause | 513 | 0 | 15 | 122 | 91 | 51 | 40 | 11 | 1 |
module Main where
import System.Environment
import System.IO
-- | Toy preprocessor: invoked as @prog _ SOURCE TARGET ...@, copies
-- SOURCE to TARGET rewriting every @'0'@ character to @'1'@.
main :: IO ()
main = do
  -- Pattern bind: fewer than three arguments raises the usual IO
  -- pattern-match failure.
  (_:source:target:_) <- getArgs
  contents <- readFile source
  writeFile target (map zeroToOne contents)
  where
    -- '0' becomes '1'; every other character is copied unchanged.
    zeroToOne '0' = '1'
    zeroToOne c = c
| themoritz/cabal | cabal-testsuite/PackageTests/BuildTools/Internal/MyCustomPreprocessor.hs | bsd-3-clause | 200 | 0 | 11 | 49 | 91 | 45 | 46 | 9 | 2 |
module Main (main) where
import System.Exit
import System.IO
-- | Prints "Q1" then terminates with a failing exit code; the final
-- 'putStrLn' never runs because 'exitFailure' throws 'ExitFailure'
-- and aborts the rest of the do-block.
main :: IO ()
main = do putStrLn "Q1"
          exitFailure
          putStrLn "Q2"
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/ghc-e/should_run/T3890.hs | bsd-3-clause | 147 | 0 | 7 | 42 | 49 | 25 | 24 | 7 | 1 |
{-# LANGUAGE TypeFamilies #-}
module Simple18 where
-- | Simple associated-type-family example: @F Int@ reduces to @[Int]@.
-- (Fix: stray dataset metadata fused onto the last line has been removed,
-- restoring valid Haskell.)
type family F a
type instance F Int = [Int]

-- | A value whose declared type normalises through the family to @[Int]@.
foo :: F Int
foo = [1]
Module: Math.Ftensor.Algebra
Copyright: (c) 2015 Michael Benfield
License: ISC
Classes supporting arithmetic operations. Haskell's numeric classes are too
coarse for our needs. Types which are instances of more than one of these
classes should satisfy the obvious compatibility relations between the various
operations.
For instance, the equalities @x -. x = zero@ and @x *. (y +. z) = x *. y +. x
*. z@ and @one *. x = x@ should hold. If the type in question implements
inexact arithmetic, they should hold approximately.
For an instance of @Num@, these should hold whenever the type is also an
instance of the classes defining these functions:
* @(+.) = (+)@
* @(*.) = (*)@
* @(-.) = (-)@
* @neg = negate@
* @one = fromInteger 1@
* @zero = fromInteger 0@
Similarly, for an instance of @Fractional@
* @(\/.) = (\/)@
* @inv = recip@
-}
{-# LANGUAGE CPP #-}
module Math.Ftensor.Algebra (
Additive(..),
WithZero(..),
WithNegatives(..),
Multiplicative(..),
WithOne(..),
WithReciprocals(..),
WithScalars(..),
Rg,
Rng,
Rig,
Ring,
Field,
Module,
VectorSpace,
Algebra,
UnitalAlgebra,
) where
import Data.Complex (Complex)
import Data.Int
import Data.Ratio (Ratio)
import Data.Word
import Numeric.Natural
infixl 6 +.
-- | Types supporting an addition-like operation.
class Additive a where
    (+.) :: a -> a -> a
-- | Types that have an additive identity. Should satisfy:
--
-- * @x +. zero = zero +. x = x@
class Additive a => WithZero a where
    zero :: a
infixl 6 -.
-- | Types that have additive inverses. Should satisfy:
-- @neg x = zero -. x@ and @x -. x = zero@.
class WithZero a => WithNegatives a where
    {-# MINIMAL neg | (-.) #-}
    neg :: a -> a
    neg x = zero -. x
    (-.) :: a -> a -> a
    lhs -. rhs = lhs +. neg rhs
infixl 7 *.
-- | Types supporting a multiplication-like operation.
class Multiplicative a where
    (*.) :: a -> a -> a
-- | Types with a multiplicative identity. Should satisfy:
--
-- * @one *. x = x *. one = x@.
class Multiplicative a => WithOne a where
    one :: a
infixl 7 /.
-- | Types with multiplicative inverse.
-- @inv x@ and @y /. x@
-- may throw an exception or behave in undefined ways if
-- @invertible x = False@.
--
-- * @inv x = one /. x@
--
-- * @y *. x /. y = x@
--
-- * @x /. y *. x = y@
class WithOne a => WithReciprocals a where
    {-# MINIMAL invertible, (inv | (/.)) #-}
    invertible :: a -> Bool
    inv :: a -> a
    inv x = one /. x
    (/.) :: a -> a -> a
    lhs /. rhs = lhs *. inv rhs
infixl 7 *:
-- | Types like mathematical vectors that can be multiplied by another type.
class WithScalars a where
    type Scalar a
    (*:) :: Scalar a -> a -> a
-- | A @Rg@ is a Ring without one or negatives.
type Rg a = (WithZero a, Multiplicative a)
-- | A @Rng@ is a Ring without one.
type Rng a = (Rg a, WithNegatives a)
-- | A @Rig@ is a Ring without negatives.
type Rig a = (Rg a, WithOne a)
-- | Additive group plus multiplicative monoid.
type Ring a = (Rng a, Rig a)
-- | A 'Ring' whose elements also have reciprocals (see 'WithReciprocals').
type Field a = (Ring a, WithReciprocals a)
-- | An additive group with a scalar action by a 'Ring'.
type Module a = (WithNegatives a, WithScalars a, Ring (Scalar a))
-- | A 'Module' whose scalars form a 'Field'.
type VectorSpace a = (Module a, Field (Scalar a))
-- | A 'VectorSpace' that is also a 'Rng'.
type Algebra a = (VectorSpace a, Rng a)
-- | An 'Algebra' with a multiplicative unit.
type UnitalAlgebra a = (Algebra a, WithOne a)
-- CPP boilerplate: instantiate the additive/multiplicative class stack for
-- a 'Num' type by delegating every method to the Prelude operator (the
-- compatibility laws are spelled out in the module header).
#define INSTANCES_NUM(typ, ctxt) \
instance ctxt Additive (typ) where { \
    ; {-# INLINE (+.) #-} \
    ; (+.) = (+) \
} ; instance ctxt WithZero (typ) where { \
    ; {-# INLINE zero #-} \
    ; zero = 0 \
} ; instance ctxt WithNegatives (typ) where { \
    ; {-# INLINE neg #-} \
    ; neg = negate \
    ; {-# INLINE (-.) #-} \
    ; (-.) = (-) \
} ; instance ctxt Multiplicative (typ) where { \
    ; {-# INLINE (*.) #-} \
    ; (*.) = (*) \
} ; instance ctxt WithOne (typ) where { \
    ; {-# INLINE one #-} \
    ; one = 1 \
} ; instance ctxt WithScalars (typ) where { \
    ; type Scalar (typ) = typ \
    ; {-# INLINE (*:) #-} \
    ; (*:) = (*) \
}
-- As INSTANCES_NUM, plus 'WithReciprocals' delegating to 'Fractional'.
#define INSTANCES_FRACTIONAL(typ, ctxt) \
INSTANCES_NUM(typ, ctxt) ; \
instance ctxt WithReciprocals (typ) where { \
    ; {-# INLINE invertible #-} \
    ; invertible = (/= 0) \
    ; {-# INLINE inv #-} \
    ; inv = recip \
    ; {-# INLINE (/.) #-} \
    ; (/.) = (/) \
}
-- Integral types get the ring stack only; fractional types also get
-- reciprocals. The second macro argument is the instance context.
INSTANCES_NUM(Natural, )
INSTANCES_NUM(Integer, )
INSTANCES_NUM(Int, )
INSTANCES_NUM(Int8, )
INSTANCES_NUM(Int16, )
INSTANCES_NUM(Int32, )
INSTANCES_NUM(Int64, )
INSTANCES_NUM(Word, )
INSTANCES_NUM(Word8, )
INSTANCES_NUM(Word16, )
INSTANCES_NUM(Word32, )
INSTANCES_NUM(Word64, )
INSTANCES_FRACTIONAL(Float, )
INSTANCES_FRACTIONAL(Double, )
INSTANCES_FRACTIONAL(Complex a, RealFloat a =>)
INSTANCES_FRACTIONAL(Ratio a, Integral a =>)
| mikebenfield/ftensor | src/Math/Ftensor/Algebra.hs | isc | 4,419 | 3 | 9 | 989 | 811 | 458 | 353 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
-- | @futhark doc@
module Futhark.CLI.Doc (main) where
import Control.Monad.State
import Data.FileEmbed
import Data.List (nubBy)
import qualified Data.Text.Lazy as T
import qualified Data.Text.Lazy.IO as T
import Futhark.Compiler (Imports, dumpError, fileProg, newFutharkConfig, readProgramFiles)
import Futhark.Doc.Generator
import Futhark.Pipeline (FutharkM, Verbosity (..), runFutharkM)
import Futhark.Util (directoryContents, trim)
import Futhark.Util.Options
import Language.Futhark.Syntax (DocComment (..), progDoc)
import System.Directory (createDirectoryIfMissing)
import System.Exit
import System.FilePath
import System.IO
import Text.Blaze.Html.Renderer.Text
-- | Run @futhark doc@.
main :: String -> [String] -> IO ()
main = mainWithOptions initialDocConfig commandLineOptions "options... -o outdir programs..." f
  where
    -- Accept exactly one directory argument; any other argument list is
    -- a usage error (Nothing makes mainWithOptions print usage).
    f [dir] config = Just $ do
      res <- runFutharkM (m config dir) Verbose
      case res of
        Left err -> liftIO $ do
          dumpError newFutharkConfig err
          exitWith $ ExitFailure 2
        Right () ->
          return ()
    f _ _ = Nothing

    -- Read every .fut file under @dir@ and generate documentation into
    -- the configured output directory.
    m :: DocConfig -> FilePath -> FutharkM ()
    m config dir =
      case docOutput config of
        Nothing -> liftIO $ do
          hPutStrLn stderr "Must specify output directory with -o."
          exitWith $ ExitFailure 1
        Just outdir -> do
          files <- liftIO $ futFiles dir
          when (docVerbose config) $
            liftIO $ do
              mapM_ (hPutStrLn stderr . ("Found source file " <>)) files
              hPutStrLn stderr "Reading files..."
          (_w, imports, _vns) <- readProgramFiles [] files
          liftIO $ printDecs config outdir files $ nubBy sameImport imports

    -- Deduplicate imports by their first component (the import name).
    sameImport (x, _) (y, _) = x == y
-- | All files with a @.fut@ extension found under the given directory.
futFiles :: FilePath -> IO [FilePath]
futFiles dir = do
  contents <- directoryContents dir
  return [file | file <- contents, takeExtension file == ".fut"]
-- | Render HTML documentation for the (non-ignored) imports and write
-- each generated file, plus the stylesheet, under the output directory.
printDecs :: DocConfig -> FilePath -> [FilePath] -> Imports -> IO ()
printDecs cfg dir files imports = do
  let direct_imports = map (normalise . dropExtension) files
      (file_htmls, _warnings) =
        renderFiles direct_imports $
          filter (not . ignored) imports
  mapM_ (write . fmap renderHtml) file_htmls
  write ("style.css", cssFile)
  where
    -- Write one named output file, creating parent directories as needed.
    write :: (String, T.Text) -> IO ()
    write (name, content) = do
      let file = dir </> makeRelative "/" name
      when (docVerbose cfg) $
        hPutStrLn stderr $ "Writing " <> file
      createDirectoryIfMissing True $ takeDirectory file
      T.writeFile file content

    -- Some files are not worth documenting; typically because
    -- they contain tests. The current crude mechanism is to
    -- recognise them by a file comment containing "ignore".
    ignored (_, fm) =
      case progDoc (fileProg fm) of
        Just (DocComment s _) -> trim s == "ignore"
        _ -> False
-- | The stylesheet embedded at compile time and written next to the
-- generated documentation as @style.css@.
cssFile :: T.Text
cssFile = $(embedStringFile "rts/futhark-doc/style.css")

-- | Configuration for a @futhark doc@ invocation.
data DocConfig = DocConfig
  { docOutput :: Maybe FilePath,
    -- ^ Output directory; checked for presence before generating anything.
    docVerbose :: Bool
    -- ^ Whether to print progress messages on stderr.
  }

-- | Default configuration: no output directory, not verbose.
initialDocConfig :: DocConfig
initialDocConfig =
  DocConfig
    { docOutput = Nothing,
      docVerbose = False
    }

-- | A command line option that either fails or transforms the 'DocConfig'.
type DocOption = OptDescr (Either (IO ()) (DocConfig -> DocConfig))

-- | The options accepted by @futhark doc@.
commandLineOptions :: [DocOption]
commandLineOptions =
  [ Option
      "o"
      ["output-directory"]
      ( ReqArg
          (\dirname -> Right $ \config -> config {docOutput = Just dirname})
          "DIR"
      )
      "Directory in which to put generated documentation.",
    Option
      "v"
      ["verbose"]
      (NoArg $ Right $ \config -> config {docVerbose = True})
      "Print status messages on stderr."
  ]
| diku-dk/futhark | src/Futhark/CLI/Doc.hs | isc | 3,764 | 0 | 18 | 921 | 1,063 | 564 | 499 | 94 | 4 |
{-# htermination intersectFM_C :: Ord a => (b1 -> b2 -> b3) -> FiniteMap [a] b1 -> FiniteMap [a] b2 -> FiniteMap [a] b3 #-}
import FiniteMap
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/FiniteMap_intersectFM_C_4.hs | mit | 141 | 0 | 3 | 27 | 5 | 3 | 2 | 1 | 0 |
{-# LANGUAGE DeriveGeneric #-}
module Models.MatchingRequest where
import GHC.Generics
import Data.Aeson (FromJSON)
-- | Wrapper for the national-insurance-number attribute of a cycle-3 dataset.
newtype NinoAttribute = NinoAttribute { nino :: String } deriving (Show, Generic)

instance FromJSON NinoAttribute

-- | The extra attributes supplied with a cycle-3 matching request.
newtype Cycle3Dataset = Cycle3Dataset { attributes :: NinoAttribute } deriving (Show, Generic)

instance FromJSON Cycle3Dataset

-- | The four supported levels of assurance.
data LevelOfAssurance = LEVEL_1 | LEVEL_2 | LEVEL_3 | LEVEL_4 deriving (Show, Generic)

instance FromJSON LevelOfAssurance

-- | A single string value inside the matching dataset.
newtype MatchingDatasetString = MatchingDatasetString { value :: String } deriving (Show, Generic)

instance FromJSON MatchingDatasetString

-- | The matching dataset; only surnames are modelled here.
newtype MatchingDataset = MatchingDataset { surnames :: [MatchingDatasetString] } deriving (Show, Generic)

instance FromJSON MatchingDataset

-- | A matching request as posted to the local matching service.
data MatchingRequest = MatchingRequest
  { levelOfAssurance :: LevelOfAssurance   -- level of assurance of the request
  , cycle3Dataset :: Maybe Cycle3Dataset   -- present only on cycle-3 requests
  , matchingDataset :: MatchingDataset     -- the matching dataset proper
  } deriving (Show, Generic)

instance FromJSON MatchingRequest
| richardTowers/verify-sample-local-matching-services | haskell/Models/MatchingRequest.hs | mit | 987 | 0 | 9 | 125 | 231 | 134 | 97 | 20 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Yesod.Form.I18n.Russian where
import Yesod.Form.Types (FormMessage (..))
import Data.Monoid (mappend)
import Data.Text (Text)
-- | Russian translations of Yesod form messages.
--
-- Fix: the first clause used the fully qualified @Data.Monoid.mappend@
-- while every other clause uses the plain @mappend@ brought into scope by
-- the import list; they are the same function, so this normalises the
-- inconsistency.  All user-facing message strings are unchanged.
russianFormMessage :: FormMessage -> Text
russianFormMessage (MsgInvalidInteger t) = "Неверно записано целое число: " `mappend` t
russianFormMessage (MsgInvalidNumber t) = "Неверный формат числа: " `mappend` t
russianFormMessage (MsgInvalidEntry t) = "Неверный выбор: " `mappend` t
russianFormMessage MsgInvalidTimeFormat = "Неверно указано время, используйте формат ЧЧ:ММ[:СС]"
russianFormMessage MsgInvalidDay = "Неверно указана дата, используйте формат ГГГГ-ММ-ДД"
russianFormMessage (MsgInvalidUrl t) = "Неверно указан URL адрес: " `mappend` t
russianFormMessage (MsgInvalidEmail t) = "Неверно указана электронная почта: " `mappend` t
russianFormMessage (MsgInvalidHour t) = "Неверно указан час: " `mappend` t
russianFormMessage (MsgInvalidMinute t) = "Неверно указаны минуты: " `mappend` t
russianFormMessage (MsgInvalidSecond t) = "Неверно указаны секунды: " `mappend` t
russianFormMessage MsgCsrfWarning = "Для защиты от межсайтовой подделки запросов (CSRF), пожалуйста, подтвердите отправку данных формы."
russianFormMessage MsgValueRequired = "Обязательно к заполнению"
russianFormMessage (MsgInputNotFound t) = "Поле не найдено: " `mappend` t
russianFormMessage MsgSelectNone = "<Не выбрано>"
russianFormMessage (MsgInvalidBool t) = "Неверное логическое значение: " `mappend` t
russianFormMessage MsgBoolYes = "Да"
russianFormMessage MsgBoolNo = "Нет"
russianFormMessage MsgDelete = "Удалить?"
russianFormMessage (MsgInvalidHexColorFormat t) = "Недопустимое значение цвета, должен быть в шестнадцатеричном формате #rrggbb: " `mappend` t
| yesodweb/yesod | yesod-form/Yesod/Form/I18n/Russian.hs | mit | 2,196 | 0 | 7 | 198 | 340 | 189 | 151 | 25 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
-- | Defines an Sqlite event store.
module Eventful.Store.Sqlite
( sqliteEventStoreWriter
, initializeSqliteEventStore
, module Eventful.Store.Class
, module Eventful.Store.Sql
) where
import Control.Monad.Reader
import Data.Monoid
import Data.Text (Text)
import Database.Persist
import Database.Persist.Sql
import Eventful.Store.Class
import Eventful.Store.Sql
-- | An 'EventStoreWriter' that uses an SQLite database as a backend. Use
-- 'SqlEventStoreConfig' to configure this event store.
sqliteEventStoreWriter
  :: (MonadIO m, PersistEntity entity, PersistEntityBackend entity ~ SqlBackend)
  => SqlEventStoreConfig entity serialized
  -> VersionedEventStoreWriter (SqlPersistT m) serialized
sqliteEventStoreWriter config = EventStoreWriter $ transactionalExpectedWriteHelper getLatestVersion storeEvents'
  where
    -- Latest stored version for a stream, computed with SQLite-flavoured SQL.
    getLatestVersion = sqlMaxEventVersion config maxSqliteVersionSql
    -- Append events; Nothing means no version constraint is applied here.
    storeEvents' = sqlStoreEvents config Nothing maxSqliteVersionSql
-- | SQLite query text returning the greatest stored version number for a
-- given stream UUID, or -1 when no events exist for that UUID.  Takes the
-- table, UUID-column and version-column names.
maxSqliteVersionSql :: DBName -> DBName -> DBName -> Text
maxSqliteVersionSql (DBName tableName) (DBName uuidFieldName) (DBName versionFieldName) =
  mconcat
    [ "SELECT IFNULL(MAX("
    , versionFieldName
    , "), -1) FROM "
    , tableName
    , " WHERE "
    , uuidFieldName
    , " = ?"
    ]
-- | This functions runs the migrations required to create the events table and
-- also adds an index on the UUID column.
initializeSqliteEventStore
  :: (MonadIO m, PersistEntity entity, PersistEntityBackend entity ~ SqlBackend)
  => SqlEventStoreConfig entity serialized
  -> ConnectionPool
  -> m ()
initializeSqliteEventStore SqlEventStoreConfig{..} pool = do
  -- Run migrations
  _ <- liftIO $ runSqlPool (runMigrationSilent migrateSqlEvent) pool

  -- Create index on uuid field so retrieval is very fast.
  -- NOTE(review): the 'undefined's only pick out the entity type for
  -- 'tableDBName'; presumably they are never forced -- confirm.
  let
    (DBName tableName) = tableDBName (sqlEventStoreConfigSequenceMakeEntity undefined undefined undefined)
    (DBName uuidFieldName) = fieldDBName sqlEventStoreConfigSequenceNumberField
    indexSql =
      "CREATE INDEX IF NOT EXISTS " <>
      uuidFieldName <> "_index" <>
      " ON " <> tableName <>
      " (" <> uuidFieldName <> ")"
  liftIO $ flip runSqlPool pool $ rawExecute indexSql []

  return ()
| jdreaver/eventful | eventful-sqlite/src/Eventful/Store/Sqlite.hs | mit | 2,262 | 0 | 16 | 357 | 441 | 231 | 210 | 42 | 1 |
module Main where
-- friends
import qualified BitBoard
import qualified Piece
import qualified Search
import qualified ProofNumberSearch
import qualified Util
import qualified Move
import qualified Eval
-- GHC
-- libraries
import Text.Printf (printf)
import Criterion.Main
-- std
import Test.HUnit
import Test.Framework
import Test.Framework.Providers.HUnit
import Data.List
import Debug.Trace
-- Shorthand piece constructors used by the test tables.
b = Piece.B
w = Piece.W

-- | Run the HUnit suites.  The large proof-number-search regression groups
-- are kept for reference but commented out because they are slow (or can
-- freeze) when run routinely.
main :: IO ()
main = Test.Framework.defaultMain $ hUnitTestToTests $ TestList [
--  "ProofNumberSearch Very Basic Test" ~: TestList [
--    "pnsTest1" ~: pnsProven [] @=? pns 10 "OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO-- O",
--    "pnsTest2" ~: pnsProven [] @=? pns 10 "OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO-- O",
--    "pnsTest3" ~: pnsProven ["Bh8"] @=? pns 10 "OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOX- O",
--    "pnsTest4" ~: pnsDisproven ["Bf8","Wg8"] @=? pns 10 "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXOX--- O",
--    "pnsTest5" ~: pnsProven [] @=? pns 10 "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXOX--- X",
--    "pnsTest6" ~: pnsDisproven ["Bg8","Wh8"] @=? pns 10 "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXOX-- O",
--    "pnsTest7" ~: pnsDisproven ["Bh8"] @=? pns 10 "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXOX- O",
--    "pnsTest8" ~: pnsDisproven ["Bh8"] @=? pns 10 "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXOXXXXXXXOXXXXXOX- O",
--    "pnsTest9" ~: pnsProven ["Bh8"] @=? pns 10 "OOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOXOOOOOOOXOOOOXXX- O"
--  ],
  -- 15 depth, cannot execute right now (can freeze)
--  "ProofNumberSearch Test - 12 depth" ~: TestList [
--    "pnsTest01" ~: pnsProven ["Bb6"] @=? pns 15 "O--XXXX-XXXXXXX-OXOXOOXXOOXXOXXXOOXXOXXXO-OOOOXX--OOOO-X--OOOO-- O",
--    "pnsTest02" ~: pnsProven ["Ba3"] @=? pns 15 "--XXXXX-OOXXXXX--OOXXXXOXXOOOXXOXXOOOXXOXXXXXOXOO-OOOOOO-----O-O O",
--    "pnsTest03" ~: pnsProven ["Bg2"] @=? pns 15 "-XXX----O-XXXX-OOXXXXXXXOXXOXXXXOXOXOOXXOOXXXXOXO-XXXOOO---XOO-O O",
--    "pnsTest04" ~: pnsDisproven ["Bh1"] @=? pns 15 "O--XOO--XOOXXXXXXXXOXXXXXXXXOXXX--XOOOX---OOXOOO-OOOOOOO-OOOOOO- O",
--    "pnsTest05" ~: pnsProven ["Ba6"] @=? pns 15 "OOOOOXO---OOOOO-XXOXXXXX-XXOOXXXXXXXXXXX-XOOOXXXX-OOOO----OOOOO- O",
--    "pnsTest06" ~: pnsDisproven ["Ba4"] @=? pns 15 "X-OOOOO-XXOOOOX-XOXOOXXX-XOXXOXX-OXOXXXXO-XOXXXO---OOXXO--XO-OXO O",
--    "pnsTest07" ~: pnsDisproven ["Bh4"] @=? pns 15 "-X-OOOO---XXXO---OOXOX-OOOOOXXX-OOOXOXXXOOXXXOXXOOOXXXXX--OXXXXO O",
--    "pnsTest08" ~: pnsDisproven ["Be2"] @=? pns 15 "XXXXXO--XOOX-O--XXXOOO--XXOOOOO-XOOXOXO-XOXXOOXOO-XXOXX--OOXXXXO O",
--    "pnsTest09" ~: pnsDisproven ["Ba8"] @=? pns 15 "--XXXXX-O-XXXX-XOXXXXXXXOXXXXXOXOXOXOOO-X-OOOOO--XOOOO---XXXXXXX O",
--    "pnsTest10" ~: pnsProven ["Bc8"] @=? pns 15 "O-OXXXXO-OXXXXX-XXOXXOX--XOOXOXOXXXOXXXOOXXXXXXO--XXXXX----OOOO- O",
--    "pnsTest11" ~: pnsProven ["Ba8"] @=? pns 15 "--OXXXXO--XOOOOOOXOXOXOOXXXOOOXOO-OXOXOO-OXOXOOO--XXOOO--XXXXO-- O",
--    "pnsTest12" ~: pnsProven ["Ba8"] @=? pns 15 "--XXXX----XXXX--OOOOOXOXOOOOOOXXOOOXOOXXOOXOXOX-OXXXXXXO---XXXXO O",
--    "pnsTest13" ~: pnsDisproven ["Bh3"] @=? pns 15 "-XXXXXX-OOOOXO--OOOXOXO-OXOOOOX-OXXXXXXXOOOOOOO-O-OXOO----OXOOOO O",
--    "pnsTest14" ~: pnsDisproven ["Bb3"] @=? pns 15 "-OOOOOO---XXXX--X-XXOXXXOOXXOXX-XOXXOXX-XOOXXXX-OOOOXOX--XXXXXXX O"
--  ]
--  "AlphabeaSearch Test" ~: TestList [
--    "alphabetaTest" ~: alphabetaTest
--  ]
  "Evaluate - openness Test" ~: TestList [
    "openness Test01" ~: 30 @=? opennessTest "----O-X------X-----XXXO-OXXXXXOO-XXOOXOOXXOXXXOO--OOOO-O----OO-- O"
  ],
--  "BitBoard - numPeripherals Test" ~: TestList [ -- 1*******2&&&&&&&3*******4&&&&&&&5*******&&&&&&&&********&&&&&&&&
--    "numPeripherals Test00" ~: 0 @=? numPeripheralsTest "---------------------------X------------------------------------ O" w 4 4,
--    "numPeripherals Test01" ~: 1 @=? numPeripheralsTest "---------------------------XX----------------------------------- O" w 4 4,
--    "numPeripherals Test02" ~: 2 @=? numPeripheralsTest "--------------------------XXX----------------------------------- O" w 4 4,
--    "numPeripherals Test03" ~: 3 @=? numPeripheralsTest "-------------------X-------XX------X---------------------------- O" w 4 4,
--    "numPeripherals Test04" ~: 4 @=? numPeripheralsTest "------------------X-------XX-------XX--------------------------- O" w 4 4
--  ],
  "BitBoard - transpose Test" ~: TestList [ -- 1*******2&&&&&&&3*******4&&&&&&&5*******&&&&&&&&********&&&&&&&&
    "transpose Test00" ~:
      "O------OO------OO------OO------OO------OO------OO------OO------O O" @=?
      transposeTest "OOOOOOOO------------------------------------------------OOOOOOOO O",
    "transpose Test01" ~:
      "O-------O-------O-------O-------O-------O-------O-------OOOOOOOO O" @=?
      -- 1*******2&&&&&&&3*******4&&&&&&&5*******&&&&&&&&********&&&&&&&&
      transposeTest "OOOOOOOOO-------O-------O-------O-------O-------O-------O------- O"
  ],
  "Evaluate - fixedPieces Test" ~: TestList [ -- 1*******2&&&&&&&3*******4&&&&&&&5*******&&&&&&&&********&&&&&&&&
    "fixedPieces Test00" ~: 0 @=? fixedPiecesTest "---------------------------X------------------------------------ O",
    "fixedPieces Test01" ~: 10 @=? fixedPiecesTest "OOOOOOOO-------------------X------------------------------------ O",
    "fixedPieces Test02" ~: 10 @=? fixedPiecesTest "---------------------------X----------------------------OOOOOOOO O",
    "fixedPieces Test03" ~: 10 @=? fixedPiecesTest "O-------O-------O-------O--X----O-------O-------O-------O------- O",
    "fixedPieces Test04" ~: 10 @=? fixedPiecesTest "-------O-------O-------O---X---O-------O-------O-------O-------O O",
    "fixedPieces Test05" ~: 7 @=? fixedPiecesTest "OO---OOO-------------------------------------------------------- O",
    "fixedPieces Test05" ~: 7 @=? fixedPiecesTest "O-------O-------O-------------------------------O-------O------- O"
  ]
  ]
-- | Parse a board string, dumping the parsed board to the trace output as
-- a debugging aid before handing it back.
showBB :: String -> BitBoard.Bb
showBB bbStr = trace (BitBoard.showBitBoardsWOGuides $ BitBoard.fromString bbStr) BitBoard.fromString bbStr

-- Transpose the black half of a parsed board and render it back to a string.
transposeTest bb = BitBoard.toString $ BitBoard.Bb
  (BitBoard.transpose $ BitBoard.black $ showBB bb)
  BitBoard.emptyBoard
  Piece.B

-- Evaluate the fixed-pieces score of a board given as a string.
fixedPiecesTest bb = Eval.fixedPieces (showBB bb)

-- Count peripheral pieces of the given colour around coordinate (col, row).
numPeripheralsTest bb piece col row = BitBoard.numPeripherals (showBB bb) (Piece.Pc piece) (Util.posFromCoord (col, row))

-- Evaluate the openness score of a board given as a string.
opennessTest bb = Eval.openness (showBB bb)

-- Run a proof-number search with limit q on the board described by r.
pns q r = res
  where
    res = ProofNumberSearch.proofNumberSearch t q board
    t = BitBoard.turn board
    board = BitBoard.fromString r

-- Print alpha-beta results at two depths; the final assertion always holds.
alphabetaTest = do
  print $ Search.alphabeta 8 BitBoard.initialBoard
  print $ Search.alphabeta 9 BitBoard.initialBoard
  1 @=? 1

-- Expected search results: proven, disproven, and all-zero outcomes with
-- the given principal variation (moves given as strings).
pnsProven mvs = ProofNumberSearch.Result (ProofNumberSearch.ProofDisproofNumber 9223372036854775807 0) (map Move.fromString mvs)
pnsDisproven mvs = ProofNumberSearch.Result (ProofNumberSearch.ProofDisproofNumber 0 9223372036854775807) (map Move.fromString mvs)
pnsZeros mvs = ProofNumberSearch.Result (ProofNumberSearch.ProofDisproofNumber 0 0) (map Move.fromString mvs)
| ysnrkdm/Hamlet | src/test/Main.hs | mit | 7,868 | 0 | 12 | 1,547 | 703 | 384 | 319 | 56 | 1 |
module Main where
import Valentine.ParserSpec
import Data.Either (lefts)
import Data.List (null)
import Test.Tasty
-- Earlier entry point, kept for reference:
-- main :: IO ()
-- main = if null allTestSuites
--        then print ("All Tests Passed" :: String)
--        else (putStrLn . unwords $ allTestSuites) *>
--             fail ("Error, some tests failed" :: String)
--   where
--     allTestSuites = lefts . concat $ [valentineParserSpec] :: [String]

-- | Entry point: run the parser specification tests.
main :: IO ()
main = runTests testParseStringTrees
| plow-technologies/valentine | test/Main.hs | mit | 465 | 0 | 6 | 99 | 59 | 37 | 22 | 7 | 1 |
import Data.Numbers.Primes
-- | Project Euler problem 3: print the largest prime factor of 600851475143.
main :: IO ()
main = print . maximum $ primeFactors 600851475143
| adsmit14/haskell | ProjectEuler/3.hs | mit | 87 | 0 | 11 | 15 | 30 | 15 | 15 | 3 | 1 |
{-|
Module : Solve
Description : Solve project euler problems
Copyright : (C) Alexander Mattes, 2015
License : MIT
Maintainer : Alexander Mattes <[email protected]>
Stability : experimental
Portability : portable
Get solutions for project euler problems.
-}
module Solve
( ProblemID
, Solution
, minProblemID
, maxProblemID
, solveProblem
, solutions
, numberSolvedProblems
, tp
) where
import Data.Maybe
import Utils (ProblemID, Solution, time)
import Solutions.Solutions0XX
import Solutions.Solutions1XX
import Solutions.Solutions2XX
import Solutions.Solutions3XX
import Solutions.Solutions4XX
import Solutions.Solutions5XX
import Solutions.Solutions6XX
-- | Minimal id for available project euler problems.
-- 'solveProblem' never returns a solution for ids below this.
minProblemID :: ProblemID
minProblemID = 1

-- | Maximal id for available project euler problems.
-- 'solveProblem' never returns a solution for ids above this.
maxProblemID :: ProblemID
maxProblemID = 522
-- | Returns the solution for a project euler problem.
--
-- Dispatches to the solver module covering the hundred-block the id
-- falls into; ids outside the supported range yield 'Nothing'.
solveProblem :: ProblemID       -- ^ The problem id.
             -> Maybe Solution  -- ^ The solution, if implemented, otherwise 'Nothing'.
solveProblem n
  | n <= 0 || n >= 700 = Nothing
  | otherwise =
      case n `div` 100 of
        0 -> solveProblem0XX n
        1 -> solveProblem1XX n
        2 -> solveProblem2XX n
        3 -> solveProblem3XX n
        4 -> solveProblem4XX n
        5 -> solveProblem5XX n
        6 -> solveProblem6XX n
        _ -> Nothing  -- unreachable: the range check above excludes this
-- | List of solutions for all solved problems.
--
-- Fix: the original filtered with 'isJust' and then applied the partial
-- function 'fromJust'; the pattern guard below extracts the solution
-- totally, with identical results.
solutions :: [(ProblemID, Solution)]
solutions =
  [ (n, s)
  | n <- [minProblemID .. maxProblemID]
  , Just s <- [solveProblem n]
  ]
-- | The number of solved problems.
numberSolvedProblems :: Int
numberSolvedProblems = length solutions

-- | Helper function to quickly benchmark the time to calculate a problem solution.
--
-- >>> tp 1
-- "123456789"
-- CPU time: 0.00s
tp :: ProblemID -> IO ()
tp = time . showSolution where
  -- Render the solution, or a placeholder message for unsolved problems.
  showSolution :: ProblemID -> String
  showSolution n = fromMaybe ("Problem "++ show n ++ " not solved yet") (solveProblem n)
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
module Syntax where
import Language.Twelf.IntSyn
-- | Names of predicate symbols.
type PredName = String
-- | Names of object-language variables.
type VarName = String
-- | Names of proof references.
type RefName = String
-- | Names of holes in partial proofs.
type HoleName = String
-- | Number of arguments taken by a symbol.
type Arity = Int
-- | The type of a "box" is a possibly empty list of hypothetical terms and
-- proof references followed by a conclusion.
data Sequent =
  Sequent { antecedent :: [Either VarName Formula]
            -- ^ 'Left': a hypothetical term variable; 'Right': an assumed formula.
            -- If this is a partial sequent with a hole, the consequent
            -- is an application of a hole to some list of objects from the context.
            -- Otherwise, the consequent is a concrete formula.
          , consequent :: (Either (VarName, [VarName]) Formula)
          }
  deriving (Eq, Ord, Show)
-- | Unit types representing the unindexed type constants "box", "term" and "prop"
data SequentConst = SequentConst deriving (Eq, Ord, Show)
data TermConst = TermConst deriving (Eq, Ord, Show)
data PropConst = PropConst deriving (Eq, Ord, Show)

-- | Union of types of objects that may appear in a valid context.
data ObjType = TermTy (Open TermConst)
             | PropTy (Open PropConst)
             | RefTy (Open RefType)
             | SequentTy (Open SequentConst)
             | ProofTy (Open ProofType)
  deriving (Eq, Ord, Show)

-- | Union of object-level values, each possibly open in a context.
data Obj = ObjTerm (Open Term)
         | ObjProp (Open Formula)
         | ObjRef (Open Ref)
         | ObjSequent (Open Sequent)
         | ObjProof (Open ProofTerm)
  deriving (Eq, Ord, Show)
-- | A binder for a hypothetical object.
data HypBinding = HypBinding { bindVar :: Maybe VarName
                               -- ^ The bound name, if any.
                             , bindImplicit :: Bool
                               -- ^ Whether the binding is implicit.
                             , bindTy :: ObjType
                               -- ^ The type of the bound object.
                             }
  deriving (Eq, Ord, Show)

-- | A reference: a variable applied to a spine of objects.
data Ref = RefApp VarName [Obj]
  deriving (Eq, Ord, Show)

-- | A reference type is simply a wrapped box type.
data RefType = RefType Sequent deriving (Eq, Ord, Show)

-- | The type of open proofs. This includes the box type that the proof proves, as well
-- as a list of hypothetical objects.
data ProofType = ProofType Sequent deriving (Eq, Ord, Show)

-- | General type of open objects.
data Open a = Open [HypBinding] a deriving (Eq, Ord, Show)
-- | Formula data type. Represents propositions.
data Formula = Top                    -- ^ Truth.
             | Bot                    -- ^ Falsity.
             | Conj Formula Formula   -- ^ Conjunction.
             | Disj Formula Formula   -- ^ Disjunction.
             | Imp Formula Formula    -- ^ Implication.
             | Neg Formula            -- ^ Negation.
             | Pred PredName [Obj]    -- ^ Predicate applied to objects.
             | Eq Term Term           -- ^ Equality of terms.
             | All VarName Formula    -- ^ Universal quantification.
             | Exi VarName Formula    -- ^ Existential quantification.
  deriving (Eq, Ord, Show)

-- | Term representation.
data Term = Var VarName        -- ^ A variable.
          | App VarName [Obj]  -- ^ A function symbol applied to arguments.
  deriving (Eq, Ord, Show)
-- | Fitch proof term.  Each constructor corresponds to a proof rule; its
-- arguments are the formulas/terms involved followed by the names of the
-- references justifying the step.
data ProofTerm = VarIntro Sequent (Maybe VarName) ProofTerm
               | Assumption Sequent Formula (Maybe RefName) ProofTerm
               | Seq Sequent Sequent ProofTerm (Maybe RefName) ProofTerm
               | Copy Formula RefName
               | TopI
               | ConI Formula Formula RefName RefName
               | ConE1 Formula Formula RefName
               | ConE2 Formula Formula RefName
               | DisE Formula Formula Formula RefName RefName RefName
               | DisI1 Formula Formula RefName
               | DisI2 Formula Formula RefName
               | ImpI Formula Formula RefName
               | ImpE Formula Formula RefName RefName
               | NegI Formula RefName
               | NegE Formula RefName RefName
               | BotE Formula RefName
               | AllI (Maybe VarName) Formula RefName
               | AllE (Maybe VarName) Formula Term RefName
               | ExiI (Maybe VarName) Formula Term RefName
               | ExiE (Maybe VarName) Formula Formula RefName RefName
               | EqI Term
               | EqE Term Term (Maybe VarName) Formula RefName RefName
               | LEM Formula
               | NNE Formula RefName
               | PBC Formula RefName
               | NNI Formula RefName
               | MT Formula Formula RefName RefName
               | Hole ProofType VarName [Obj] -- A by Hole(dep1, ..., depn)
  deriving (Eq, Ord, Show)
-- | Is the type exactly the closed base type @term@?
isClosedTermTy :: ObjType -> Bool
isClosedTermTy (TermTy (Open [] TermConst)) = True
isClosedTermTy _ = False

-- | Is the type exactly the closed base type @prop@?
isClosedPropTy :: ObjType -> Bool
isClosedPropTy (PropTy (Open [] PropConst)) = True
isClosedPropTy _ = False

-- | A term type is "exotic" when some hypothesis is not itself a closed term.
isExoticTermTy :: ObjType -> Bool
isExoticTermTy (TermTy (Open termHyps TermConst)) =
  any (not . isClosedTermTy . bindTy) termHyps
isExoticTermTy _ = False

-- | A prop type is "exotic" when some hypothesis is not a closed term.
isExoticPropTy :: ObjType -> Bool
isExoticPropTy (PropTy (Open propHyps PropConst)) =
  any (not . isClosedTermTy . bindTy) propHyps
isExoticPropTy _ = False

-- | Reference, sequent and proof types are the types of holes.
isHoleTy :: ObjType -> Bool
isHoleTy (RefTy _) = True
isHoleTy (SequentTy _) = True
isHoleTy (ProofTy _) = True
isHoleTy _ = False
-- | Given a type of the form
-- term -> term -> ... -> term,
-- returns the number of "term" occurrences in negative position
-- (i.e. the number of binders on the type).
termArity' :: A -> Int
termArity' (A bindings _) = length bindings

-- | Given a type of the form
-- term -> term -> ... -> prop,
-- returns the number of "term" occurrences in negative position
-- (i.e. the number of binders on the type).
propArity :: A -> Int
propArity (A bindings _) = length bindings
-- | Convert a Twelf object into an 'Obj', guided by the expected type.
-- The lambda binders of the object become the context of the resulting
-- open object.
convertObj :: ObjType -> M -> Obj
convertObj t (M bindings r) =
  let binder = Open (map convertHypothesis bindings)
  in case t of
       TermTy _ -> ObjTerm . binder . convertTerm $ M [] r
       PropTy _ -> ObjProp . binder . convertProp $ M [] r
       RefTy _ -> ObjRef . binder . convertRef $ M [] r
       SequentTy _ -> ObjSequent . binder . convertSequent $ M [] r
       ProofTy _ -> ObjProof . binder . convertProofTerm $ M [] r
-- | Convert a Twelf object to a 'Ref'.  Only a closed application of a
-- reference variable is well-formed; anything else is an error.
convertRef :: M -> Ref
convertRef (M [] (R (RVar x (A bindings _)) args)) =
  RefApp x (zipWith convertObj (map (bindTy . convertHypothesis) bindings) args)
convertRef m = error . concat $ ["Malformed reference: ", show m]

-- | Convert a Twelf object to a 'Term': either a bare variable or an
-- application of a function symbol to argument objects.
convertTerm :: M -> Term
convertTerm m =
  case m of
    M [] (R (RVar f (A bindings _)) args)
      | not (null args) -> App f (zipWith
                                  convertObj
                                  (map (bindTy . convertHypothesis) bindings)
                                  args)
    M [] (R (RVar x _) []) -> Var x
    _ -> error $ concat ["Malformed term: ", show m]
-- | Convert a Twelf object to a 'Formula', dispatching on the name of
-- the head constant; a variable head is a predicate application.
convertProp :: M -> Formula
convertProp m =
  case m of
    M [] (R (RConst "top") []) -> Top
    M [] (R (RConst "bot") []) -> Bot
    M [] (R (RConst "\\/") [m1, m2]) -> Disj (convertProp m1) (convertProp m2)
    M [] (R (RConst "/\\") [m1, m2]) -> Conj (convertProp m1) (convertProp m2)
    M [] (R (RConst "=>") [m1, m2]) -> Imp (convertProp m1) (convertProp m2)
    M [] (R (RConst "~") [m1]) -> Neg (convertProp m1)
    M [] (R (RConst "==") [m1, m2]) -> Eq (convertTerm m1) (convertTerm m2)
    -- Quantifiers carry their bound variable as a one-argument lambda;
    -- an unnamed binder is rendered as "_".
    M [] (R (RConst "all")
           [M [(mx, _, _)] r]) -> All (maybe "_" id mx)
                                      (convertProp (M [] r))
    M [] (R (RConst "exi")
           [M [(mx, _, _)] r]) -> Exi (maybe "_" id mx)
                                      (convertProp (M [] r))
    M [] (R (RVar p (A bindings _)) args) ->
      Pred p (zipWith
              convertObj
              (map (bindTy . convertHypothesis) bindings)
              args)
    _ -> error $ concat ["Malformed proposition: ", show m]
-- | Convert a Twelf object to a 'Sequent'.  "," prepends an assumed
-- formula, "tm" prepends a hypothetical term variable, "|-" is a
-- concrete conclusion, and a variable head is a hole application.
convertSequent :: M -> Sequent
convertSequent m =
  case m of
    M [] (R (RConst ",") [m1, m2]) ->
      let assmForm = convertProp m1
          Sequent as cf = convertSequent m2
      in Sequent (Right assmForm:as) cf
    M [] (R (RConst "tm") [M [(mv, _, _)] r]) ->
      let Sequent as cf = convertSequent (M [] r)
          assmTerm = maybe "_" id mv
      in Sequent (Left assmTerm:as) cf
    M [] (R (RConst "|-") [pm]) ->
      Sequent [] (Right $ convertProp pm)
    M [] (R (RVar x _) ys) ->
      Sequent [] (Left (x, map convertVarName ys))
    _ -> error $ concat ["Malformed sequent: ", show m]
-- | Convert a base type @ref BT@ into a 'RefType'.
convertRefType :: P -> RefType
convertRefType (P "ref" [bt]) = RefType (convertSequent bt)
convertRefType p = error $ "Not a ref type: " ++ show p

-- | Convert the base type constant @sequent@.
convertSequentConst :: P -> SequentConst
convertSequentConst (P "sequent" []) = SequentConst
convertSequentConst p = error $ "Not a sequent constant: " ++ show p

-- | Convert a base type @proof BT@ into a 'ProofType'.
convertProofType :: P -> ProofType
convertProofType (P "proof" [bt]) = ProofType (convertSequent bt)
convertProofType p = error $ "Not a proof type: " ++ show p

-- | Convert the base type constant @term@.
convertTermType :: P -> TermConst
convertTermType (P "term" []) = TermConst
convertTermType p = error $ "Not a term constant: " ++ show p

-- | Convert the base type constant @prop@.
convertPropType :: P -> PropConst
convertPropType (P "prop" []) = PropConst
convertPropType p = error $ "Not a prop constant: " ++ show p
-- | Convert a Twelf type under its binders, converting the head with @f@.
convertOpen :: (P -> a) -> A -> Open a
convertOpen f (A bindings p) = Open (map convertHypothesis bindings) (f p)

-- | Convert one Twelf binding into a 'HypBinding', dispatching on the
-- name of the head type constant of its type.
convertHypothesis :: Binding -> HypBinding
convertHypothesis b@(mn, _, a@(A _ (P name _))) =
  HypBinding mn (isImplicit b) $
  case name of
    "term" -> TermTy (convertOpen convertTermType a)
    "prop" -> PropTy (convertOpen convertPropType a)
    "proof" -> ProofTy (convertOpen convertProofType a)
    "ref" -> RefTy (convertOpen convertRefType a)
    "sequent" -> SequentTy (convertOpen convertSequentConst a)
    _ -> error $ concat ["Hypothetical object '"
                        , show mn, "' has unknown type: ", show a]
-- | Extract the name of a variable object; anything else is an error.
convertVarName :: M -> VarName
convertVarName (M [] (R (RVar x _) _)) = x
convertVarName m = error $ concat ["Not a variable: ", show m]

-- | Split a single-argument lambda into its binder name and its body.
splitBinder :: M -> (Maybe String, M)
splitBinder (M [(mv, _, _)] r) = (mv, M [] r)
splitBinder m = error $ concat ["Not a single-argument lambda: ", show m]

-- | Convert an open proof object, pairing the converted proof term with
-- the sequent it proves (taken from the @proof BT@ type).
convertOpenProofTerm :: A -> M -> Open (ProofTerm, Sequent)
convertOpenProofTerm (A _ (P "proof" [sq])) (M hyps r) =
  Open (map convertHypothesis hyps) (convertProofTerm (M [] r)
                                    ,convertSequent sq)
convertOpenProofTerm _ _ = error "Malformed open proof term"
-- | Convert a closed Twelf object into a Fitch 'ProofTerm', dispatching
-- on the name of the head constant (one case per proof rule).  A variable
-- head is a hole application; open objects are rejected.
convertProofTerm :: M -> ProofTerm
convertProofTerm m =
  case m of
    -- Scoped rules: the continuation is a lambda whose binder names the
    -- introduced variable / assumption reference.
    M [] (R (RConst "var") [mOBS, mOPT]) ->
      let (mv, mOPT') = splitBinder mOPT
          (_, mOBS') = splitBinder mOBS
      in VarIntro (convertSequent mOBS') mv (convertProofTerm mOPT')
    M [] (R (RConst "assumption;") [mBS, mPhi, mPT]) ->
      let (mv, mPT') = splitBinder mPT in
      Assumption (convertSequent mBS) (convertProp mPhi) mv (convertProofTerm mPT')
    M [] (R (RConst ";") [mBS, mBT, mPT1, mPT2]) ->
      let (mv, mPT2') = splitBinder mPT2 in
      Seq (convertSequent mBS) (convertSequent mBT)
          (convertProofTerm mPT1) mv (convertProofTerm mPT2')
    -- Plain rules: formulas followed by justifying reference names.
    M [] (R (RConst "copy") [mPhi, mRef]) ->
      Copy (convertProp mPhi) (convertVarName mRef)
    M [] (R (RConst "top_i") []) -> TopI
    M [] (R (RConst "con_i") [mPA, mPB, mRefA, mRefB]) ->
      ConI (convertProp mPA) (convertProp mPB)
           (convertVarName mRefA) (convertVarName mRefB)
    M [] (R (RConst "con_e1") [mPA, mPB, mRef]) ->
      ConE1 (convertProp mPA) (convertProp mPB) (convertVarName mRef)
    M [] (R (RConst "con_e2") [mPA, mPB, mRef]) ->
      ConE2 (convertProp mPA) (convertProp mPB) (convertVarName mRef)
    M [] (R (RConst "dis_e") [mPA, mPB, mPC, mRef1, mRef2, mRef3]) ->
      DisE (convertProp mPA) (convertProp mPB) (convertProp mPC)
           (convertVarName mRef1) (convertVarName mRef2) (convertVarName mRef3)
    M [] (R (RConst "dis_i1") [mPA, mPB, mRef]) ->
      DisI1 (convertProp mPA) (convertProp mPB) (convertVarName mRef)
    M [] (R (RConst "dis_i2") [mPA, mPB, mRef]) ->
      DisI2 (convertProp mPA) (convertProp mPB) (convertVarName mRef)
    M [] (R (RConst "imp_i") [mPA, mPB, mRef]) ->
      ImpI (convertProp mPA) (convertProp mPB) (convertVarName mRef)
    M [] (R (RConst "imp_e") [mPA, mPB, mRef1, mRef2]) ->
      ImpE (convertProp mPA) (convertProp mPB)
           (convertVarName mRef1) (convertVarName mRef2)
    M [] (R (RConst "neg_i") [mPA, mRef]) ->
      NegI (convertProp mPA) (convertVarName mRef)
    M [] (R (RConst "neg_e") [mPA, mRef1, mRef2]) ->
      NegE (convertProp mPA) (convertVarName mRef1) (convertVarName mRef2)
    M [] (R (RConst "bot_e") [mPA, mRef]) ->
      BotE (convertProp mPA) (convertVarName mRef)
    -- Quantifier rules: the formula argument is a one-argument lambda.
    M [] (R (RConst "all_i") [mOPA, mRef]) ->
      let (mv, mOPA') = splitBinder mOPA in
      AllI mv (convertProp mOPA') (convertVarName mRef)
    M [] (R (RConst "all_e") [mOPA, mT, mRef]) ->
      let (mv, mOPA') = splitBinder mOPA in
      AllE mv (convertProp mOPA') (convertTerm mT) (convertVarName mRef)
    M [] (R (RConst "exi_i") [mOPA, mT, mRef]) ->
      let (mv, mOPA') = splitBinder mOPA in
      ExiI mv (convertProp mOPA') (convertTerm mT) (convertVarName mRef)
    M [] (R (RConst "exi_e") [mOPA, mPB, mRef1, mRef2]) ->
      let (mv, mOPA') = splitBinder mOPA in
      ExiE mv (convertProp mOPA') (convertProp mPB)
           (convertVarName mRef1) (convertVarName mRef2)
    M [] (R (RConst "eq_i") [mT]) ->
      EqI (convertTerm mT)
    M [] (R (RConst "eq_e") [mS, mT, mOPA, mRef1, mRef2]) ->
      let (mv, mOPA') = splitBinder mOPA in
      EqE (convertTerm mS) (convertTerm mT) mv (convertProp mOPA')
          (convertVarName mRef1) (convertVarName mRef2)
    M [] (R (RConst "lem") [mPA]) ->
      LEM (convertProp mPA)
    M [] (R (RConst "nne") [mPA, mRef]) ->
      NNE (convertProp mPA) (convertVarName mRef)
    M [] (R (RConst "pbc") [mPA, mRef]) ->
      PBC (convertProp mPA) (convertVarName mRef)
    M [] (R (RConst "mt") [mPA, mPB, mRef1, mRef2]) ->
      MT (convertProp mPA) (convertProp mPB) (convertVarName mRef1) (convertVarName mRef2)
    M [] (R (RConst "nni") [mPA, mRef]) ->
      NNI (convertProp mPA) (convertVarName mRef)
    -- Fallthrough cases: open objects are rejected, variable heads
    -- become holes, and everything else is an unknown rule.
    M (_:_) _ -> error $ concat ["Encountered unexpected open proof term."]
    M [] (R (RVar hole (A bindings p)) args) ->
      Hole (convertProofType p)
           hole
           (zipWith convertObj (map (bindTy . convertHypothesis) bindings) args)
    M _ (R root args) ->
      error $ concat ["Encountered unknown proof-term with root '"
                     ,show root, "' and "
                     ,show $ length args
                     ," arguments."]
| ulrikrasmussen/BoxProver | src/Syntax.hs | mit | 14,199 | 0 | 15 | 3,970 | 5,309 | 2,726 | 2,583 | 286 | 30 |
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- For the use of MarkedString
{-# OPTIONS_GHC -fno-warn-deprecations #-}
-- | Test for JSON serialization
module JsonSpec where
import Language.LSP.Types
import qualified Data.Aeson as J
import Data.List(isPrefixOf)
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.QuickCheck hiding (Success)
import Test.QuickCheck.Instances ()
-- import Debug.Trace
-- ---------------------------------------------------------------------
{-# ANN module ("HLint: ignore Redundant do" :: String) #-}
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "dispatcher" jsonSpec
describe "ResponseMessage" responseMessageSpec
-- ---------------------------------------------------------------------
jsonSpec :: Spec
jsonSpec = do
describe "General JSON instances round trip" $ do
-- DataTypesJSON
prop "LanguageString" (propertyJsonRoundtrip :: LanguageString -> Property)
prop "MarkedString" (propertyJsonRoundtrip :: MarkedString -> Property)
prop "MarkupContent" (propertyJsonRoundtrip :: MarkupContent -> Property)
prop "HoverContents" (propertyJsonRoundtrip :: HoverContents -> Property)
prop "ResponseError" (propertyJsonRoundtrip :: ResponseError -> Property)
prop "WatchedFiles" (propertyJsonRoundtrip :: DidChangeWatchedFilesRegistrationOptions -> Property)
prop "ResponseMessage Initialize"
(propertyJsonRoundtrip :: ResponseMessage 'TextDocumentHover -> Property)
-- prop "ResponseMessage JSON value"
-- (propertyJsonRoundtrip :: ResponseMessage J.Value -> Property)
describe "JSON decoding regressions" $
it "CompletionItem" $
(J.decode "{\"jsonrpc\":\"2.0\",\"result\":[{\"label\":\"raisebox\"}],\"id\":1}" :: Maybe (ResponseMessage 'TextDocumentCompletion))
`shouldNotBe` Nothing
responseMessageSpec :: Spec
responseMessageSpec = do
describe "edge cases" $ do
it "decodes result = null" $ do
let input = "{\"jsonrpc\": \"2.0\", \"id\": 123, \"result\": null}"
in J.decode input `shouldBe` Just
((ResponseMessage "2.0" (Just (IdInt 123)) (Right J.Null)) :: ResponseMessage 'WorkspaceExecuteCommand)
it "handles missing params field" $ do
J.eitherDecode "{ \"jsonrpc\": \"2.0\", \"id\": 15, \"method\": \"shutdown\"}"
`shouldBe` Right (RequestMessage "2.0" (IdInt 15) SShutdown Empty)
describe "invalid JSON" $ do
it "throws if neither result nor error is present" $ do
(J.eitherDecode "{\"jsonrpc\":\"2.0\",\"id\":1}" :: Either String (ResponseMessage 'Initialize))
`shouldBe` Left ("Error in $: both error and result cannot be Nothing")
it "throws if both result and error are present" $ do
(J.eitherDecode
"{\"jsonrpc\":\"2.0\",\"id\": 1,\"result\":{\"capabilities\": {}},\"error\":{\"code\":-32700,\"message\":\"\",\"data\":null}}"
:: Either String (ResponseMessage 'Initialize))
`shouldSatisfy`
(either (\err -> "Error in $: both error and result cannot be present" `isPrefixOf` err) (\_ -> False))
-- ---------------------------------------------------------------------
propertyJsonRoundtrip :: (Eq a, Show a, J.ToJSON a, J.FromJSON a) => a -> Property
propertyJsonRoundtrip a = J.Success a === J.fromJSON (J.toJSON a)
-- ---------------------------------------------------------------------
instance Arbitrary LanguageString where
arbitrary = LanguageString <$> arbitrary <*> arbitrary
instance Arbitrary MarkedString where
arbitrary = oneof [PlainString <$> arbitrary, CodeString <$> arbitrary]
instance Arbitrary MarkupContent where
arbitrary = MarkupContent <$> arbitrary <*> arbitrary
instance Arbitrary MarkupKind where
arbitrary = oneof [pure MkPlainText,pure MkMarkdown]
instance Arbitrary HoverContents where
arbitrary = oneof [ HoverContentsMS <$> arbitrary
, HoverContents <$> arbitrary
]
instance Arbitrary UInt where
arbitrary = fromInteger <$> arbitrary
instance Arbitrary Uri where
arbitrary = Uri <$> arbitrary
instance Arbitrary Position where
arbitrary = Position <$> arbitrary <*> arbitrary
instance Arbitrary Location where
arbitrary = Location <$> arbitrary <*> arbitrary
instance Arbitrary Range where
arbitrary = Range <$> arbitrary <*> arbitrary
instance Arbitrary Hover where
arbitrary = Hover <$> arbitrary <*> arbitrary
instance Arbitrary (ResponseResult m) => Arbitrary (ResponseMessage m) where
arbitrary =
oneof
[ ResponseMessage
<$> arbitrary
<*> arbitrary
<*> (Right <$> arbitrary)
, ResponseMessage
<$> arbitrary
<*> arbitrary
<*> (Left <$> arbitrary)
]
instance Arbitrary (LspId m) where
arbitrary = oneof [IdInt <$> arbitrary, IdString <$> arbitrary]
instance Arbitrary ResponseError where
arbitrary = ResponseError <$> arbitrary <*> arbitrary <*> pure Nothing
instance Arbitrary ErrorCode where
arbitrary =
elements
[ ParseError
, InvalidRequest
, MethodNotFound
, InvalidParams
, InternalError
, ServerErrorStart
, ServerErrorEnd
, ServerNotInitialized
, UnknownErrorCode
, RequestCancelled
, ContentModified
]
-- | make lists of maximum length 3 for test performance
smallList :: Gen a -> Gen [a]
smallList = resize 3 . listOf
instance (Arbitrary a) => Arbitrary (List a) where
arbitrary = List <$> arbitrary
-- ---------------------------------------------------------------------
instance Arbitrary DidChangeWatchedFilesRegistrationOptions where
arbitrary = DidChangeWatchedFilesRegistrationOptions <$> arbitrary
instance Arbitrary FileSystemWatcher where
arbitrary = FileSystemWatcher <$> arbitrary <*> arbitrary
instance Arbitrary WatchKind where
arbitrary = WatchKind <$> arbitrary <*> arbitrary <*> arbitrary
-- ---------------------------------------------------------------------
| wz1000/haskell-lsp | lsp/test/JsonSpec.hs | mit | 6,290 | 0 | 23 | 1,250 | 1,232 | 649 | 583 | -1 | -1 |
module Rebase.GHC.Conc.Signal
(
module GHC.Conc.Signal
)
where
import GHC.Conc.Signal
| nikita-volkov/rebase | library/Rebase/GHC/Conc/Signal.hs | mit | 89 | 0 | 5 | 12 | 23 | 16 | 7 | 4 | 0 |
--where clause lets us define inside a function
bmiTell :: (RealFloat a) => a -> a -> String
bmiTell weight height
| bmi <= 18.5 = "You're underweight, you emo, you!"
| bmi <= 25.0 = "You're supposedly normal. Pffft, I bet you're ugly!"
| bmi <= 30.0 = "You're fat! Lose some weight, fatty!"
| otherwise = "You're a whale, congratulations!"
where bmi = weight / height ^ 2
--we can define multiple where clauses
bmiTell' :: (RealFloat a) => a -> a -> String
bmiTell' weight height
| bmi <= skinny = "You're underweight, you emo, you!"
| bmi <= normal = "You're supposedly normal. Pffft, I bet you're ugly!"
| bmi <= fat = "You're fat! Lose some weight, fatty!"
| otherwise = "You're a whale, congratulations!"
where bmi = weight / height ^ 2
skinny = 18.5
normal = 25.0
fat = 30.0
--we can also do patter matching in where clauses
bmiTell'' :: (RealFloat a) => a -> a -> String
bmiTell'' weight height
| bmi <= skinny = "You're underweight, you emo, you!"
| bmi <= normal = "You're supposedly normal. Pffft, I bet you're ugly!"
| bmi <= fat = "You're fat! Lose some weight, fatty!"
| otherwise = "You're a whale, congratulations!"
where bmi = weight / height ^ 2
(skinny, normal, fat) = (18.5, 25.0, 30.0)
initials :: String -> String -> String
initials firstname lastname = [f] ++ ". " ++ [l] ++ "."
where (f:_) = firstname
(l:_) = lastname
-- we can slo define functions in where blocks
calcBmis :: (RealFloat a) => [(a, a)] -> [a]
calcBmis xs = [bmi w h | (w, h) <- xs]
where bmi weight height = weight / height ^ 2
| luisgepeto/HaskellLearning | 04 Syntax In Functions/03_where.hs | mit | 1,680 | 0 | 8 | 451 | 469 | 244 | 225 | 32 | 1 |
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable, BangPatterns #-}
module Types where
import Data.DeriveTH
import Data.Data
import Data.HashMap.Strict hiding (map, foldr)
import Language.Haskell.TH hiding (Name)
import Control.DeepSeq
import Control.DeepSeq.TH
import Control.Monad.State
import Safe
-- Fully strict versions
mapAccumL' :: (acc -> x -> (acc, y)) -> acc -> [x] -> (acc, [y])
mapAccumL' _ !s ![] = (s, [])
mapAccumL' f !s (x:xs) = (s'',y:ys)
where
!(!s', !y) = f s x
!(!s'', !ys) = s' `seq` mapAccumL' f s' xs
map' :: (a -> b) -> [a] -> [b]
map' _ ![] = []
map' f (x:xs) = a : b
where
!a = f x
!b = map' f xs
filter' :: (a -> Bool) -> [a] -> [a]
filter' _ ![] = []
filter' pred (x:xs)
| pred x = addRecurse
| otherwise = recurse
where
!recurse = filter' pred xs
!addRecurse = x : recurse
-- Errors
data LError = NumArgs !Integer ![LispVal]
| TypeMismatch !Name ![LispVal]
| NotImplemented !Name !LispVal
| NotDefined ![Name]
| NotCallable !LispVal
| IllegalArgument !Name !LispVal
| Custom !LispVal !String
| Parser !String
| SyntaxError !String
| Default !String
deriving (Eq, Data, Typeable)
showError :: LError -> String
showError (NumArgs expected found) = "((make-error :num-args) " ++ show expected ++ " [" ++ initSafe (concatMap (\l -> show l ++ " ") found) ++ "])"
showError (TypeMismatch message values) = "((make-error :type-mismatch) " ++ show message++ " [" ++ initSafe (concatMap (\l -> show l ++ " ") values) ++ "])"
showError (NotImplemented message unhandled) = "((make-error :not-implemented) " ++ show message ++ " " ++ show unhandled ++ ")"
showError (NotDefined names) = "((make-error :not-defined) " ++ " [" ++ initSafe (concatMap (\l -> show l ++ " ") names) ++ "])"
showError (NotCallable name) = "((make-error :not-callable) " ++ show name ++ ")"
showError (IllegalArgument name val) = "((make-error :illegal-argument) " ++ show name ++ " " ++ show val ++ ")"
showError (Custom name message) = "((make-error :custom) " ++ show name ++ " " ++ show message ++ ")"
showError (Parser parseErr) = "Parse Error: " ++ parseErr ++ ""
showError (SyntaxError message) = "((make-error :syntax-error) " ++ show message ++ ")"
showError (Default message) = "((make-error :custom) " ++ show message ++ ")"
instance Show LError where show = showError
-- Truthiness
truthy :: LispVal -> LispVal
truthy val@(Boolean _) = val
truthy val@(LispError _) = val
truthy _ = Boolean False
isLispVal :: LispVal -> Bool
isLispVal _ = True
-- Environments
data Environment = Environment {
symbols :: !(HashMap Name (Either Int LispVal)),
closures :: !(HashMap Int (HashMap Name LispVal)),
parent :: !(Maybe Environment)
} deriving (Eq, Show)
-- Values
type Name = String
data LispVal
= Var !Name
| Boolean !Bool
| Int !Integer
| Float !Double
| String !String
| Keyword !String
| List ![LispVal]
| Quote !LispVal
| Quasiquote !LispVal
| Unquote !LispVal
| UnquoteSplicing !LispVal
| If !LispVal !LispVal !LispVal
| Definition !Name !LispVal
| Assignment !Name !LispVal
| Sequence !LispVal
| Macro !Name ![Name] !LispVal
| Function ![Name] !LispVal
| Closure ![Name] !LispVal !Int
| MakeClosure ![Name] !LispVal ![Name]
| BuiltinFunction !Name !Int !Int
| Call !LispVal ![LispVal]
| TailCall !LispVal ![LispVal]
| TailRecursiveCall !LispVal ![LispVal]
| LispError !LError
deriving (Eq, Data, Typeable)
showLispVal :: LispVal -> String
showLispVal (Var name) = name
showLispVal (Boolean bool) = if bool then "#t" else "#f"
showLispVal (Int val) = show val
showLispVal (Float val) = show val
showLispVal (String str) = str
showLispVal (Keyword kw) = ":" ++ kw
showLispVal (List lis) = "[" ++ initSafe (concatMap (\l -> show l ++ " ") lis) ++ "]"
showLispVal (Quote val) = "'" ++ show val
showLispVal (Quasiquote val) = "`" ++ show val
showLispVal (Unquote val) = "," ++ show val
showLispVal (UnquoteSplicing val) = ",@" ++ show val
showLispVal (If pred then' else') = "(if " ++ show pred ++ " " ++ show then' ++ " " ++ show else' ++ ")"
showLispVal (Definition name val) = "(define " ++ name ++ " " ++ show val ++ ")"
showLispVal (Assignment name val) = "(set! " ++ name ++ " " ++ show val ++ ")"
showLispVal (Sequence seq) = "(begin " ++ tail (init $ show seq) ++ ")"
showLispVal (Macro name args expr) = "(defmacro " ++ name ++ " (" ++ initSafe (concatMap (++ " ") args) ++ ") " ++ show expr ++ ")"
showLispVal (Function args expr) = "(lambda (" ++ initSafe (concatMap (++ " ") args) ++ ") " ++ show expr ++ ")"
showLispVal (Closure args expr num) = "(closure (" ++ initSafe (concatMap (++ " ") args) ++ ") " ++ show expr ++ " " ++ show num ++ ")"
showLispVal (MakeClosure args expr freevars) = "(make-closure (" ++ initSafe (concatMap (++ " ") args) ++ ") " ++ show expr ++ " [" ++ initSafe (concatMap (++ " ") freevars) ++ "])"
showLispVal (BuiltinFunction name _ _) = name
showLispVal (Call expr args) = "(" ++ show expr ++ " " ++ initSafe (concatMap (\l -> show l ++ " ") args) ++ ")"
showLispVal (TailCall expr args) = "(tail " ++ show expr ++ " " ++ initSafe (concatMap (\l -> show l ++ " ") args) ++ ")"
showLispVal (TailRecursiveCall expr args) = "(tailrec " ++ show expr ++ " " ++ initSafe (concatMap (\l -> show l ++ " ") args) ++ ")"
showLispVal (LispError err) = show err
instance Show LispVal where show = showLispVal
$( derive makeIs ''LispVal )
$( derive makeFrom ''LispVal )
$( deriveNFData ''Environment )
$( deriveNFData ''LError )
$( deriveNFData ''LispVal )
| mhlakhani/hlisp | src/Types.hs | mit | 5,679 | 0 | 14 | 1,231 | 2,250 | 1,126 | 1,124 | 235 | 2 |
data Arbre a = Arbre | Feuille
main = do txt <- readFile "18.txt"
let text = (map (words) (lines txt))
let content = [(map readint l) | l <- text]
readint :: String -> Int
readint = read
adjacent :: [a] -> | t00n/ProjectEuler | projectEuler.hs | epl-1.0 | 233 | 0 | 13 | 70 | 107 | 56 | 51 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
-----------------------------------------------------------------------------
--
-- Module : IDE.Pane.PackageFlags
-- Copyright : (c) Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GNU-GPL
--
-- Maintainer : <maintainer at leksah.org>
-- Stability : provisional
-- Portability : portable
--
--
-- | Pane for saving, restoring and editing flags for specific cabal
-- commands.
--
---------------------------------------------------------------------------------
module IDE.Pane.PackageFlags (
readFlags
, writeFlags
, IDEFlags(..)
, FlagsState
, getFlags
) where
import qualified Text.PrettyPrint.HughesPJ as PP
import Data.Typeable
import System.FilePath.Posix
import IDE.Core.State
import Graphics.UI.Editor.Basics
import Graphics.UI.Editor.MakeEditor
import Graphics.UI.Editor.Simple
import Graphics.UI.Editor.Parameters
import Text.PrinterParser hiding (fieldParser,parameters)
import Control.Event (registerEvent)
import Graphics.UI.Editor.DescriptionPP
(flattenFieldDescriptionPPToS,
extractFieldDescription,
FieldDescriptionPP(..),
mkFieldPP)
import Text.ParserCombinators.Parsec hiding(Parser)
import IDE.Utils.GUIUtils (__)
import Control.Monad (void)
import Data.Text (Text)
import Data.Monoid ((<>))
import qualified Data.Text as T (unwords, unpack, pack)
import Control.Applicative ((<$>))
import GI.Gtk.Objects.VBox (vBoxNew, VBox(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.GI.Base.ManagedPtr (unsafeCastTo)
import GI.Gtk.Objects.Widget (widgetSetSensitive, Widget(..))
import GI.Gtk.Objects.HButtonBox (hButtonBoxNew)
import GI.Gtk.Objects.Box (boxSetSpacing)
import GI.Gtk.Objects.ButtonBox (buttonBoxSetLayout)
import GI.Gtk.Enums
(PolicyType(..), ShadowType(..), ButtonBoxStyle(..))
import GI.Gtk.Objects.Button (onButtonClicked, buttonNewFromStock)
import GI.Gtk.Objects.Adjustment (noAdjustment)
import GI.Gtk.Objects.ScrolledWindow
(scrolledWindowSetPolicy, scrolledWindowAddWithViewport,
scrolledWindowSetShadowType, scrolledWindowNew)
data IDEFlags = IDEFlags {
flagsBox :: VBox
} deriving Typeable
data FlagsState = FlagsState
deriving(Eq,Ord,Read,Show,Typeable)
instance Pane IDEFlags IDEM
where
primPaneName _ = __ "Package Flags"
getAddedIndex _ = 0
getTopWidget = liftIO . unsafeCastTo Widget . flagsBox
paneId b = "*Flags"
instance RecoverablePane IDEFlags FlagsState IDEM where
saveState p = do
mbFlags :: Maybe IDEFlags <- getPane
case mbFlags of
Nothing -> return Nothing
Just p -> return (Just FlagsState)
recoverState pp st = do
mbPack <- readIDE activePack
case mbPack of
Just pack -> do
pp <- getBestPathForId "*Flags"
nb <- getNotebook pp
case mbPack of
Nothing -> return Nothing
Just pack -> buildThisPane pp nb builder
Nothing -> return Nothing
builder pp nb w =
let flagsDesc = extractFieldDescription flagsDescription
flatflagsDesc = flattenFieldDescription flagsDesc
in do
mbPack <- readIDE activePack
case mbPack of
Nothing -> return (Nothing,[])
Just p -> reifyIDE $ \ideR -> builder' p flagsDesc flatflagsDesc pp nb window ideR
-- | Builds the Flags pane
builder' idePackage flagsDesc flatflagsDesc pp nb window ideR = do
vb <- vBoxNew False 0
let flagsPane = IDEFlags vb
bb <- hButtonBoxNew
boxSetSpacing bb 6
buttonBoxSetLayout bb ButtonBoxStyleSpread
saveB <- buttonNewFromStock "gtk-save"
widgetSetSensitive saveB False
cancelB <- buttonNewFromStock "gtk-cancel"
boxPackStart' bb cancelB PackNatural 0
boxPackStart' bb saveB PackNatural 0
(widget,injb,ext,notifier)
<- buildEditor flagsDesc idePackage
sw <- scrolledWindowNew noAdjustment noAdjustment
scrolledWindowSetShadowType sw ShadowTypeIn
scrolledWindowAddWithViewport sw widget
scrolledWindowSetPolicy sw PolicyTypeAutomatic PolicyTypeAutomatic
onButtonClicked saveB (do
mbPackWithNewFlags <- extract idePackage [ext]
case mbPackWithNewFlags of
Nothing -> return ()
Just packWithNewFlags -> do
reflectIDE (do
changePackage packWithNewFlags
closePane flagsPane) ideR
writeFields (dropExtension (ipdCabalFile packWithNewFlags) ++
leksahFlagFileExtension)
packWithNewFlags flatFlagsDescription)
onButtonClicked cancelB (reflectIDE (void (closePane flagsPane)) ideR)
registerEvent notifier FocusIn (\e -> do
reflectIDE (makeActive flagsPane) ideR
return (e{gtkReturn=False}))
registerEvent notifier MayHaveChanged (\e -> (`reflectIDE` ideR) $ do
mbP <- liftIO $ extract idePackage [ext]
let hasChanged = case mbP of
Nothing -> False
Just p -> p /= idePackage
topWidget <- getTopWidget flagsPane
markLabel nb topWidget hasChanged
widgetSetSensitive saveB hasChanged
return (e{gtkReturn=False}))
boxPackStart' vb sw PackGrow 0
boxPackEnd' vb bb PackNatural 6
return (Just flagsPane,[])
-- | Gets the Flags pane
getFlags :: Maybe PanePath -> IDEM IDEFlags
getFlags Nothing = forceGetPane (Right "*Flags")
getFlags (Just pp) = forceGetPane (Left pp)
-- | Quote the string if it contains spaces and escape
-- any other quotes.
quoteArg :: String -> String
quoteArg s | ' ' `elem` s = "\"" <> escapeQuotes s <> "\""
quoteArg s = s
escapeQuotes = foldr (\c s -> if c == '"' then '\\':c:s else c:s) ""
-- | Parse any (escaped) character (ignoring a prefixed @\@)
quotedArgCharParser :: CharParser () Char
quotedArgCharParser = try (do
char '\\'
anyChar)
<|> try (
noneOf "\"")
<?> "argsParser"
-- | Parse an argument that is either quoted or does not
-- contain spaces
argParser :: CharParser () Text
argParser = try (do
char '"'
s <- many quotedArgCharParser
char '"'
return $ T.pack s)
<|> try (
T.pack <$> many1 (noneOf " "))
<?> "argParser"
-- | Parse many arguments, possibly seperated by spaces
argsParser :: CharParser () [Text]
argsParser = try (
many (do
many (char ' ')
argParser))
<?> "argsParser"
-- | Quote all arguments and concatenate them
unargs :: [Text] -> Text
unargs = T.unwords . map (T.pack . quoteArg . T.unpack)
-- | Parse a list of arguments from a given string
args :: Text -> [Text]
args s = case parse argsParser "" $ T.unpack s of
Right result -> result
_ -> [s]
-- | The flattened description of the fields in the pane
flatFlagsDescription :: [FieldDescriptionS IDEPackage]
flatFlagsDescription = flattenFieldDescriptionPPToS flagsDescription
-- | The description of the fields in the pane
flagsDescription :: FieldDescriptionPP IDEPackage IDEM
flagsDescription = VFDPP emptyParams [
mkFieldPP
(paraName <<<- ParaName (__ "Config flags") $ emptyParams)
(PP.text . show)
readParser
(unargs . ipdConfigFlags)
(\ b a -> a{ipdConfigFlags = args b})
(textEditor (const True) True)
(\ _ -> return ())
, mkFieldPP
(paraName <<<- ParaName (__ "Build flags") $ emptyParams)
(PP.text . show)
readParser
(unargs . ipdBuildFlags)
(\ b a -> a{ipdBuildFlags = args b})
(textEditor (const True) True)
(\ _ -> return ())
, mkFieldPP
(paraName <<<- ParaName (__ "Test flags") $ emptyParams)
(PP.text . show)
readParser
(unargs . ipdTestFlags)
(\ b a -> a{ipdTestFlags = args b})
(textEditor (const True) True)
(\ _ -> return ())
, mkFieldPP
(paraName <<<- ParaName (__ "Benchmark flags") $ emptyParams)
(PP.text . show)
readParser
(unargs . ipdBenchmarkFlags)
(\ b a -> a{ipdBenchmarkFlags = args b})
(textEditor (const True) True)
(\ _ -> return ())
, mkFieldPP
(paraName <<<- ParaName (__ "Haddock flags") $ emptyParams)
(PP.text . show)
readParser
(unargs . ipdHaddockFlags)
(\ b a -> a{ipdHaddockFlags = args b})
(textEditor (const True) True)
(\ _ -> return ())
, mkFieldPP
(paraName <<<- ParaName (__ "Executable flags") $ emptyParams)
(PP.text . show)
readParser
(unargs . ipdExeFlags)
(\ b a -> a{ipdExeFlags = args b})
(textEditor (const True) True)
(\ _ -> return ())
, mkFieldPP
(paraName <<<- ParaName (__ "Install flags") $ emptyParams)
(PP.text . show)
readParser
(unargs . ipdInstallFlags)
(\ b a -> a{ipdInstallFlags = args b})
(textEditor (const True) True)
(\ _ -> return ())
, mkFieldPP
(paraName <<<- ParaName (__ "Register flags") $ emptyParams)
(PP.text . show)
readParser
(unargs . ipdRegisterFlags)
(\ b a -> a{ipdRegisterFlags = args b})
(textEditor (const True) True)
(\ _ -> return ())
, mkFieldPP
(paraName <<<- ParaName (__ "Unregister flags") $ emptyParams)
(PP.text . show)
readParser
(unargs . ipdUnregisterFlags)
(\ b a -> a{ipdUnregisterFlags = args b})
(textEditor (const True) True)
(\ _ -> return ())
, mkFieldPP
(paraName <<<- ParaName (__ "Source Distribution flags") $ emptyParams)
(PP.text . show)
readParser
(unargs . ipdSdistFlags)
(\ b a -> a{ipdSdistFlags = args b})
(textEditor (const True) True)
(\ _ -> return ())]
-- ------------------------------------------------------------
-- * Parsing
-- ------------------------------------------------------------
-- | Read all the field values from the given 'FilePath'
readFlags :: FilePath -> IDEPackage -> IO IDEPackage
readFlags fn = readFields fn flatFlagsDescription
-- ------------------------------------------------------------
-- * Printing
-- ------------------------------------------------------------
-- | Write all field values to the given 'FilePath'
writeFlags :: FilePath -> IDEPackage -> IO ()
writeFlags fpath flags = writeFields fpath flags flatFlagsDescription
| JPMoresmau/leksah | src/IDE/Pane/PackageFlags.hs | gpl-2.0 | 11,320 | 0 | 21 | 3,305 | 2,882 | 1,521 | 1,361 | 249 | 3 |
{-| Provides all lens-related functions.
-}
{-
Copyright (C) 2014 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.Lens
( module Control.Lens
, makeCustomLenses
, makeCustomLenses'
) where
import Control.Lens
import Control.Monad
import qualified Data.Set as S
import Language.Haskell.TH
lensFieldName :: String -> String
lensFieldName = (++ "L")
-- | Internal helper method for constructing partial set of lenses.
makeCustomLensesFiltered :: (String -> Bool) -> Name -> Q [Dec]
makeCustomLensesFiltered f = makeLensesWith customRules
where
customRules :: LensRules
customRules = set lensField (fmap lensFieldName . mfilter f . Just)
defaultRules
-- | Create lenses for all fields of a given data type.
makeCustomLenses :: Name -> Q [Dec]
makeCustomLenses = makeCustomLensesFiltered (const True)
-- | Create lenses for some fields of a given data type.
makeCustomLenses' :: Name -> [Name] -> Q [Dec]
makeCustomLenses' name lst = makeCustomLensesFiltered f name
where
allowed = S.fromList . map nameBase $ lst
f = flip S.member allowed
| kawamuray/ganeti | src/Ganeti/Lens.hs | gpl-2.0 | 1,757 | 0 | 11 | 327 | 238 | 132 | 106 | 21 | 1 |
module Graph.Builder.Instance ( InstanceBuilder
, Target (..)
, Elem (..)
, RuleOp (..)
, build
, newRule
, newNode
, newEdge
, addRuleNode
, addRuleEdge
, getGraph
, putGraph
, getRules
, putRules
, getType
, putType
, getCurrentRule
, setCurrentRule
, getCurrentRuleId
, setCurrentRuleId
, setRuleOperation
) where
import Data.List
import Data.Maybe
import Data.IntMap (keys)
import qualified Data.IntMap as M
import Control.Monad.State
import Control.Monad.IO.Class
import Graph.Digraph
import Graph.Rewriting
type St a b = (Digraph a b, Digraph a b, [(Int, Rule a b)], Int)
type InstanceBuilder a b = StateT (St a b)
data Target = Inst -- Instance graph
| Type -- Type graph
| Rule -- Rule span
data RuleOp = Preserve -- Element will be preserved
| Delete -- Element will be deleted
| Create -- Element will be created
data Elem = N -- Acts on Nodes
| E -- Acts on Edges
type TypeId = Int
-- | Returns the current graph
getGraph :: Monad m => InstanceBuilder a b m (Digraph a b)
getGraph = do (g, t, rs, r) <- get
return g
-- | Sets the current graph
putGraph :: Monad m => Digraph a b -> InstanceBuilder a b m ()
putGraph g = do (_, t, rs, r) <- get
put $ (g, t, rs, r)
-- | Gets the current type graph
getType :: Monad m => InstanceBuilder a b m (Digraph a b)
getType = do (g, t, rs, r) <- get
return t
-- | Saves the current type graph
putType :: Monad m => Digraph a b -> InstanceBuilder a b m ()
putType t = do (g, _, rs, r) <- get
put $ (g, t, rs, r)
-- | Get the current list of rules
getRules :: Monad m => InstanceBuilder a b m [(Int, Rule a b)]
getRules = do (g, t, rs, r) <- get
return rs
-- | Puts a new list of rules
putRules :: Monad m => [(Int, Rule a b)] -> InstanceBuilder a b m ()
putRules rs = do (g, t, _, r) <- get
put $ (g, t, rs, r)
-- | Gets the id of the currently selected rule
getCurrentRuleId :: Monad m => InstanceBuilder a b m Int
getCurrentRuleId = do (g, t, rs, r) <- get
return r
-- | Selects a new rule id
setCurrentRuleId :: Monad m => Int -> InstanceBuilder a b m ()
setCurrentRuleId r = do (g, t, rs, _) <- get
put $ (g, t, rs, r)
-- | Gets the currently selected rule
getCurrentRule :: Monad m => InstanceBuilder a b m (Rule a b)
getCurrentRule = do rs <- getRules
i <- getCurrentRuleId
return $ fromJust $ lookup i rs
-- | Puts a new rule on the current id
setCurrentRule :: Monad m => Rule a b -> InstanceBuilder a b m ()
setCurrentRule r = do rs <- getRules
i <- getCurrentRuleId
let rs' = filter ((/= i) . fst) rs
putRules $ (i, r):rs'
-- | Builds the instance
build :: Monad m => InstanceBuilder a b m r -> m (TypedDigraph a b, [Rule a b])
build i = do
(g, t, rs, _) <- liftM snd $ runStateT i (empty, empty, [], -1)
return (TypedDigraph g t, map snd rs)
-- | Creates a new rule, returns the id
newRule :: Monad m => InstanceBuilder a b m Int
newRule = do rs <- getRules
let newId = length rs
putRules $ (newId, Morphism [] []):rs
return $ newId
-- | Deletes the selected rule.
deleteRule :: Monad m => Int -> InstanceBuilder a b m ()
deleteRule i = do rs <- getRules
let rs' = filter ((/= i) . fst) rs
putRules rs'
-- | Checks if the graph and all rules are consistent, calls fail if not.
checkConsistency :: Monad m => InstanceBuilder a b m ()
checkConsistency = undefined
-- | Creates a new node on the selected target
newNode :: Monad m => Target -> TypeId -> a -> InstanceBuilder a b m Int
newNode Rule t p = do (Morphism ns es) <- getCurrentRule
let ks = (map nodeID $ catMaybes $ map fst ns)
newNode = (Just $ Node (newId ks) t p)
newRule = (newNode, newNode)
setCurrentRule $ Morphism (newRule:ns) es
return $ newId ks
newNode Type t p = do (Digraph ns es) <- getType
let ks = keys ns
newNode = Node (newId ks) (newId ks) p
newGraph = Digraph (M.insert (newId ks) newNode ns) es
putType newGraph
return $ newId ks -- thank you, lazyness
newNode Inst t p = do (Digraph ns es) <- getGraph
let ks = keys ns
newNode = Node (newId ks) t p
newGraph = Digraph (M.insert (newId ks) newNode ns) es
putGraph newGraph
return $ newId ks -- thank you, lazyness
-- | Creates a new edge on the selected target
newEdge :: Monad m => Target -> TypeId -> (Int, Int) -> b -> InstanceBuilder a b m Int
newEdge Inst t c p = do (Digraph ns es) <- getGraph
let ks = keys es
newNode = Edge (newId ks) c t p
newGraph = Digraph ns (M.insert (newId ks) newNode es)
putGraph newGraph
return $ newId ks -- thank you, lazyness
newEdge Type t c p = do (Digraph ns es) <- getType
let ks = keys es
newNode = Edge (newId ks) c (newId ks) p
newGraph = Digraph ns (M.insert (newId ks) newNode es)
putType newGraph
return $ newId ks -- thank you, lazyness
newEdge Rule t c p = do (Morphism ns es) <- getCurrentRule
let kext f = map edgeID $ catMaybes $ map f es
ks = kext fst `union` kext snd
newEdge = (Just $ Edge (newId ks) c t p)
newRule = (newEdge, newEdge)
setCurrentRule $ Morphism ns (newRule:es)
return $ newId ks
addRuleNode :: Monad m => RuleOp -> TypeId -> a -> InstanceBuilder a b m Int
addRuleNode o t p = do nid <- newNode Rule t p
setRuleOperation o N nid
return nid
addRuleEdge :: Monad m => RuleOp -> TypeId -> (Int, Int) -> b -> InstanceBuilder a b m Int
addRuleEdge o t c p = do eid <- newEdge Rule t c p
setRuleOperation o E eid
return eid
-- | Sets the operation on the selected rule node.
setRuleOperation :: Monad m => RuleOp -> Elem -> Int -> InstanceBuilder a b m ()
setRuleOperation o N i = do (Morphism ns es) <- getCurrentRule
let rl = filter (not . selectAction (byElementId i)) ns
[rc] = filter (selectAction (byElementId i)) ns
setCurrentRule $ Morphism (ruleOP o rc:rl) es
setRuleOperation o E i = do (Morphism ns es) <- getCurrentRule
let rl = filter (not . selectAction (byElementId i)) es
[rc] = filter (selectAction (byElementId i)) es
setCurrentRule $ Morphism ns (ruleOP o rc:rl)
-- Internal functions
byElementId :: Element a => Int -> Maybe a -> Bool
byElementId _ Nothing = False
byElementId i (Just x) = elemId x == i
selectAction :: Element a => (Maybe a -> Bool) -> (Maybe a, Maybe a) -> Bool
selectAction f (x, y) = f x || f y
ruleOP :: RuleOp -> (Maybe a, Maybe a) -> (Maybe a, Maybe a)
ruleOP Preserve (Just x, Nothing) = (Just x, Just x)
ruleOP Preserve (Nothing, Just x) = (Just x, Just x)
ruleOP Preserve x = x
ruleOP Delete (_, Just x) = (Just x, Nothing)
ruleOP Delete x = x
ruleOP Create (Just x, _) = (Nothing, Just x)
ruleOP Create x = x
ids :: [Int]
ids = [1..]
newId :: [Int] -> Int
newId = head . (ids \\)
| rodrigo-machado/ufrgs-grt | src/Graph/Builder/Instance.hs | gpl-3.0 | 8,583 | 0 | 15 | 3,487 | 2,807 | 1,438 | 1,369 | 161 | 1 |
{-# LANGUAGE DeriveGeneric #-}
module PetaVision.Utility.DFT.Plan
( module HM
, DFTPlanID(..)
, DFTType(..)
, DFTPlan
, FFTWLock
, getFFTWLock
, getDFTPlan
, getEmptyPlan
, dftExecuteWithPlan
, dftExecuteBatch
, dftExecute
, dft2dPlan
, idft2dPlan
, dft1dGPlan
, idft1dGPlan
) where
import Control.Concurrent.MVar
import Control.Monad as M
import Data.Complex
import Data.Hashable
import Data.HashMap.Strict as HM
import Data.List as L
import Data.Vector.Storable as VS
import Data.Vector.Storable.Mutable as VSM
import Foreign.Marshal.Array
import GHC.Generics (Generic)
import PetaVision.Utility.DFT.FFI
import PetaVision.Utility.DFT.Base (Flag (..), Sign (..), measure,
unFlag, unSign)
-- | Which transform a cached plan performs.  The @G@ variants are the
-- "generalized" 1D transforms whose normalization may cover only a
-- sub-range of the dimensions (see 'dftIdx' and 'dftExecuteWithPlan').
data DFTType
  = DFT1DG   -- ^ forward generalized 1D transform
  | IDFT1DG  -- ^ inverse generalized 1D transform (result is rescaled)
  | DFT2D    -- ^ forward 2D transform
  | IDFT2D   -- ^ inverse 2D transform (result is rescaled)
  deriving (Show, Eq, Generic)

-- Needed so 'DFTPlanID' (which contains a 'DFTType') can key a 'HashMap'.
instance Hashable DFTType
-- | Key under which a compiled plan is cached: the transform type, the
-- full dimension list of the array, and the index range the transform
-- acts over.
data DFTPlanID = DFTPlanID
  { dftType :: DFTType -- ^ which transform this plan performs
  , dftDims :: [Int] -- ^ dimensions of the (flattened) array
  , dftIdx :: [Int] -- ^ indices of the transformed dimensions; used to
                    -- compute the inverse normalization factor in
                    -- 'dftExecuteWithPlan' (may be empty)
  } deriving (Show, Eq, Generic)

instance Hashable DFTPlanID
-- | Cache mapping a transform description to its compiled FFTW plan.
type DFTPlan = HashMap DFTPlanID Plan

-- | Lock serializing plan creation (see 'getFFTWLock').
type FFTWLock = MVar ()
-- | Create the lock used to serialise FFTW plan creation (see 'dft2dG').
{-# INLINE getFFTWLock #-}
getFFTWLock :: IO FFTWLock
getFFTWLock = newMVar ()

-- | An empty plan cache.
{-# INLINE getEmptyPlan #-}
getEmptyPlan :: DFTPlan
getEmptyPlan = HM.empty

-- | Look up a previously created plan; raises an error for an unknown ID.
{-# INLINE getDFTPlan #-}
getDFTPlan :: DFTPlan -> DFTPlanID -> Plan
getDFTPlan plan planID =
  maybe (error $ "getDFTPlan: couldn't find plan for ID " L.++ show planID)
        id
        (HM.lookup planID plan)
{-# INLINE dftExecuteWithPlan #-}
-- | Execute an already-created FFTW plan on @vec@.
--
-- FFTW's inverse transforms are unnormalised, so for 'IDFT1DG' and 'IDFT2D'
-- the result is divided by the number of transformed elements: the whole
-- array when the plan's index list is empty, otherwise the product of the
-- dimensions selected by that index list.
dftExecuteWithPlan :: DFTPlanID
                   -> Plan
                   -> VS.Vector (Complex Double)
                   -> IO (VS.Vector (Complex Double))
dftExecuteWithPlan planID@(DFTPlanID t d i) plan vec = do
  -- Fresh output buffer covering the full array described by the plan.
  v <- VSM.new (L.product d)
  VS.unsafeWith vec $ \ip -> VSM.unsafeWith v $ \op -> c_execute_dft plan ip op
  u <- VS.freeze v
  if t == IDFT1DG || t == IDFT2D
    then if L.null i
           -- No index list: normalise by the size of the whole array.
           then return $ VS.map (/ (fromIntegral . L.product $ d)) u
           else if size == 0
                  -- A zero element count means 'i' does not describe a valid
                  -- slice of 'd'; fail loudly rather than divide by zero.
                  then error $
                       "dftExecuteWithPlan: dimension error.\n" L.++ "dims: " L.++
                       show d L.++
                       "\nIdx: " L.++
                       show i
                  else return $ VS.map (/ (fromIntegral size)) u
    else return u
  where
    -- Number of elements spanned by the contiguous index range in 'i'.
    size = L.product . L.take (L.last i - L.head i + 1) . L.drop (L.head i) $ d
{-# INLINE dftExecuteBatch #-}
-- | Run the plan registered under @planID@ over every vector in the batch.
-- Raises an error if no such plan has been created yet.
dftExecuteBatch
  :: DFTPlan
  -> DFTPlanID
  -> [VS.Vector (Complex Double)]
  -> IO [VS.Vector (Complex Double)]
dftExecuteBatch hashMap planID vecs =
  case HM.lookup planID hashMap of
    Just plan -> M.mapM (dftExecuteWithPlan planID plan) vecs
    Nothing ->
      error $ "dftExecuteBatch: couldn't find plan for ID " L.++ show planID

{-# INLINE dftExecute #-}
-- | Run the plan registered under @planID@ over a single vector.
-- Raises an error if no such plan has been created yet.
dftExecute
  :: DFTPlan
  -> DFTPlanID
  -> VS.Vector (Complex Double)
  -> IO (VS.Vector (Complex Double))
dftExecute hashMap planID vec =
  maybe missing (\plan -> dftExecuteWithPlan planID plan vec)
        (HM.lookup planID hashMap)
  where
    missing = error $ "dftExecute: couldn't find plan for ID " L.++ show planID
{-# INLINE dft2dG #-}
-- | Create a 2-D FFTW plan for a @rows x cols@ complex array and execute it
-- once on @vec@, returning the plan (for caching) together with the result.
--
-- The planner call is guarded by the lock; 'c_execute' runs after the lock
-- has been released.  NOTE(review): 'dft1dGGeneric' executes before releasing
-- the lock -- the asymmetry looks accidental; confirm which order is intended.
dft2dG
  :: MVar ()
  -> Int
  -> Int
  -> VS.Vector (Complex Double)
  -> Sign
  -> Flag
  -> IO (Plan, VS.Vector (Complex Double))
dft2dG lock' rows cols vec sign flag = do
  v <- VSM.new (rows * cols)
  -- Serialise planner access across threads.
  x <- takeMVar lock'
  p <-
    VS.unsafeWith vec $ \ip ->
      VSM.unsafeWith v $ \op -> do
        c_plan_dft_2d
          (fromIntegral rows)
          (fromIntegral cols)
          ip
          op
          (unSign sign)
          (unFlag flag)
  putMVar lock' x
  c_execute p
  u <- VS.freeze v
  return (p, u)
{-# INLINE dft2dPlan #-}
-- | Transform @vec@ with the forward 2-D plan for a @rows x cols@ array,
-- creating and caching the plan on first use.
dft2dPlan
  :: FFTWLock
  -> DFTPlan
  -> Int
  -> Int
  -> VS.Vector (Complex Double)
  -> IO (DFTPlan, VS.Vector (Complex Double))
dft2dPlan lock' hashMap rows cols vec =
  case HM.lookup planID hashMap of
    Just p -> do
      v <- dftExecuteWithPlan planID p vec
      return (hashMap, v)
    Nothing -> do
      (p, v) <- dft2dG lock' rows cols vec DFTForward measure
      return (HM.insert planID p hashMap, v)
  where
    planID = DFTPlanID DFT2D [rows, cols] []

{-# INLINE idft2dPlan #-}
-- | Inverse counterpart of 'dft2dPlan'.  A freshly computed result is scaled
-- by @1 / (rows * cols)@; a cached plan goes through 'dftExecuteWithPlan',
-- which applies the same normalisation for 'IDFT2D'.
idft2dPlan
  :: FFTWLock
  -> DFTPlan
  -> Int
  -> Int
  -> VS.Vector (Complex Double)
  -> IO (DFTPlan, VS.Vector (Complex Double))
idft2dPlan lock' hashMap rows cols vec =
  case HM.lookup planID hashMap of
    Just p -> do
      v <- dftExecuteWithPlan planID p vec
      return (hashMap, v)
    Nothing -> do
      (p, v) <- dft2dG lock' rows cols vec DFTBackward measure
      let scaled = VS.map (/ (fromIntegral $ rows * cols)) v
      return (HM.insert planID p hashMap, scaled)
  where
    planID = DFTPlanID IDFT2D [rows, cols] []
-- This is a generalied 1d dft, the 1 dimension can be many dimensions
-- which are ascending ordered and continued. For example, given a N
-- dimension array, the generalized 1d dft dimension is
-- either [0,1..M] or [M,M+1.. N-1], where 0 <= M and M <= N-1
-- the dimension corresponding to the largest index spins the fastest.
{-# INLINE dft1dGGeneric #-}
-- | Create and run a "generalized 1-D" plan: a batched DFT over a contiguous,
-- ascending range of dimensions that is anchored at either the first or the
-- last dimension of the array (see the comment above).  Returns the plan for
-- caching together with the (unnormalised) result.
dft1dGGeneric
  :: MVar ()
  -> [Int]
  -> [Int]
  -> VS.Vector (Complex Double)
  -> Sign
  -> Flag
  -> IO (Plan, VS.Vector (Complex Double))
dft1dGGeneric lock' dims dftIndex vec sign flag
  -- Guard: indices are consecutive, non-empty, and touch the first or
  -- last dimension; anything else cannot be expressed with a single
  -- stride/dist pair below.
  | L.and (L.zipWith (\a b -> a + 1 == b) dftIndex . L.tail $ dftIndex) &&
      (not . L.null $ dftIndex) &&
      (L.head dftIndex == 0 || L.last dftIndex == rank - 1) = do
    v <- VSM.new . L.product $ dims
    x <- takeMVar lock'
    p <-
      VS.unsafeWith vec $ \ip ->
        VSM.unsafeWith v $ \op ->
          withArray (L.map fromIntegral dftDims) $ \n -> do
            let totalNum = L.product dims
                dftNum = L.product dftDims
                -- Transformed dims innermost: elements are contiguous
                -- (stride 1) and consecutive batches are dftNum apart.
                -- Transformed dims outermost: elements are separated by the
                -- product of the remaining inner dims and batches are
                -- interleaved (dist 1).
                stride =
                  if L.last dftIndex == rank - 1
                    then 1
                    else L.product . L.drop (1 + L.last dftIndex) $ dims
                dist =
                  if L.last dftIndex == rank - 1
                    then dftNum
                    else 1
            c_plan_many_dft
              (fromIntegral dftRank)
              n
              (fromIntegral $ div totalNum dftNum)
              ip
              n
              (fromIntegral stride)
              (fromIntegral dist)
              op
              n
              (fromIntegral stride)
              (fromIntegral dist)
              (unSign sign)
              (unFlag flag)
    -- NOTE(review): execution happens while the planner lock is still held,
    -- unlike 'dft2dG' which releases the lock first; confirm intent.
    c_execute p
    putMVar lock' x
    u <- VS.freeze v
    return (p, u)
  | otherwise =
    error
      "dft1dG: dimension list doesn't satisify the restriction of generalized 1d dft."
  where
    rank = L.length dims
    dftRank = L.length dftIndex
    -- The dimensions actually being transformed.
    dftDims =
      L.take (L.last dftIndex - L.head dftIndex + 1) . L.drop (L.head dftIndex) $
      dims
{-# INLINE dft1dGPlan #-}
-- | Transform @vec@ with the forward generalized 1-D plan over the dimension
-- indices @dftIndex@ of an array shaped @dims@, creating and caching the plan
-- on first use.
dft1dGPlan
  :: FFTWLock
  -> DFTPlan
  -> [Int]
  -> [Int]
  -> VS.Vector (Complex Double)
  -> IO (DFTPlan, VS.Vector (Complex Double))
dft1dGPlan lock' hashMap dims dftIndex vec =
  case HM.lookup planID hashMap of
    Just p -> do
      v <- dftExecuteWithPlan planID p vec
      return (hashMap, v)
    Nothing -> do
      (p, v) <- dft1dGGeneric lock' dims dftIndex vec DFTForward measure
      return (HM.insert planID p hashMap, v)
  where
    planID = DFTPlanID DFT1DG dims dftIndex

{-# INLINE idft1dGPlan #-}
-- | Inverse counterpart of 'dft1dGPlan'.  A freshly computed result is scaled
-- by the reciprocal of the number of transformed elements; a cached plan goes
-- through 'dftExecuteWithPlan', which applies the same normalisation.
idft1dGPlan
  :: FFTWLock
  -> DFTPlan
  -> [Int]
  -> [Int]
  -> VS.Vector (Complex Double)
  -> IO (DFTPlan, VS.Vector (Complex Double))
idft1dGPlan lock' hashMap dims dftIndex vec =
  case HM.lookup planID hashMap of
    Just p -> do
      v <- dftExecuteWithPlan planID p vec
      return (hashMap, v)
    Nothing -> do
      (p, v) <- dft1dGGeneric lock' dims dftIndex vec DFTBackward measure
      return (HM.insert planID p hashMap, VS.map (/ size) v)
  where
    planID = DFTPlanID IDFT1DG dims dftIndex
    -- Element count of the transformed slice, used as the normalisation factor.
    size =
      fromIntegral .
      L.product .
      L.take (L.last dftIndex - L.head dftIndex + 1) . L.drop (L.head dftIndex) $
      dims
| XinhuaZhang/PetaVisionHaskell | PetaVision/Utility/DFT/Plan.hs | gpl-3.0 | 8,223 | 0 | 27 | 2,556 | 2,649 | 1,349 | 1,300 | 255 | 4 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE CPP #-}
-- | A data structure for modeling control flow graphs. Basic blocks can be
-- constructed separately, with arbitrary IDs attached to each one; you can
-- then generate a single circular structure for the entire graph, taking up
-- a finite amount of space.
module Data.ControlFlow
( Path(..)
, Flow
, System(..)
, usedPaths
, cleanPaths
, mapPaths
, simplifyPaths
, flow
, mapContinues
, numberPaths
, cleanContinues
, continues
) where
import Data.Void (Void)
import Data.Maybe (fromMaybe)
import qualified Data.Map as Map
import qualified Data.Set as Set
import Control.Monad.Trans.State (execState, gets, modify)
import Control.Monad (unless)
#if __GLASGOW_HASKELL__ < 710
import Data.Foldable (Foldable)
import Data.Traversable (Traversable)
#endif
import Data.List (sort, group)
import Data.Data (Data, Typeable)
-- | A partial control flow graph, with nodes of type @a@, branch nodes of type
-- @b@, continuation labels of type @c@, and leaf nodes of type @e@.
data Path c b e a
  = a :>> Path c b e a -- ^ an ordinary node followed by the rest of the path
  | Branch b (Path c b e a) (Path c b e a) -- ^ two-way branch tagged with a @b@
  | Continue c -- ^ jump to the path labelled @c@
  | End e -- ^ leaf node carrying an @e@
  deriving
    ( Eq, Ord, Show, Read
    , Functor, Foldable, Traversable
    , Data, Typeable )

infixr 1 :>>

-- | A control flow graph without continuation labels.
type Flow = Path Void
-- | A complete control flow graph in the form of a starting block, and a
-- mapping from labels to blocks.
data System c b e a = System
  { systemStart :: Path c b e a -- ^ the entry path of the graph
  , systemPaths :: Map.Map c (Path c b e a) -- ^ labelled continuation targets
  } deriving (Eq, Ord, Show, Read, Functor, Foldable, Traversable)
-- | Returns the set of all labels which are reachable from the start path.
usedPaths :: (Ord c) => System c b e a -> Set.Set c
usedPaths sys = visit (systemStart sys) Set.empty
  where
    paths = systemPaths sys
    -- Depth-first walk accumulating every label reached; a label already in
    -- the set is not followed again, so cyclic graphs terminate.
    visit g seen =
      case g of
        _ :>> x -> visit x seen
        Branch _ x y -> visit y (visit x seen)
        End _ -> seen
        Continue label
          | label `Set.member` seen -> seen
          | otherwise ->
              maybe missingPath
                    (\p -> visit p (Set.insert label seen))
                    (Map.lookup label paths)
    missingPath = error "usedPaths: missing continue"
-- | Removes all unused continuations from the map.
-- | Removes all unused continuations from the map.
cleanPaths :: (Ord c) => System c b e a -> System c b e a
cleanPaths sys = sys { systemPaths = Map.filterWithKey keep (systemPaths sys) }
  where
    reachable = usedPaths sys
    keep label _ = label `Set.member` reachable
-- | The list of all labels directly continued to by the given path. May contain
-- duplicates, if there are two continues to the same label.
-- | All labels this path jumps to, in left-to-right order; duplicates are
-- kept when the same label is continued to more than once.
continues :: (Ord c) => Path c b e a -> [c]
continues (_ :>> x)      = continues x
continues (Branch _ x y) = continues x ++ continues y
continues (Continue c)   = [c]
continues (End _)        = []
-- | @unroll l x g@ finds all places where @g@ continues to the label @l@, and
-- replaces them with the path @x@.
-- | @unroll l x g@ replaces every @Continue l@ inside @g@ with the path @x@.
unroll :: (Eq c) => c -> Path c b e a -> Path c b e a -> Path c b e a
unroll cfrom gto = go
  where
    go (v :>> x)      = v :>> go x
    go (Branch b x y) = Branch b (go x) (go y)
    go g@(Continue c)
      | c == cfrom    = gto
      | otherwise     = g
    go g              = g
-- | Applies a function to every path inside a system.
-- | Applies a function to the start path and every labelled path of a system.
mapPaths
  :: (Path c b e a -> Path c b' e' a') -> System c b e a -> System c b' e' a'
mapPaths f (System st ps) = System (f st) (Map.map f ps)
-- | Like 'unroll', but applied to a network of paths.
-- | Like 'unroll', but applied to a whole system: the labelled path is
-- removed from the map and spliced into every place that continued to it.
unrollSystem :: (Ord c) => c -> System c b e a -> System c b e a
unrollSystem c sys =
  maybe sys substitute (Map.lookup c (systemPaths sys))
  where
    substitute path =
      mapPaths (unroll c path)
               sys { systemPaths = Map.delete c (systemPaths sys) }
-- | Sorts a list, and then returns only elements that appear just once.
-- | Elements that occur exactly once in the input, in ascending order.
uniqueElems :: (Ord a) => [a] -> [a]
uniqueElems xs = concatMap pick (group (sort xs))
  where
    pick [x] = [x]
    pick _   = []
-- | A list of all continuation labels that only appear once.
usedOnce :: (Ord c) => System c b e a -> [c]
usedOnce sys = uniqueElems (concatMap continues everyPath)
  where
    everyPath = systemStart sys : Map.elems (systemPaths sys)

-- | For each path which is only referenced in one location, removes the path
-- and pastes its contents into the place it was referenced.
simplifyPaths :: (Ord c) => System c b e a -> System c b e a
simplifyPaths sys = foldr unrollSystem sys (usedOnce sys)
-- | Given a start point and a mapping from continutation labels to code
-- chunks, creates a single structure embedding the entire control flow.
-- The structure may be circular, but it will only take up a finite amount
-- of memory, and all continuation paths will only be computed once.
flow :: (Ord c) => System c b e a -> Flow b e a
flow sys = let
  -- One lazily-converted flow per label; because this map is shared by every
  -- lookup below, each continuation is translated a single time even though
  -- the resulting structure may be circular (this is what keeps it finite).
  flows = fmap toFlow $ systemPaths sys
  getFlow label = fromMaybe (error "flow: missing continue") $
    Map.lookup label flows
  toFlow g = case g of
    v :>> x -> v :>> toFlow x
    Branch b x y -> Branch b (toFlow x) (toFlow y)
    -- Ties the knot: a continue becomes the (possibly cyclic) converted flow.
    Continue c -> getFlow c
    End e -> End e
  in toFlow $ systemStart sys
-- | Applies a function to every continuation label in a path.
-- | Applies a function to every continuation label in a path.
mapContinues :: (c -> c') -> Path c b e a -> Path c' b e a
mapContinues f (Continue c)   = Continue (f c)
mapContinues f (x :>> xs)     = x :>> mapContinues f xs
mapContinues f (Branch b x y) = Branch b (mapContinues f x) (mapContinues f y)
mapContinues _ (End e)        = End e
-- | Replaces continuation labels with numbers starting from 0.
-- | Replaces continuation labels with numbers starting from 0, assigned in
-- the map's key order.
numberPaths :: (Eq c) => System c b e a -> System Int b e a
numberPaths (System st ps) =
  System (renumber st) (Map.mapKeys toInt (fmap renumber ps))
  where
    renumber = mapContinues toInt
    table = zip (Map.keys ps) [0 ..]
    toInt c = fromMaybe (error "numberPaths: missing continue") (lookup c table)
-- | Removes any paths which only serve to continue on to another path.
-- | Removes any paths which only serve to continue on to another path
-- (self-continues are kept to avoid unrolling forever).
cleanContinues :: (Ord c) => System c b e a -> System c b e a
cleanContinues sys = foldr unrollSystem sys trivial
  where
    trivial =
      [ c | (c, Continue c') <- Map.toList (systemPaths sys), c /= c' ]
| mtolly/rail | src/Data/ControlFlow.hs | gpl-3.0 | 6,118 | 0 | 17 | 1,464 | 1,910 | 983 | 927 | 117 | 4 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.EC2.DescribeSecurityGroups
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Describes one or more of your security groups.
--
-- A security group is for use with instances either in the EC2-Classic
-- platform or in a specific VPC. For more information, see
-- <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-network-security.html Amazon EC2 Security Groups>
-- in the /Amazon Elastic Compute Cloud User Guide/ and
-- <http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_SecurityGroups.html Security Groups for Your VPC>
-- in the /Amazon Virtual Private Cloud User Guide/.
--
-- /See:/ <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeSecurityGroups.html AWS API Reference> for DescribeSecurityGroups.
module Network.AWS.EC2.DescribeSecurityGroups
(
-- * Creating a Request
describeSecurityGroups
, DescribeSecurityGroups
-- * Request Lenses
, dsgsFilters
, dsgsGroupNames
, dsgsGroupIds
, dsgsDryRun
-- * Destructuring the Response
, describeSecurityGroupsResponse
, DescribeSecurityGroupsResponse
-- * Response Lenses
, dsgrsSecurityGroups
, dsgrsResponseStatus
) where
import Network.AWS.EC2.Types
import Network.AWS.EC2.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'describeSecurityGroups' smart constructor.
data DescribeSecurityGroups = DescribeSecurityGroups'
    { _dsgsFilters :: !(Maybe [Filter]) -- ^ query filters; see 'dsgsFilters'
    , _dsgsGroupNames :: !(Maybe [Text]) -- ^ EC2-Classic\/default-VPC group names; see 'dsgsGroupNames'
    , _dsgsGroupIds :: !(Maybe [Text]) -- ^ security group IDs; see 'dsgsGroupIds'
    , _dsgsDryRun :: !(Maybe Bool) -- ^ permission-check-only flag; see 'dsgsDryRun'
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeSecurityGroups' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dsgsFilters'
--
-- * 'dsgsGroupNames'
--
-- * 'dsgsGroupIds'
--
-- * 'dsgsDryRun'
describeSecurityGroups
    :: DescribeSecurityGroups
-- All four fields are optional, so the minimal request leaves each unset.
describeSecurityGroups =
    DescribeSecurityGroups' Nothing Nothing Nothing Nothing
-- | One or more filters. If using multiple filters for rules, the results
-- include security groups for which any combination of rules - not
-- necessarily a single rule - match all filters.
--
-- - 'description' - The description of the security group.
--
-- - 'egress.ip-permission.prefix-list-id' - The ID (prefix) of the AWS
-- service to which the security group allows access.
--
-- - 'group-id' - The ID of the security group.
--
-- - 'group-name' - The name of the security group.
--
-- - 'ip-permission.cidr' - A CIDR range that has been granted
-- permission.
--
-- - 'ip-permission.from-port' - The start of port range for the TCP and
-- UDP protocols, or an ICMP type number.
--
-- - 'ip-permission.group-id' - The ID of a security group that has been
-- granted permission.
--
-- - 'ip-permission.group-name' - The name of a security group that has
-- been granted permission.
--
-- - 'ip-permission.protocol' - The IP protocol for the permission ('tcp'
-- | 'udp' | 'icmp' or a protocol number).
--
-- - 'ip-permission.to-port' - The end of port range for the TCP and UDP
-- protocols, or an ICMP code.
--
-- - 'ip-permission.user-id' - The ID of an AWS account that has been
-- granted permission.
--
-- - 'owner-id' - The AWS account ID of the owner of the security group.
--
-- - 'tag-key' - The key of a tag assigned to the security group.
--
-- - 'tag-value' - The value of a tag assigned to the security group.
--
-- - 'vpc-id' - The ID of the VPC specified when the security group was
-- created.
--
dsgsFilters :: Lens' DescribeSecurityGroups [Filter]
dsgsFilters = lens _dsgsFilters upd . _Default . _Coerce
  where
    upd s a = s {_dsgsFilters = a}

-- | [EC2-Classic and default VPC only] One or more security group names. You
-- can specify either the security group name or the security group ID. For
-- security groups in a nondefault VPC, use the 'group-name' filter to
-- describe security groups by name.
--
-- Default: Describes all your security groups.
dsgsGroupNames :: Lens' DescribeSecurityGroups [Text]
dsgsGroupNames = lens _dsgsGroupNames upd . _Default . _Coerce
  where
    upd s a = s {_dsgsGroupNames = a}

-- | One or more security group IDs. Required for security groups in a
-- nondefault VPC.
--
-- Default: Describes all your security groups.
dsgsGroupIds :: Lens' DescribeSecurityGroups [Text]
dsgsGroupIds = lens _dsgsGroupIds upd . _Default . _Coerce
  where
    upd s a = s {_dsgsGroupIds = a}

-- | Checks whether you have the required permissions for the action, without
-- actually making the request, and provides an error response. If you have
-- the required permissions, the error response is 'DryRunOperation'.
-- Otherwise, it is 'UnauthorizedOperation'.
dsgsDryRun :: Lens' DescribeSecurityGroups (Maybe Bool)
dsgsDryRun = lens _dsgsDryRun upd
  where
    upd s a = s {_dsgsDryRun = a}
-- Request is sent as an EC2 query POST; the response XML's
-- "securityGroupInfo" item list becomes the security-group field and the
-- HTTP status is recorded alongside it.
instance AWSRequest DescribeSecurityGroups where
        type Rs DescribeSecurityGroups =
             DescribeSecurityGroupsResponse
        request = postQuery eC2
        response
          = receiveXML
              (\ s h x ->
                 DescribeSecurityGroupsResponse' <$>
                   (x .@? "securityGroupInfo" .!@ mempty >>=
                      may (parseXMLList "item"))
                     <*> (pure (fromEnum s)))

instance ToHeaders DescribeSecurityGroups where
        toHeaders = const mempty

instance ToPath DescribeSecurityGroups where
        toPath = const "/"

-- Wire format: Action/Version plus one numbered query entry per filter,
-- group name, and group ID.  These literal strings are part of the EC2
-- 2015-04-15 API contract and must not be edited.
instance ToQuery DescribeSecurityGroups where
        toQuery DescribeSecurityGroups'{..}
          = mconcat
              ["Action" =:
                 ("DescribeSecurityGroups" :: ByteString),
               "Version" =: ("2015-04-15" :: ByteString),
               toQuery (toQueryList "Filter" <$> _dsgsFilters),
               toQuery
                 (toQueryList "GroupName" <$> _dsgsGroupNames),
               toQuery (toQueryList "GroupId" <$> _dsgsGroupIds),
               "DryRun" =: _dsgsDryRun]
-- | /See:/ 'describeSecurityGroupsResponse' smart constructor.
data DescribeSecurityGroupsResponse = DescribeSecurityGroupsResponse'
    { _dsgrsSecurityGroups :: !(Maybe [SecurityGroup]) -- ^ matched groups; see 'dsgrsSecurityGroups'
    , _dsgrsResponseStatus :: !Int -- ^ response status code; see 'dsgrsResponseStatus'
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeSecurityGroupsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dsgrsSecurityGroups'
--
-- * 'dsgrsResponseStatus'
describeSecurityGroupsResponse
    :: Int -- ^ 'dsgrsResponseStatus'
    -> DescribeSecurityGroupsResponse
-- The group list starts unset; only the status code is required.
describeSecurityGroupsResponse pResponseStatus_ =
    DescribeSecurityGroupsResponse' Nothing pResponseStatus_
-- | Information about one or more security groups.
dsgrsSecurityGroups :: Lens' DescribeSecurityGroupsResponse [SecurityGroup]
dsgrsSecurityGroups = lens _dsgrsSecurityGroups upd . _Default . _Coerce
  where
    upd s a = s {_dsgrsSecurityGroups = a}

-- | The response status code.
dsgrsResponseStatus :: Lens' DescribeSecurityGroupsResponse Int
dsgrsResponseStatus = lens _dsgrsResponseStatus upd
  where
    upd s a = s {_dsgrsResponseStatus = a}
| olorin/amazonka | amazonka-ec2/gen/Network/AWS/EC2/DescribeSecurityGroups.hs | mpl-2.0 | 8,008 | 0 | 15 | 1,629 | 918 | 564 | 354 | 100 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
-- |
-- Module : Network.Google.Composer
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Manages Apache Airflow environments on Google Cloud Platform.
--
-- /See:/ <https://cloud.google.com/composer/ Cloud Composer API Reference>
module Network.Google.Composer
(
-- * Service Configuration
composerService
-- * OAuth Scopes
, cloudPlatformScope
-- * API Declaration
, ComposerAPI
-- * Resources
-- ** composer.projects.locations.environments.create
, module Network.Google.Resource.Composer.Projects.Locations.Environments.Create
-- ** composer.projects.locations.environments.delete
, module Network.Google.Resource.Composer.Projects.Locations.Environments.Delete
-- ** composer.projects.locations.environments.get
, module Network.Google.Resource.Composer.Projects.Locations.Environments.Get
-- ** composer.projects.locations.environments.list
, module Network.Google.Resource.Composer.Projects.Locations.Environments.List
-- ** composer.projects.locations.environments.patch
, module Network.Google.Resource.Composer.Projects.Locations.Environments.Patch
-- ** composer.projects.locations.imageVersions.list
, module Network.Google.Resource.Composer.Projects.Locations.ImageVersions.List
-- ** composer.projects.locations.operations.delete
, module Network.Google.Resource.Composer.Projects.Locations.Operations.Delete
-- ** composer.projects.locations.operations.get
, module Network.Google.Resource.Composer.Projects.Locations.Operations.Get
-- ** composer.projects.locations.operations.list
, module Network.Google.Resource.Composer.Projects.Locations.Operations.List
-- * Types
-- ** Status
, Status
, status
, sDetails
, sCode
, sMessage
-- ** OperationSchema
, OperationSchema
, operationSchema
, osAddtional
-- ** ListImageVersionsResponse
, ListImageVersionsResponse
, listImageVersionsResponse
, livrNextPageToken
, livrImageVersions
-- ** ListEnvironmentsResponse
, ListEnvironmentsResponse
, listEnvironmentsResponse
, lerNextPageToken
, lerEnvironments
-- ** WebServerConfig
, WebServerConfig
, webServerConfig
, wscMachineType
-- ** DatabaseConfig
, DatabaseConfig
, databaseConfig
, dcMachineType
-- ** EnvironmentConfig
, EnvironmentConfig
, environmentConfig
, ecDatabaseConfig
, ecWebServerConfig
, ecNodeConfig
, ecNodeCount
, ecPrivateEnvironmentConfig
, ecEncryptionConfig
, ecSoftwareConfig
, ecDagGcsPrefix
, ecWebServerNetworkAccessControl
, ecGkeCluster
, ecAirflowURI
-- ** ListOperationsResponse
, ListOperationsResponse
, listOperationsResponse
, lorNextPageToken
, lorOperations
-- ** NodeConfig
, NodeConfig
, nodeConfig
, ncDiskSizeGb
, ncLocation
, ncNetwork
, ncOAuthScopes
, ncIPAllocationPolicy
, ncServiceAccount
, ncSubnetwork
, ncMachineType
, ncTags
-- ** Operation
, Operation
, operation
, oDone
, oError
, oResponse
, oName
, oMetadata
-- ** Empty
, Empty
, empty
-- ** OperationMetadataOperationType
, OperationMetadataOperationType (..)
-- ** SoftwareConfigEnvVariables
, SoftwareConfigEnvVariables
, softwareConfigEnvVariables
, scevAddtional
-- ** ImageVersion
, ImageVersion
, imageVersion
, ivUpgradeDisabled
, ivCreationDisabled
, ivReleaseDate
, ivImageVersionId
, ivSupportedPythonVersions
, ivIsDefault
-- ** Environment
, Environment
, environment
, eState
, eConfig
, eUuid
, eUpdateTime
, eName
, eLabels
, eCreateTime
-- ** SoftwareConfigAirflowConfigOverrides
, SoftwareConfigAirflowConfigOverrides
, softwareConfigAirflowConfigOverrides
, scacoAddtional
-- ** StatusDetailsItem
, StatusDetailsItem
, statusDetailsItem
, sdiAddtional
-- ** AllowedIPRange
, AllowedIPRange
, allowedIPRange
, airValue
, airDescription
-- ** IPAllocationPolicy
, IPAllocationPolicy
, ipAllocationPolicy
, iapServicesSecondaryRangeName
, iapUseIPAliases
, iapClusterSecondaryRangeName
, iapClusterIPv4CIdRBlock
, iapServicesIPv4CIdRBlock
-- ** CheckUpgradeResponseContainsPypiModulesConflict
, CheckUpgradeResponseContainsPypiModulesConflict (..)
-- ** Date
, Date
, date
, dDay
, dYear
, dMonth
-- ** CheckUpgradeResponsePypiDependencies
, CheckUpgradeResponsePypiDependencies
, checkUpgradeResponsePypiDependencies
, curpdAddtional
-- ** SoftwareConfigPypiPackages
, SoftwareConfigPypiPackages
, softwareConfigPypiPackages
, scppAddtional
-- ** OperationMetadataState
, OperationMetadataState (..)
-- ** EnvironmentState
, EnvironmentState (..)
-- ** Xgafv
, Xgafv (..)
-- ** PrivateEnvironmentConfig
, PrivateEnvironmentConfig
, privateEnvironmentConfig
, pecWebServerIPv4CIdRBlock
, pecCloudSQLIPv4CIdRBlock
, pecWebServerIPv4ReservedRange
, pecPrivateClusterConfig
, pecEnablePrivateEnvironment
-- ** SoftwareConfig
, SoftwareConfig
, softwareConfig
, scImageVersion
, scPythonVersion
, scPypiPackages
, scAirflowConfigOverrides
, scEnvVariables
-- ** PrivateClusterConfig
, PrivateClusterConfig
, privateClusterConfig
, pccEnablePrivateEndpoint
, pccMasterIPv4CIdRBlock
, pccMasterIPv4ReservedRange
-- ** EncryptionConfig
, EncryptionConfig
, encryptionConfig
, ecKmsKeyName
-- ** CheckUpgradeResponse
, CheckUpgradeResponse
, checkUpgradeResponse
, curContainsPypiModulesConflict
, curBuildLogURI
, curImageVersion
, curPypiDependencies
, curPypiConflictBuildLogExtract
-- ** WebServerNetworkAccessControl
, WebServerNetworkAccessControl
, webServerNetworkAccessControl
, wsnacAllowedIPRanges
-- ** EnvironmentLabels
, EnvironmentLabels
, environmentLabels
, elAddtional
-- ** OperationMetadata
, OperationMetadata
, operationMetadata
, omState
, omResourceUuid
, omResource
, omEndTime
, omOperationType
, omCreateTime
-- ** OperationResponse
, OperationResponse
, operationResponse
, orAddtional
) where
import Network.Google.Prelude
import Network.Google.Composer.Types
import Network.Google.Resource.Composer.Projects.Locations.Environments.Create
import Network.Google.Resource.Composer.Projects.Locations.Environments.Delete
import Network.Google.Resource.Composer.Projects.Locations.Environments.Get
import Network.Google.Resource.Composer.Projects.Locations.Environments.List
import Network.Google.Resource.Composer.Projects.Locations.Environments.Patch
import Network.Google.Resource.Composer.Projects.Locations.ImageVersions.List
import Network.Google.Resource.Composer.Projects.Locations.Operations.Delete
import Network.Google.Resource.Composer.Projects.Locations.Operations.Get
import Network.Google.Resource.Composer.Projects.Locations.Operations.List
{- $resources
TODO
-}
-- | Represents the entirety of the methods and resources available for the Cloud Composer API service.
-- Routes are combined with ':<|>'; each alternative's resource type is
-- defined in the correspondingly named
-- @Network.Google.Resource.Composer.*@ module imported above.
type ComposerAPI =
     ProjectsLocationsOperationsListResource :<|>
       ProjectsLocationsOperationsGetResource
       :<|> ProjectsLocationsOperationsDeleteResource
       :<|> ProjectsLocationsImageVersionsListResource
       :<|> ProjectsLocationsEnvironmentsListResource
       :<|> ProjectsLocationsEnvironmentsPatchResource
       :<|> ProjectsLocationsEnvironmentsGetResource
       :<|> ProjectsLocationsEnvironmentsCreateResource
       :<|> ProjectsLocationsEnvironmentsDeleteResource
| brendanhay/gogol | gogol-composer/gen/Network/Google/Composer.hs | mpl-2.0 | 8,265 | 0 | 12 | 1,675 | 837 | 610 | 227 | 198 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Books.Personalizedstream.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns a stream of personalized book clusters
--
-- /See:/ <https://code.google.com/apis/books/docs/v1/getting_started.html Books API Reference> for @books.personalizedstream.get@.
module Network.Google.Resource.Books.Personalizedstream.Get
(
-- * REST Resource
PersonalizedstreamGetResource
-- * Creating a Request
, personalizedstreamGet
, PersonalizedstreamGet
-- * Request Lenses
, pgXgafv
, pgUploadProtocol
, pgLocale
, pgAccessToken
, pgMaxAllowedMaturityRating
, pgUploadType
, pgSource
, pgCallback
) where
import Network.Google.Books.Types
import Network.Google.Prelude
-- | A resource alias for @books.personalizedstream.get@ method which the
-- 'PersonalizedstreamGet' request conforms to.
-- GET books/v1/personalizedstream/get with the query parameters mirroring
-- the request record's fields; the response body decodes as
-- 'Discoveryclusters'.  The literal path and parameter names are part of the
-- Books API contract.
type PersonalizedstreamGetResource =
     "books" :>
       "v1" :>
         "personalizedstream" :>
           "get" :>
             QueryParam "$.xgafv" Xgafv :>
               QueryParam "upload_protocol" Text :>
                 QueryParam "locale" Text :>
                   QueryParam "access_token" Text :>
                     QueryParam "maxAllowedMaturityRating"
                       PersonalizedstreamGetMaxAllowedMaturityRating
                       :>
                       QueryParam "uploadType" Text :>
                         QueryParam "source" Text :>
                           QueryParam "callback" Text :>
                             QueryParam "alt" AltJSON :>
                               Get '[JSON] Discoveryclusters
-- | Returns a stream of personalized book clusters
--
-- /See:/ 'personalizedstreamGet' smart constructor.
data PersonalizedstreamGet =
  PersonalizedstreamGet'
    { _pgXgafv :: !(Maybe Xgafv) -- ^ error format; see 'pgXgafv'
    , _pgUploadProtocol :: !(Maybe Text) -- ^ upload protocol; see 'pgUploadProtocol'
    , _pgLocale :: !(Maybe Text) -- ^ recommendation locale; see 'pgLocale'
    , _pgAccessToken :: !(Maybe Text) -- ^ OAuth token; see 'pgAccessToken'
    , _pgMaxAllowedMaturityRating :: !(Maybe PersonalizedstreamGetMaxAllowedMaturityRating) -- ^ maturity cap; see 'pgMaxAllowedMaturityRating'
    , _pgUploadType :: !(Maybe Text) -- ^ legacy upload type; see 'pgUploadType'
    , _pgSource :: !(Maybe Text) -- ^ request originator; see 'pgSource'
    , _pgCallback :: !(Maybe Text) -- ^ JSONP callback; see 'pgCallback'
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'PersonalizedstreamGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pgXgafv'
--
-- * 'pgUploadProtocol'
--
-- * 'pgLocale'
--
-- * 'pgAccessToken'
--
-- * 'pgMaxAllowedMaturityRating'
--
-- * 'pgUploadType'
--
-- * 'pgSource'
--
-- * 'pgCallback'
personalizedstreamGet
    :: PersonalizedstreamGet
-- Every parameter is optional, so the minimal request leaves all eight unset.
personalizedstreamGet =
  PersonalizedstreamGet'
    Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
-- | V1 error format.
pgXgafv :: Lens' PersonalizedstreamGet (Maybe Xgafv)
pgXgafv = lens _pgXgafv upd
  where
    upd s v = s {_pgXgafv = v}

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pgUploadProtocol :: Lens' PersonalizedstreamGet (Maybe Text)
pgUploadProtocol = lens _pgUploadProtocol upd
  where
    upd s v = s {_pgUploadProtocol = v}

-- | ISO-639-1 language and ISO-3166-1 country code. Ex: \'en_US\'. Used for
-- generating recommendations.
pgLocale :: Lens' PersonalizedstreamGet (Maybe Text)
pgLocale = lens _pgLocale upd
  where
    upd s v = s {_pgLocale = v}

-- | OAuth access token.
pgAccessToken :: Lens' PersonalizedstreamGet (Maybe Text)
pgAccessToken = lens _pgAccessToken upd
  where
    upd s v = s {_pgAccessToken = v}

-- | The maximum allowed maturity rating of returned recommendations. Books
-- with a higher maturity rating are filtered out.
pgMaxAllowedMaturityRating :: Lens' PersonalizedstreamGet (Maybe PersonalizedstreamGetMaxAllowedMaturityRating)
pgMaxAllowedMaturityRating = lens _pgMaxAllowedMaturityRating upd
  where
    upd s v = s {_pgMaxAllowedMaturityRating = v}

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pgUploadType :: Lens' PersonalizedstreamGet (Maybe Text)
pgUploadType = lens _pgUploadType upd
  where
    upd s v = s {_pgUploadType = v}

-- | String to identify the originator of this request.
pgSource :: Lens' PersonalizedstreamGet (Maybe Text)
pgSource = lens _pgSource upd
  where
    upd s v = s {_pgSource = v}

-- | JSONP
pgCallback :: Lens' PersonalizedstreamGet (Maybe Text)
pgCallback = lens _pgCallback upd
  where
    upd s v = s {_pgCallback = v}
-- Builds the HTTP client call from 'PersonalizedstreamGetResource': each
-- record field is passed positionally as the matching query parameter, the
-- response type is 'Discoveryclusters', and the books OAuth scope is
-- required.
instance GoogleRequest PersonalizedstreamGet where
        type Rs PersonalizedstreamGet = Discoveryclusters
        type Scopes PersonalizedstreamGet =
             '["https://www.googleapis.com/auth/books"]
        requestClient PersonalizedstreamGet'{..}
          = go _pgXgafv _pgUploadProtocol _pgLocale
              _pgAccessToken
              _pgMaxAllowedMaturityRating
              _pgUploadType
              _pgSource
              _pgCallback
              (Just AltJSON)
              booksService
          where go
                  = buildClient
                      (Proxy :: Proxy PersonalizedstreamGetResource)
                      mempty
| brendanhay/gogol | gogol-books/gen/Network/Google/Resource/Books/Personalizedstream/Get.hs | mpl-2.0 | 5,798 | 0 | 20 | 1,392 | 872 | 505 | 367 | 124 | 1 |
import ParsecPlayground
-- | Parse a small CSV sample and print the result: the parse error when
-- parsing fails, otherwise the parsed value.  'either show show' replaces
-- the manual case analysis on the 'Either' result.
main :: IO ()
main = putStrLn . either show show $ parseCSV "hello,aa\n1,2,3\n"
-- putStrLn "hello"
-- putStrLn $ show $ [(i,j) | i <- "ab", j <- [1..4]]
-- putStrLn $ show $ [(i,j) | i <- ["ab", "cd", "fgh"], j <- i]
-- putStrLn $ show $ [j | i <- ["ab", "cd", "fgh"], j <- i]
-- putStrLn $ show $
-- do i <- ["ab", "cd", "fgh"]
-- j <- i
-- return j | dkandalov/katas | haskell/p99/src/playground.hs | unlicense | 506 | 0 | 13 | 168 | 76 | 39 | 37 | 7 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-missing-fields #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-----------------------------------------------------------------
-- Autogenerated by Thrift
-- --
-- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-- @generated
-----------------------------------------------------------------
module Includes_Types where
import Prelude ( Bool(..), Enum, Float, IO, Double, String, Maybe(..),
Eq, Show, Ord,
concat, error, fromIntegral, fromEnum, length, map,
maybe, not, null, otherwise, return, show, toEnum,
enumFromTo, Bounded, minBound, maxBound, seq, succ,
pred, enumFrom, enumFromThen, enumFromThenTo,
(.), (&&), (||), (==), (++), ($), (-), (>>=), (>>))
import qualified Control.Applicative as Applicative (ZipList(..))
import Control.Applicative ( (<*>) )
import qualified Control.DeepSeq as DeepSeq
import qualified Control.Exception as Exception
import qualified Control.Monad as Monad ( liftM, ap, when )
import qualified Data.ByteString.Lazy as BS
import Data.Functor ( (<$>) )
import qualified Data.Hashable as Hashable
import qualified Data.Int as Int
import Data.List
import qualified Data.Maybe as Maybe (catMaybes)
import qualified Data.Text.Lazy.Encoding as Encoding ( decodeUtf8, encodeUtf8 )
import qualified Data.Text.Lazy as LT
import qualified Data.Typeable as Typeable ( Typeable )
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Vector as Vector
import qualified Test.QuickCheck.Arbitrary as Arbitrary ( Arbitrary(..) )
import qualified Test.QuickCheck as QuickCheck ( elements )
import qualified Thrift
import qualified Thrift.Types as Types
import qualified Thrift.Serializable as Serializable
import qualified Thrift.Arbitraries as Arbitraries
import qualified Transitive_Types as Transitive_Types
-- | Thrift typedef: an i64 declared in the included IDL.
type IncludedInt64 = Int.Int64
-- | Thrift typedef re-exporting 'Transitive_Types.Foo' from the
-- transitively included IDL.
type TransitiveFoo = Transitive_Types.Foo
-- | Definition of the Included struct, generated from the Thrift IDL.
data Included = Included
  { included_MyIntField :: Int.Int64
    -- ^ MyIntField field of the Included struct
  , included_MyTransitiveField :: Transitive_Types.Foo
    -- ^ MyTransitiveField field of the Included struct (type comes from
    -- the transitively included IDL)
  } deriving (Show,Eq,Typeable.Typeable)
-- | Serialization hooks; both delegate to the standalone helpers below.
instance Serializable.ThriftSerializable Included where
  encode = encode_Included
  decode = decode_Included
-- | Fold both fields into the salt, in declaration order.
instance Hashable.Hashable Included where
  hashWithSalt salt (Included intField transField) =
    salt `Hashable.hashWithSalt` intField `Hashable.hashWithSalt` transField
-- | Fully evaluate both fields.
instance DeepSeq.NFData Included where
  rnf (Included intField transField) =
    DeepSeq.rnf intField `seq` DeepSeq.rnf transField `seq` ()
-- | QuickCheck generation and shrinking.  Shrinking resets one field at
-- a time back to its value in 'default_Included', dropping candidates
-- that are identical to the value being shrunk.
instance Arbitrary.Arbitrary Included where
  arbitrary = Included <$> Arbitrary.arbitrary <*> Arbitrary.arbitrary
  shrink obj
    | obj == default_Included = []
    | otherwise =
        Maybe.catMaybes
          [ keep default_Included{included_MyIntField = included_MyIntField obj}
          , keep default_Included{included_MyTransitiveField =
                                    included_MyTransitiveField obj}
          ]
    where
      keep candidate
        | candidate == obj = Nothing
        | otherwise = Just candidate
-- | Translate a 'Included' to a 'Types.ThriftVal'
from_Included :: Included -> Types.ThriftVal
from_Included record =
  Types.TStruct $ Map.fromList $ Maybe.catMaybes
    [ Just (1, ("MyIntField", Types.TI64 (included_MyIntField record)))
    , Just (2, ("MyTransitiveField",
                Transitive_Types.from_Foo (included_MyTransitiveField record)))
    ]
-- | Write a 'Included' with the given 'Thrift.Protocol'
write_Included :: (Thrift.Protocol p, Thrift.Transport t) => p t -> Included -> IO ()
write_Included oprot = Thrift.writeVal oprot . from_Included
-- | Serialize a 'Included' in pure code
encode_Included :: (Thrift.Protocol p, Thrift.Transport t) => p t -> Included -> BS.ByteString
encode_Included oprot = Thrift.serializeVal oprot . from_Included
-- | Translate a 'Types.ThriftVal' to a 'Included'.  Missing fields fall
-- back to their values in 'default_Included'; a field of the wrong
-- Thrift type, or a non-struct input, aborts with 'error'.
to_Included :: Types.ThriftVal -> Included
to_Included (Types.TStruct fields) = Included
  { included_MyIntField =
      maybe (included_MyIntField default_Included)
            unpackInt
            (Map.lookup 1 fields)
  , included_MyTransitiveField =
      maybe (included_MyTransitiveField default_Included)
            unpackFoo
            (Map.lookup 2 fields)
  }
  where
    unpackInt (_, Types.TI64 v) = v
    unpackInt _ = error "wrong type"
    unpackFoo (_, Types.TStruct v) = Transitive_Types.to_Foo (Types.TStruct v)
    unpackFoo _ = error "wrong type"
to_Included _ = error "not a struct"
-- | Read a 'Included' struct with the given 'Thrift.Protocol'
read_Included :: (Thrift.Transport t, Thrift.Protocol p) => p t -> IO Included
read_Included iprot =
  fmap to_Included (Thrift.readVal iprot (Types.T_STRUCT typemap_Included))
-- | Deserialize a 'Included' in pure code
decode_Included :: (Thrift.Protocol p, Thrift.Transport t) => p t -> BS.ByteString -> Included
decode_Included iprot =
  to_Included . Thrift.deserializeVal iprot (Types.T_STRUCT typemap_Included)
-- | 'TypeMap' for the 'Included' struct: field name -> (id, wire type).
typemap_Included :: Types.TypeMap
typemap_Included =
  Map.fromList
    [ ("MyIntField", (1, Types.T_I64))
    , ("MyTransitiveField", (2, Types.T_STRUCT Transitive_Types.typemap_Foo))
    ]
-- | Default values for the 'Included' struct
default_Included :: Included
default_Included =
  Included
    { included_MyIntField = 0
    , included_MyTransitiveField =
        Transitive_Types.default_Foo{Transitive_Types.foo_a = 2}
    }
| facebook/fbthrift | thrift/compiler/test/fixtures/includes/gen-hs/Includes_Types.hs | apache-2.0 | 5,969 | 0 | 18 | 924 | 1,429 | 848 | 581 | 85 | 3 |
{-# LANGUAGE GADTSyntax #-}
{-# LANGUAGE KindSignatures #-}
{-# OPTIONS_GHC -Wno-deprecations -Wno-name-shadowing #-}
{-
Copyright 2019 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module CodeWorld.App
{-# WARNING "This is an experimental API. It can change at any time." #-}
( Rule,
timeRule,
eventRule,
pictureRule,
multiEventRule,
multiPictureRule,
subrule,
rules,
applicationOf,
unsafeMultiApplicationOf,
)
where
import CodeWorld
import Data.List (foldl')
import System.Random (StdGen)
-- | One aspect of an application's behavior: advancing the state over
-- time, reacting to an event, drawing a picture, or bundling several
-- sub-rules.  The 'Int' argument is a participant index (always 0 in
-- single-user 'applicationOf').
data Rule :: * -> * where
  -- | Step the state forward by a time delta.
  TimeRule :: (Double -> state -> state) -> Rule state
  -- | Update the state for an event from the given participant.
  EventRule :: (Int -> Event -> state -> state) -> Rule state
  -- | Render the state as seen by the given participant.
  PictureRule :: (Int -> state -> Picture) -> Rule state
  -- | A compound rule made of several sub-rules.
  Rules :: [Rule state] -> Rule state
-- | Build a rule that advances the state with each time step.
timeRule :: (Double -> state -> state) -> Rule state
timeRule = TimeRule
-- | Build a rule that reacts to events, ignoring the participant index.
eventRule :: (Event -> state -> state) -> Rule state
eventRule = EventRule . const
-- | Build a rule that draws the state, ignoring the participant index.
pictureRule :: (state -> Picture) -> Rule state
pictureRule = PictureRule . const
-- | Like 'eventRule', but the handler also receives the participant index.
multiEventRule :: (Int -> Event -> state -> state) -> Rule state
multiEventRule = EventRule
-- | Like 'pictureRule', but the renderer also receives the participant index.
multiPictureRule :: (Int -> state -> Picture) -> Rule state
multiPictureRule = PictureRule
-- | Lift a rule on a component of the state to a rule on the whole
-- state, given how to extract the component ('getter') and how to store
-- it back ('setter').  Compound rules are lifted recursively.
subrule :: (a -> b) -> (b -> a -> a) -> Rule b -> Rule a
subrule getter setter = go
  where
    go (TimeRule stepB) =
      TimeRule (\dt whole -> setter (stepB dt (getter whole)) whole)
    go (EventRule eventB) =
      EventRule (\k ev whole -> setter (eventB k ev (getter whole)) whole)
    go (PictureRule picB) =
      PictureRule (\k -> picB k . getter)
    go (Rules subrules) = Rules (map go subrules)
-- | Combine several rules into a single compound rule.
rules :: [Rule state] -> Rule state
rules = Rules
-- | Run a single-user application described by an initial state and a
-- list of rules.  All time and event handlers are composed in list
-- order; all picture handlers are overlaid with 'pictures'.
applicationOf :: world -> [Rule world] -> IO ()
applicationOf initialWorld ruleList =
  interactionOf initialWorld stepAll eventAll drawAll
  where
    stepAll dt = foldl' (.) id (map ($ dt) (concatMap stepsOf ruleList))
    eventAll ev = foldl' (.) id (map ($ ev) (concatMap eventsOf ruleList))
    drawAll w = pictures (map ($ w) (concatMap picturesOf ruleList))
    stepsOf (TimeRule f) = [f]
    stepsOf (Rules rs) = concatMap stepsOf rs
    stepsOf _ = []
    -- Event/picture handlers take a participant index; a single-user
    -- application always uses index 0.
    eventsOf (EventRule f) = [f 0]
    eventsOf (Rules rs) = concatMap eventsOf rs
    eventsOf _ = []
    picturesOf (PictureRule f) = [f 0]
    picturesOf (Rules rs) = concatMap picturesOf rs
    picturesOf _ = []
-- | Run a multi-participant application: @n@ participants, an initial
-- state built from a random seed, and a list of rules.  Unlike
-- 'applicationOf', event and picture handlers receive the real
-- participant index.
unsafeMultiApplicationOf :: Int -> (StdGen -> state) -> [Rule state] -> IO ()
unsafeMultiApplicationOf n initial ruleList =
  unsafeCollaborationOf n initial stepAll eventAll drawAll
  where
    stepAll dt = foldl' (.) id (map ($ dt) (concatMap stepsOf ruleList))
    eventAll k ev = foldl' (.) id [f k ev | f <- concatMap eventsOf ruleList]
    drawAll k w = pictures [f k w | f <- concatMap picturesOf ruleList]
    stepsOf (TimeRule f) = [f]
    stepsOf (Rules rs) = concatMap stepsOf rs
    stepsOf _ = []
    eventsOf (EventRule f) = [f]
    eventsOf (Rules rs) = concatMap eventsOf rs
    eventsOf _ = []
    picturesOf (PictureRule f) = [f]
    picturesOf (Rules rs) = concatMap picturesOf rs
    picturesOf _ = []
| alphalambda/codeworld | codeworld-api/src/CodeWorld/App.hs | apache-2.0 | 3,764 | 0 | 11 | 785 | 1,163 | 595 | 568 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Kubernetes.V1.PodTemplate where
import GHC.Generics
import Data.Text
import Kubernetes.V1.ObjectMeta
import Kubernetes.V1.PodTemplateSpec
import qualified Data.Aeson
-- | PodTemplate describes a template for creating copies of a predefined pod.
-- Field names and optionality mirror the Kubernetes v1 REST API.
data PodTemplate = PodTemplate
  { kind :: Maybe Text -- ^ Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
  , apiVersion :: Maybe Text -- ^ APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#resources
  , metadata :: Maybe ObjectMeta -- ^ Standard object's metadata. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#metadata
  , template :: Maybe PodTemplateSpec -- ^ Template defines the pods that will be created from this pod template. http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#spec-and-status
  } deriving (Show, Eq, Generic)
-- JSON codecs derived generically (aeson defaults: record field names
-- are used as JSON keys).
instance Data.Aeson.FromJSON PodTemplate
instance Data.Aeson.ToJSON PodTemplate
| minhdoboi/deprecated-openshift-haskell-api | kubernetes/lib/Kubernetes/V1/PodTemplate.hs | apache-2.0 | 1,505 | 0 | 9 | 198 | 125 | 76 | 49 | 19 | 0 |
-- | The n-th triangle number (1 + 2 + ... + n) by simple recursion.
-- Does not terminate for negative input.
triangleNumber n
  | n == 0 = 0
  | otherwise = n + triangleNumber (n - 1)
-- | Infinite ascending list of triangle numbers: 1, 3, 6, 10, ...
triangleNumbers = triangleWorker 1 0

-- | Stream triangle numbers lazily: @n@ is the next natural number to
-- add, @runningTotal@ the triangle number produced so far.
triangleWorker n runningTotal = current : triangleWorker (n + 1) current
  where
    current = n + runningTotal
-- | Number of divisors of @n@, by O(n) trial division.
-- The original @filter (==True) . map p@ is the redundant form of a
-- single predicate filter; a comprehension says the same thing directly.
-- Returns 0 for n <= 0 (the candidate range @[1..n]@ is empty).
numDivisors n = length [d | d <- [1 .. n], n `rem` d == 0]
-- | Project Euler problem 12: print the first triangle number with more
-- than 500 divisors.  Brute-force scan of the infinite triangle-number
-- stream; 'head' assumes such a number exists (the filter is lazy and
-- the stream infinite).
main = do
  let limit = 500
  let numbersOfInterest = filter ((>limit) . numDivisors) triangleNumbers
  print $ head $ numbersOfInterest | ulikoehler/ProjectEuler | Euler12.hs | apache-2.0 | 468 | 0 | 13 | 115 | 183 | 93 | 90 | 10 | 1
{-# LANGUAGE ExistentialQuantification, TemplateHaskell, StandaloneDeriving #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Implementation of the opcodes.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013, 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.OpCodes
( pyClasses
, OpCode(..)
, ReplaceDisksMode(..)
, DiskIndex
, mkDiskIndex
, unDiskIndex
, opID
, opReasonSrcID
, allOpIDs
, allOpFields
, opSummary
, CommonOpParams(..)
, defOpParams
, MetaOpCode(..)
, resolveDependencies
, wrapOpCode
, setOpComment
, setOpPriority
) where
import Control.Applicative
import Data.List (intercalate)
import Data.Map (Map)
import qualified Text.JSON
import Text.JSON (readJSON, JSObject, JSON, JSValue(..), fromJSObject)
import qualified Ganeti.Constants as C
import qualified Ganeti.Hs2Py.OpDoc as OpDoc
import Ganeti.JSON (DictObject(..), readJSONfromDict, showJSONtoDict)
import Ganeti.OpParams
import Ganeti.PyValue ()
import Ganeti.Query.Language (queryTypeOpToRaw)
import Ganeti.THH
import Ganeti.Types
-- | Disk indices render as their underlying integer.
instance PyValue DiskIndex where
  showValue di = showValue (unDiskIndex di)
-- | Disk parameters have no Python rendering; any attempt aborts.
instance PyValue IDiskParams where
  showValue _ = error "OpCodes.showValue(IDiskParams): unhandled case"
-- | \"Recreate all disks\" renders as the empty list; explicit indices
-- or per-disk parameters render as the corresponding list value.
instance PyValue RecreateDisksInfo where
  showValue RecreateDisksAll = "[]"
  showValue (RecreateDisksIndices is) = showValue is
  showValue (RecreateDisksParams is) = showValue is
-- | Only the empty modification list can be rendered; any other
-- constructor aborts with an error.
instance PyValue a => PyValue (SetParamsMods a) where
  showValue SetParamsEmpty = "[]"
  showValue _ = error "OpCodes.showValue(SetParamsMods): unhandled case"
-- | Render the wrapped non-negative number.
instance PyValue a => PyValue (NonNegative a) where
  showValue = showValue . fromNonNegative
-- | Render the wrapped non-empty value (as a plain list/value).
instance PyValue a => PyValue (NonEmpty a) where
  showValue = showValue . fromNonEmpty
-- FIXME: should use the 'toRaw' function instead of being hardcoded or
-- perhaps use something similar to the NonNegative type instead of
-- using the declareSADT
instance PyValue ExportMode where
  showValue ExportModeLocal = show C.exportModeLocal
  -- Bug fix: this previously rendered as 'C.exportModeLocal' as well
  -- (copy-paste error), so both export modes were indistinguishable on
  -- the Python side.
  showValue ExportModeRemote = show C.exportModeRemote
-- | Cluster-verify error codes render via their raw representation.
instance PyValue CVErrorCode where
  showValue = cVErrorCodeToRaw
-- | Optional-verify-check values render via their raw representation.
instance PyValue VerifyOptionalChecks where
  showValue = verifyOptionalChecksToRaw
-- | NIC parameters have no Python rendering yet; always aborts.
instance PyValue INicParams where
  showValue = error "instance PyValue INicParams: not implemented"
-- | JSON objects render as Python dict literals: one @key:value@ pair
-- per field, comma-separated inside braces.
instance PyValue a => PyValue (JSObject a) where
  showValue obj = "{" ++ intercalate ", " entries ++ "}"
    where
      entries = [show k ++ ":" ++ showValue v | (k, v) <- fromJSObject obj]
-- | Objects delegate to the 'JSObject' instance; every other JSON value
-- falls back to its 'Show' representation.
instance PyValue JSValue where
  showValue (JSObject obj) = showValue obj
  showValue x = show x
-- | Per-name job submission results: success flag plus job id or error.
type JobIdListOnly = Map String [(Bool, Either String JobId)]
-- | Result of a multi-instance allocation: per-instance job results plus
-- an informational string.
type InstanceMultiAllocResponse =
  ([(Bool, Either String JobId)], NonEmptyString)
-- | A query field definition: name, title, kind tag and documentation.
type QueryFieldDef =
  (NonEmptyString, NonEmptyString, TagKind, NonEmptyString)
-- | A query result: the field definitions plus the rows of
-- (status, value) cells.
type QueryResponse =
  ([QueryFieldDef], [[(QueryResultCode, JSValue)]])
-- | Result of a fields-only query.
type QueryFieldsResponse = [QueryFieldDef]
-- | OpCode representation.
--
-- We only implement a subset of Ganeti opcodes: those which are actually used
-- in the htools codebase.
$(genOpCode "OpCode"
[ ("OpClusterPostInit",
[t| Bool |],
OpDoc.opClusterPostInit,
[],
[])
, ("OpClusterDestroy",
[t| NonEmptyString |],
OpDoc.opClusterDestroy,
[],
[])
, ("OpClusterQuery",
[t| JSObject JSValue |],
OpDoc.opClusterQuery,
[],
[])
, ("OpClusterVerify",
[t| JobIdListOnly |],
OpDoc.opClusterVerify,
[ pDebugSimulateErrors
, pErrorCodes
, pSkipChecks
, pIgnoreErrors
, pVerbose
, pOptGroupName
, pVerifyClutter
],
[])
, ("OpClusterVerifyConfig",
[t| Bool |],
OpDoc.opClusterVerifyConfig,
[ pDebugSimulateErrors
, pErrorCodes
, pIgnoreErrors
, pVerbose
],
[])
, ("OpClusterVerifyGroup",
[t| Bool |],
OpDoc.opClusterVerifyGroup,
[ pGroupName
, pDebugSimulateErrors
, pErrorCodes
, pSkipChecks
, pIgnoreErrors
, pVerbose
, pVerifyClutter
],
"group_name")
, ("OpClusterVerifyDisks",
[t| JobIdListOnly |],
OpDoc.opClusterVerifyDisks,
[],
[])
, ("OpGroupVerifyDisks",
[t| (Map String String, [String], Map String [[String]]) |],
OpDoc.opGroupVerifyDisks,
[ pGroupName
],
"group_name")
, ("OpClusterRepairDiskSizes",
[t| [(NonEmptyString, NonNegative Int, NonEmptyString, NonNegative Int)]|],
OpDoc.opClusterRepairDiskSizes,
[ pInstances
],
[])
, ("OpClusterConfigQuery",
[t| [JSValue] |],
OpDoc.opClusterConfigQuery,
[ pOutputFields
],
[])
, ("OpClusterRename",
[t| NonEmptyString |],
OpDoc.opClusterRename,
[ pName
],
"name")
, ("OpClusterSetParams",
[t| Either () JobIdListOnly |],
OpDoc.opClusterSetParams,
[ pForce
, pHvState
, pDiskState
, pVgName
, pEnabledHypervisors
, pClusterHvParams
, pClusterBeParams
, pOsHvp
, pClusterOsParams
, pClusterOsParamsPrivate
, pGroupDiskParams
, pCandidatePoolSize
, pMaxRunningJobs
, pMaxTrackedJobs
, pUidPool
, pAddUids
, pRemoveUids
, pMaintainNodeHealth
, pPreallocWipeDisks
, pNicParams
, withDoc "Cluster-wide node parameter defaults" pNdParams
, withDoc "Cluster-wide ipolicy specs" pIpolicy
, pDrbdHelper
, pDefaultIAllocator
, pDefaultIAllocatorParams
, pNetworkMacPrefix
, pMasterNetdev
, pMasterNetmask
, pReservedLvs
, pHiddenOs
, pBlacklistedOs
, pUseExternalMipScript
, pEnabledDiskTemplates
, pModifyEtcHosts
, pClusterFileStorageDir
, pClusterSharedFileStorageDir
, pClusterGlusterStorageDir
, pInstallImage
, pInstanceCommunicationNetwork
, pZeroingImage
, pCompressionTools
, pEnabledUserShutdown
, pEnabledDataCollectors
, pDataCollectorInterval
],
[])
, ("OpClusterRedistConf",
[t| () |],
OpDoc.opClusterRedistConf,
[],
[])
, ("OpClusterActivateMasterIp",
[t| () |],
OpDoc.opClusterActivateMasterIp,
[],
[])
, ("OpClusterDeactivateMasterIp",
[t| () |],
OpDoc.opClusterDeactivateMasterIp,
[],
[])
, ("OpClusterRenewCrypto",
[t| () |],
OpDoc.opClusterRenewCrypto,
[ pNodeSslCerts
, pSshKeys
],
[])
, ("OpQuery",
[t| QueryResponse |],
OpDoc.opQuery,
[ pQueryWhat
, pUseLocking
, pQueryFields
, pQueryFilter
],
"what")
, ("OpQueryFields",
[t| QueryFieldsResponse |],
OpDoc.opQueryFields,
[ pQueryWhat
, pQueryFieldsFields
],
"what")
, ("OpOobCommand",
[t| [[(QueryResultCode, JSValue)]] |],
OpDoc.opOobCommand,
[ pNodeNames
, withDoc "List of node UUIDs to run the OOB command against" pNodeUuids
, pOobCommand
, pOobTimeout
, pIgnoreStatus
, pPowerDelay
],
[])
, ("OpRestrictedCommand",
[t| [(Bool, String)] |],
OpDoc.opRestrictedCommand,
[ pUseLocking
, withDoc
"Nodes on which the command should be run (at least one)"
pRequiredNodes
, withDoc
"Node UUIDs on which the command should be run (at least one)"
pRequiredNodeUuids
, pRestrictedCommand
],
[])
, ("OpNodeRemove",
[t| () |],
OpDoc.opNodeRemove,
[ pNodeName
, pNodeUuid
],
"node_name")
, ("OpNodeAdd",
[t| () |],
OpDoc.opNodeAdd,
[ pNodeName
, pHvState
, pDiskState
, pPrimaryIp
, pSecondaryIp
, pReadd
, pNodeGroup
, pMasterCapable
, pVmCapable
, pNdParams
, pNodeSetup
],
"node_name")
, ("OpNodeQueryvols",
[t| [JSValue] |],
OpDoc.opNodeQueryvols,
[ pOutputFields
, withDoc "Empty list to query all nodes, node names otherwise" pNodes
],
[])
, ("OpNodeQueryStorage",
[t| [[JSValue]] |],
OpDoc.opNodeQueryStorage,
[ pOutputFields
, pOptStorageType
, withDoc
"Empty list to query all, list of names to query otherwise"
pNodes
, pStorageName
],
[])
, ("OpNodeModifyStorage",
[t| () |],
OpDoc.opNodeModifyStorage,
[ pNodeName
, pNodeUuid
, pStorageType
, pStorageName
, pStorageChanges
],
"node_name")
, ("OpRepairNodeStorage",
[t| () |],
OpDoc.opRepairNodeStorage,
[ pNodeName
, pNodeUuid
, pStorageType
, pStorageName
, pIgnoreConsistency
],
"node_name")
, ("OpNodeSetParams",
[t| [(NonEmptyString, JSValue)] |],
OpDoc.opNodeSetParams,
[ pNodeName
, pNodeUuid
, pForce
, pHvState
, pDiskState
, pMasterCandidate
, withDoc "Whether to mark the node offline" pOffline
, pDrained
, pAutoPromote
, pMasterCapable
, pVmCapable
, pSecondaryIp
, pNdParams
, pPowered
],
"node_name")
, ("OpNodePowercycle",
[t| Maybe NonEmptyString |],
OpDoc.opNodePowercycle,
[ pNodeName
, pNodeUuid
, pForce
],
"node_name")
, ("OpNodeMigrate",
[t| JobIdListOnly |],
OpDoc.opNodeMigrate,
[ pNodeName
, pNodeUuid
, pMigrationMode
, pMigrationLive
, pMigrationTargetNode
, pMigrationTargetNodeUuid
, pAllowRuntimeChgs
, pIgnoreIpolicy
, pIallocator
],
"node_name")
, ("OpNodeEvacuate",
[t| JobIdListOnly |],
OpDoc.opNodeEvacuate,
[ pEarlyRelease
, pNodeName
, pNodeUuid
, pRemoteNode
, pRemoteNodeUuid
, pIallocator
, pEvacMode
],
"node_name")
, ("OpInstanceCreate",
[t| [NonEmptyString] |],
OpDoc.opInstanceCreate,
[ pInstanceName
, pForceVariant
, pWaitForSync
, pNameCheck
, pIgnoreIpolicy
, pOpportunisticLocking
, pInstBeParams
, pInstDisks
, pOptDiskTemplate
, pOptGroupName
, pFileDriver
, pFileStorageDir
, pInstHvParams
, pHypervisor
, pIallocator
, pResetDefaults
, pIpCheck
, pIpConflictsCheck
, pInstCreateMode
, pInstNics
, pNoInstall
, pInstOsParams
, pInstOsParamsPrivate
, pInstOsParamsSecret
, pInstOs
, pPrimaryNode
, pPrimaryNodeUuid
, pSecondaryNode
, pSecondaryNodeUuid
, pSourceHandshake
, pSourceInstance
, pSourceShutdownTimeout
, pSourceX509Ca
, pSrcNode
, pSrcNodeUuid
, pSrcPath
, pBackupCompress
, pStartInstance
, pForthcoming
, pCommit
, pInstTags
, pInstanceCommunication
, pHelperStartupTimeout
, pHelperShutdownTimeout
],
"instance_name")
, ("OpInstanceMultiAlloc",
[t| InstanceMultiAllocResponse |],
OpDoc.opInstanceMultiAlloc,
[ pOpportunisticLocking
, pIallocator
, pMultiAllocInstances
],
[])
, ("OpInstanceReinstall",
[t| () |],
OpDoc.opInstanceReinstall,
[ pInstanceName
, pInstanceUuid
, pForceVariant
, pInstOs
, pTempOsParams
, pTempOsParamsPrivate
, pTempOsParamsSecret
],
"instance_name")
, ("OpInstanceRemove",
[t| () |],
OpDoc.opInstanceRemove,
[ pInstanceName
, pInstanceUuid
, pShutdownTimeout
, pIgnoreFailures
],
"instance_name")
, ("OpInstanceRename",
[t| NonEmptyString |],
OpDoc.opInstanceRename,
[ pInstanceName
, pInstanceUuid
, withDoc "New instance name" pNewName
, pNameCheck
, pIpCheck
],
[])
, ("OpInstanceStartup",
[t| () |],
OpDoc.opInstanceStartup,
[ pInstanceName
, pInstanceUuid
, pForce
, pIgnoreOfflineNodes
, pTempHvParams
, pTempBeParams
, pNoRemember
, pStartupPaused
-- timeout to cleanup a user down instance
, pShutdownTimeout
],
"instance_name")
, ("OpInstanceShutdown",
[t| () |],
OpDoc.opInstanceShutdown,
[ pInstanceName
, pInstanceUuid
, pForce
, pIgnoreOfflineNodes
, pShutdownTimeout'
, pNoRemember
, pAdminStateSource
],
"instance_name")
, ("OpInstanceReboot",
[t| () |],
OpDoc.opInstanceReboot,
[ pInstanceName
, pInstanceUuid
, pShutdownTimeout
, pIgnoreSecondaries
, pRebootType
],
"instance_name")
, ("OpInstanceReplaceDisks",
[t| () |],
OpDoc.opInstanceReplaceDisks,
[ pInstanceName
, pInstanceUuid
, pEarlyRelease
, pIgnoreIpolicy
, pReplaceDisksMode
, pReplaceDisksList
, pRemoteNode
, pRemoteNodeUuid
, pIallocator
],
"instance_name")
, ("OpInstanceFailover",
[t| () |],
OpDoc.opInstanceFailover,
[ pInstanceName
, pInstanceUuid
, pShutdownTimeout
, pIgnoreConsistency
, pMigrationTargetNode
, pMigrationTargetNodeUuid
, pIgnoreIpolicy
, pMigrationCleanup
, pIallocator
],
"instance_name")
, ("OpInstanceMigrate",
[t| () |],
OpDoc.opInstanceMigrate,
[ pInstanceName
, pInstanceUuid
, pMigrationMode
, pMigrationLive
, pMigrationTargetNode
, pMigrationTargetNodeUuid
, pAllowRuntimeChgs
, pIgnoreIpolicy
, pMigrationCleanup
, pIallocator
, pAllowFailover
, pIgnoreHVVersions
],
"instance_name")
, ("OpInstanceMove",
[t| () |],
OpDoc.opInstanceMove,
[ pInstanceName
, pInstanceUuid
, pShutdownTimeout
, pIgnoreIpolicy
, pMoveTargetNode
, pMoveTargetNodeUuid
, pMoveCompress
, pIgnoreConsistency
],
"instance_name")
, ("OpInstanceConsole",
[t| JSObject JSValue |],
OpDoc.opInstanceConsole,
[ pInstanceName
, pInstanceUuid
],
"instance_name")
, ("OpInstanceActivateDisks",
[t| [(NonEmptyString, NonEmptyString, NonEmptyString)] |],
OpDoc.opInstanceActivateDisks,
[ pInstanceName
, pInstanceUuid
, pIgnoreDiskSize
, pWaitForSyncFalse
],
"instance_name")
, ("OpInstanceDeactivateDisks",
[t| () |],
OpDoc.opInstanceDeactivateDisks,
[ pInstanceName
, pInstanceUuid
, pForce
],
"instance_name")
, ("OpInstanceRecreateDisks",
[t| () |],
OpDoc.opInstanceRecreateDisks,
[ pInstanceName
, pInstanceUuid
, pRecreateDisksInfo
, withDoc "New instance nodes, if relocation is desired" pNodes
, withDoc "New instance node UUIDs, if relocation is desired" pNodeUuids
, pIallocator
],
"instance_name")
, ("OpInstanceQueryData",
[t| JSObject (JSObject JSValue) |],
OpDoc.opInstanceQueryData,
[ pUseLocking
, pInstances
, pStatic
],
[])
, ("OpInstanceSetParams",
[t| [(NonEmptyString, JSValue)] |],
OpDoc.opInstanceSetParams,
[ pInstanceName
, pInstanceUuid
, pForce
, pForceVariant
, pIgnoreIpolicy
, pInstParamsNicChanges
, pInstParamsDiskChanges
, pInstBeParams
, pRuntimeMem
, pInstHvParams
, pOptDiskTemplate
, pExtParams
, pFileDriver
, pFileStorageDir
, pPrimaryNode
, pPrimaryNodeUuid
, withDoc "Secondary node (used when changing disk template)" pRemoteNode
, withDoc
"Secondary node UUID (used when changing disk template)"
pRemoteNodeUuid
, pOsNameChange
, pInstOsParams
, pInstOsParamsPrivate
, pWaitForSync
, withDoc "Whether to mark the instance as offline" pOffline
, pIpConflictsCheck
, pHotplug
, pHotplugIfPossible
, pOptInstanceCommunication
],
"instance_name")
, ("OpInstanceGrowDisk",
[t| () |],
OpDoc.opInstanceGrowDisk,
[ pInstanceName
, pInstanceUuid
, pWaitForSync
, pDiskIndex
, pDiskChgAmount
, pDiskChgAbsolute
, pIgnoreIpolicy
],
"instance_name")
, ("OpInstanceChangeGroup",
[t| JobIdListOnly |],
OpDoc.opInstanceChangeGroup,
[ pInstanceName
, pInstanceUuid
, pEarlyRelease
, pIallocator
, pTargetGroups
],
"instance_name")
, ("OpGroupAdd",
[t| Either () JobIdListOnly |],
OpDoc.opGroupAdd,
[ pGroupName
, pNodeGroupAllocPolicy
, pGroupNodeParams
, pGroupDiskParams
, pHvState
, pDiskState
, withDoc "Group-wide ipolicy specs" pIpolicy
],
"group_name")
, ("OpGroupAssignNodes",
[t| () |],
OpDoc.opGroupAssignNodes,
[ pGroupName
, pForce
, withDoc "List of nodes to assign" pRequiredNodes
, withDoc "List of node UUIDs to assign" pRequiredNodeUuids
],
"group_name")
, ("OpGroupSetParams",
[t| [(NonEmptyString, JSValue)] |],
OpDoc.opGroupSetParams,
[ pGroupName
, pNodeGroupAllocPolicy
, pGroupNodeParams
, pGroupDiskParams
, pHvState
, pDiskState
, withDoc "Group-wide ipolicy specs" pIpolicy
],
"group_name")
, ("OpGroupRemove",
[t| () |],
OpDoc.opGroupRemove,
[ pGroupName
],
"group_name")
, ("OpGroupRename",
[t| NonEmptyString |],
OpDoc.opGroupRename,
[ pGroupName
, withDoc "New group name" pNewName
],
[])
, ("OpGroupEvacuate",
[t| JobIdListOnly |],
OpDoc.opGroupEvacuate,
[ pGroupName
, pEarlyRelease
, pIallocator
, pTargetGroups
, pSequential
, pForceFailover
],
"group_name")
, ("OpOsDiagnose",
[t| [[JSValue]] |],
OpDoc.opOsDiagnose,
[ pOutputFields
, withDoc "Which operating systems to diagnose" pNames
],
[])
, ("OpExtStorageDiagnose",
[t| [[JSValue]] |],
OpDoc.opExtStorageDiagnose,
[ pOutputFields
, withDoc "Which ExtStorage Provider to diagnose" pNames
],
[])
, ("OpBackupPrepare",
[t| Maybe (JSObject JSValue) |],
OpDoc.opBackupPrepare,
[ pInstanceName
, pInstanceUuid
, pExportMode
],
"instance_name")
, ("OpBackupExport",
[t| (Bool, [Bool]) |],
OpDoc.opBackupExport,
[ pInstanceName
, pInstanceUuid
, pBackupCompress
, pShutdownTimeout
, pExportTargetNode
, pExportTargetNodeUuid
, pShutdownInstance
, pRemoveInstance
, pIgnoreRemoveFailures
, defaultField [| ExportModeLocal |] pExportMode
, pX509KeyName
, pX509DestCA
, pZeroFreeSpace
, pZeroingTimeoutFixed
, pZeroingTimeoutPerMiB
],
"instance_name")
, ("OpBackupRemove",
[t| () |],
OpDoc.opBackupRemove,
[ pInstanceName
, pInstanceUuid
],
"instance_name")
, ("OpTagsGet",
[t| [NonEmptyString] |],
OpDoc.opTagsGet,
[ pTagsObject
, pUseLocking
, withDoc "Name of object to retrieve tags from" pTagsName
],
"name")
, ("OpTagsSearch",
[t| [(NonEmptyString, NonEmptyString)] |],
OpDoc.opTagsSearch,
[ pTagSearchPattern
],
"pattern")
, ("OpTagsSet",
[t| () |],
OpDoc.opTagsSet,
[ pTagsObject
, pTagsList
, withDoc "Name of object where tag(s) should be added" pTagsName
],
[])
, ("OpTagsDel",
[t| () |],
OpDoc.opTagsDel,
[ pTagsObject
, pTagsList
, withDoc "Name of object where tag(s) should be deleted" pTagsName
],
[])
, ("OpTestDelay",
[t| () |],
OpDoc.opTestDelay,
[ pDelayDuration
, pDelayOnMaster
, pDelayOnNodes
, pDelayOnNodeUuids
, pDelayRepeat
, pDelayInterruptible
, pDelayNoLocks
],
"duration")
, ("OpTestAllocator",
[t| String |],
OpDoc.opTestAllocator,
[ pIAllocatorDirection
, pIAllocatorMode
, pIAllocatorReqName
, pIAllocatorNics
, pIAllocatorDisks
, pHypervisor
, pIallocator
, pInstTags
, pIAllocatorMemory
, pIAllocatorVCpus
, pIAllocatorOs
, pDiskTemplate
, pIAllocatorInstances
, pIAllocatorEvacMode
, pTargetGroups
, pIAllocatorSpindleUse
, pIAllocatorCount
, pOptGroupName
],
"iallocator")
, ("OpTestJqueue",
[t| Bool |],
OpDoc.opTestJqueue,
[ pJQueueNotifyWaitLock
, pJQueueNotifyExec
, pJQueueLogMessages
, pJQueueFail
],
[])
, ("OpTestDummy",
[t| () |],
OpDoc.opTestDummy,
[ pTestDummyResult
, pTestDummyMessages
, pTestDummyFail
, pTestDummySubmitJobs
],
[])
, ("OpNetworkAdd",
[t| () |],
OpDoc.opNetworkAdd,
[ pNetworkName
, pNetworkAddress4
, pNetworkGateway4
, pNetworkAddress6
, pNetworkGateway6
, pNetworkMacPrefix
, pNetworkAddRsvdIps
, pIpConflictsCheck
, withDoc "Network tags" pInstTags
],
"network_name")
, ("OpNetworkRemove",
[t| () |],
OpDoc.opNetworkRemove,
[ pNetworkName
, pForce
],
"network_name")
, ("OpNetworkSetParams",
[t| () |],
OpDoc.opNetworkSetParams,
[ pNetworkName
, pNetworkGateway4
, pNetworkAddress6
, pNetworkGateway6
, pNetworkMacPrefix
, withDoc "Which external IP addresses to reserve" pNetworkAddRsvdIps
, pNetworkRemoveRsvdIps
],
"network_name")
, ("OpNetworkConnect",
[t| () |],
OpDoc.opNetworkConnect,
[ pGroupName
, pNetworkName
, pNetworkMode
, pNetworkLink
, pNetworkVlan
, pIpConflictsCheck
],
"network_name")
, ("OpNetworkDisconnect",
[t| () |],
OpDoc.opNetworkDisconnect,
[ pGroupName
, pNetworkName
],
"network_name")
])
-- | Standalone 'Ord' instance for the TH-generated 'OpCode' type.
deriving instance Ord OpCode
-- | Returns the OP_ID for a given opcode value.
$(genOpID ''OpCode "opID")
-- | A list of all defined/supported opcode IDs.
$(genAllOpIDs ''OpCode "allOpIDs")
-- | Convert the opcode name to lowercase with underscores and strip
-- the @Op@ prefix; the result is prefixed with the opcode reason-source
-- marker (first splice argument).
$(genOpLowerStrip (C.opcodeReasonSrcOpcode ++ ":") ''OpCode "opReasonSrcID")
-- | Opcodes (de)serialize as JSON dictionaries.
instance JSON OpCode where
  readJSON = readJSONfromDict
  showJSON = showJSONtoDict
-- | Generates the summary value for an opcode.
--
-- For most opcodes this is the name of the main object the opcode
-- operates on (node, instance, group or network); opcodes without such
-- a distinguished object yield 'Nothing' via the final catch-all
-- clause.  Note that the catch-all must stay last.
opSummaryVal :: OpCode -> Maybe String
opSummaryVal OpClusterVerifyGroup { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpGroupVerifyDisks { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpClusterRename { opName = s } = Just (fromNonEmpty s)
opSummaryVal OpQuery { opWhat = s } = Just (queryTypeOpToRaw s)
opSummaryVal OpQueryFields { opWhat = s } = Just (queryTypeOpToRaw s)
opSummaryVal OpNodeRemove { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpNodeAdd { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpNodeModifyStorage { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpRepairNodeStorage { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpNodeSetParams { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpNodePowercycle { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpNodeMigrate { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpNodeEvacuate { opNodeName = s } = Just (fromNonEmpty s)
opSummaryVal OpInstanceCreate { opInstanceName = s } = Just s
opSummaryVal OpInstanceReinstall { opInstanceName = s } = Just s
opSummaryVal OpInstanceRemove { opInstanceName = s } = Just s
-- FIXME: instance rename should show both names; currently it shows none
-- opSummaryVal OpInstanceRename { opInstanceName = s } = Just s
opSummaryVal OpInstanceStartup { opInstanceName = s } = Just s
opSummaryVal OpInstanceShutdown { opInstanceName = s } = Just s
opSummaryVal OpInstanceReboot { opInstanceName = s } = Just s
opSummaryVal OpInstanceReplaceDisks { opInstanceName = s } = Just s
opSummaryVal OpInstanceFailover { opInstanceName = s } = Just s
opSummaryVal OpInstanceMigrate { opInstanceName = s } = Just s
opSummaryVal OpInstanceMove { opInstanceName = s } = Just s
opSummaryVal OpInstanceConsole { opInstanceName = s } = Just s
opSummaryVal OpInstanceActivateDisks { opInstanceName = s } = Just s
opSummaryVal OpInstanceDeactivateDisks { opInstanceName = s } = Just s
opSummaryVal OpInstanceRecreateDisks { opInstanceName = s } = Just s
opSummaryVal OpInstanceSetParams { opInstanceName = s } = Just s
opSummaryVal OpInstanceGrowDisk { opInstanceName = s } = Just s
opSummaryVal OpInstanceChangeGroup { opInstanceName = s } = Just s
opSummaryVal OpGroupAdd { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpGroupAssignNodes { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpGroupSetParams { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpGroupRemove { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpGroupEvacuate { opGroupName = s } = Just (fromNonEmpty s)
opSummaryVal OpBackupPrepare { opInstanceName = s } = Just s
opSummaryVal OpBackupExport { opInstanceName = s } = Just s
opSummaryVal OpBackupRemove { opInstanceName = s } = Just s
opSummaryVal OpTagsGet { opKind = s } = Just (show s)
opSummaryVal OpTagsSearch { opTagSearchPattern = s } = Just (fromNonEmpty s)
opSummaryVal OpTestDelay { opDelayDuration = d } = Just (show d)
opSummaryVal OpTestAllocator { opIallocator = s } =
  -- FIXME: Python doesn't handle None fields well, so we have to behave the same
  Just $ maybe "None" fromNonEmpty s
opSummaryVal OpNetworkAdd { opNetworkName = s} = Just (fromNonEmpty s)
opSummaryVal OpNetworkRemove { opNetworkName = s} = Just (fromNonEmpty s)
opSummaryVal OpNetworkSetParams { opNetworkName = s} = Just (fromNonEmpty s)
opSummaryVal OpNetworkConnect { opNetworkName = s} = Just (fromNonEmpty s)
opSummaryVal OpNetworkDisconnect { opNetworkName = s} = Just (fromNonEmpty s)
opSummaryVal _ = Nothing
-- | Computes the human-readable summary of an opcode: the opcode ID
-- without its @OP_@ prefix, optionally followed by the value produced
-- by 'opSummaryVal' in parentheses.
opSummary :: OpCode -> String
opSummary op = maybe idSuffix annotate (opSummaryVal op)
  where
    -- Opcode ID with the leading "OP_" stripped.
    idSuffix = drop 3 (opID op)
    -- Attach the summary value in parentheses.
    annotate val = idSuffix ++ "(" ++ val ++ ")"
-- | Generic\/common opcode parameters.
--
-- NOTE(review): 'buildObject' is a Template Haskell helper from
-- Ganeti's THH module; it presumably generates the 'CommonOpParams'
-- record (with an @op@ field prefix) together with its serialisation
-- support -- confirm against Ganeti.THH.
$(buildObject "CommonOpParams" "op"
  [ pDryRun
  , pDebugLevel
  , pOpPriority
  , pDependencies
  , pComment
  , pReason
  ])
-- Standalone deriving, since the type itself is TH-generated.
deriving instance Ord CommonOpParams
-- | Default common parameter values.
--
-- All optional parameters are unset, the priority is
-- 'OpPrioNormal' and the reason trail starts out empty.
defOpParams :: CommonOpParams
defOpParams =
  CommonOpParams { opDryRun = Nothing
                 , opDebugLevel = Nothing
                 , opPriority = OpPrioNormal
                 , opDepends = Nothing
                 , opComment = Nothing
                 , opReason = []
                 }
-- | Resolve relative dependencies to absolute ones, given the job ID.
--
-- Parameters without a dependency list are returned unchanged.
resolveDependsCommon :: (Monad m) => CommonOpParams -> JobId -> m CommonOpParams
resolveDependsCommon params jid =
  case opDepends params of
    Nothing -> return params
    Just deps -> do
      resolved <- mapM (`absoluteJobDependency` jid) deps
      return params { opDepends = Just resolved }
-- | The top-level opcode type: an 'OpCode' bundled with the common
-- parameters that apply to every opcode.
data MetaOpCode = MetaOpCode { metaParams :: CommonOpParams -- ^ Common parameters
                             , metaOpCode :: OpCode         -- ^ The wrapped opcode
                             } deriving (Show, Eq, Ord)
-- | Resolve relative dependencies to absolute ones, given the job Id.
--
-- Only the common parameters are affected; the wrapped opcode is
-- left untouched.
resolveDependencies :: (Monad m) => MetaOpCode -> JobId -> m MetaOpCode
resolveDependencies mopc jid =
  resolveDependsCommon (metaParams mopc) jid >>= \resolved ->
    return mopc { metaParams = resolved }
-- | Serialisation concatenates the dictionaries of the common
-- parameters and the opcode; deserialisation reads both components
-- from the same dictionary.
instance DictObject MetaOpCode where
  toDict (MetaOpCode meta op) = toDict meta ++ toDict op
  fromDictWKeys dict = MetaOpCode <$> fromDictWKeys dict
                                  <*> fromDictWKeys dict
-- | JSON (de)serialisation goes through the dictionary representation,
-- mirroring the 'OpCode' instance.
instance JSON MetaOpCode where
  readJSON = readJSONfromDict
  showJSON = showJSONtoDict
-- | Wraps an 'OpCode' with the default parameters ('defOpParams') to
-- build a 'MetaOpCode'.
wrapOpCode :: OpCode -> MetaOpCode
wrapOpCode op = MetaOpCode defOpParams op
-- | Sets the comment on a meta opcode, leaving everything else
-- unchanged.
setOpComment :: String -> MetaOpCode -> MetaOpCode
setOpComment comment mop =
  mop { metaParams = (metaParams mop) { opComment = Just comment } }
-- | Sets the submission priority on a meta opcode, leaving everything
-- else unchanged.
setOpPriority :: OpSubmitPriority -> MetaOpCode -> MetaOpCode
setOpPriority prio mop =
  mop { metaParams = (metaParams mop) { opPriority = prio } }
| apyrgio/ganeti | src/Ganeti/OpCodes.hs | bsd-2-clause | 29,467 | 0 | 12 | 7,556 | 5,810 | 3,555 | 2,255 | 936 | 2 |
{-# LANGUAGE BangPatterns, TypeSynonymInstances, FlexibleInstances #-}
module Text.Highlighter.Types where
import Data.Char (toLower)
import Text.Regex.PCRE.Light
import qualified Data.ByteString as BS
-- | One rule of a lexer state: either a regexp producing tokens of a
-- given type followed by a state transition, or a group of alternative
-- rules ('AnyOf', presumably tried in order -- confirm against the
-- lexer driver).
data Match
  = Match
    { mRegexp :: String        -- ^ PCRE pattern source text
    , mType :: TokenType       -- ^ Token type produced on a match
    , mNextState :: NextState  -- ^ Transition performed after the match
    }
  | AnyOf TokenMatcher
  deriving Show
-- | A lexer state: a list of rules.
type TokenMatcher = [Match]
-- | A complete lexer definition for one language.
data Lexer =
  Lexer
  { lName :: String           -- ^ Language name, used by 'Show'
  , lAliases :: [String]      -- ^ Alternative short names for the language
  , lExtensions :: [String]   -- ^ Associated file extensions
  , lMimetypes :: [String]    -- ^ Associated MIME types
  , lStart :: TokenMatcher    -- ^ Initial (root) set of rules
  , lFlags :: [PCREOption]    -- ^ PCRE options for compiling the regexps
  }
-- | The state transition performed after a rule matches.
--
-- NOTE(review): constructor semantics below are inferred from the
-- names; confirm against the lexer interpreter.
data NextState
  = Continue                  -- ^ Stay in the current state
  | GoTo TokenMatcher         -- ^ Switch to the given state
  | CapturesTo Callback       -- ^ Compute the next state from the regex captures
  | Pop                       -- ^ Leave the current state
  | PopNum Int                -- ^ Leave the given number of states
  | Push                      -- ^ Push the current state again
  | DoAll [NextState]         -- ^ Perform several transitions in sequence
  | Combined [TokenMatcher]   -- ^ Enter a combination of the given states
  deriving Show
-- | Maps the regex capture groups to a lexer state.
type Callback = [String] -> TokenMatcher
-- | A lexeme: a piece of the input together with its classification.
data Token =
  Token
  { tType :: TokenType      -- ^ Classification of the text
  , tText :: BS.ByteString  -- ^ The matched input bytes
  }
  deriving (Show, Eq)
-- | Hierarchical classification of tokens.  Compound classifications
-- are built with the (left-associative) ':.' constructor, e.g.
-- @String :. Double@.  The plain comments below name the parent
-- categories the following constructors are subtypes of.
data TokenType
  = Text
  | Whitespace
  | Error
  | Other
  | Keyword
  | Name
  | Literal
  | String
  | Number
  | Operator
  | Punctuation
  | Comment
  | Generic
  | TokenType :. TokenType
  -- Subtypes
  -- Keyword
  | Declaration
  | Reserved
  | Type
  -- Keyword, Name.Builtin
  | Pseudo
  -- Keyword, Name
  | Namespace
  -- Name, Name.Variable
  | Class
  -- Keyword, Name
  | Constant
  -- Name
  | Attribute
  | Builtin
  | Decorator
  | Entity
  | Exception
  | Function
  | Identifier
  | Label
  | Property
  | Tag
  | Variable
  | Global
  | Instance
  | Anonymous
  -- Literal
  | Date
  | Scalar
  | Plain
  -- String
  | Atom
  | Backtick
  | Char
  | Doc
  | Double
  | Escape
  | Heredoc
  | Interp
  | Interpol
  | Regex
  | Regexp
  | Symbol
  -- String, Comment
  | Single
  -- Number
  | Bin
  | Binary
  | Decimal
  | Float
  | Hex
  | Hexadecimal
  | Int
  | Integer
  | Long
  | Oct
  | Octal
  -- Operator
  | Word
  -- Comment
  | Multiline
  | Preproc
  | Special
  -- Generic
  | Deleted
  | Emph
  | Heading
  | Inserted
  | Output
  | Prompt
  | Strong
  | Subheading
  | Traceback
  | ByGroups [TokenType]
  | Indicator
  -- Some arbitrary token name
  | Arbitrary String
  -- Use another lexer to yield some tokens
  | Using Lexer
  deriving (Show, Eq)
-- | Lexers are shown by name only, e.g. @(Lexer Haskell)@; the rule
-- tables themselves are not printable in a useful way.
instance Show Lexer where
  show lexer = concat ["(Lexer ", lName lexer, ")"]
-- | Callbacks are opaque functions, so they show as a constant
-- placeholder (this is what lets 'NextState' derive 'Show').
instance Show Callback where
  show _ = "Callback"
-- | Build a rule that emits tokens of the given type and stays in the
-- current state.
tok :: String -> TokenType -> Match
tok re ty = Match re ty Continue
-- | Build a rule that emits tokens of the given type and then performs
-- the given state transition.
tokNext :: String -> TokenType -> NextState -> Match
tokNext = Match
-- | Group several alternative rules into a single rule.
anyOf :: TokenMatcher -> Match
anyOf = AnyOf
-- | Abbreviate a 'TokenType' to a short class name for emitted markup.
--
-- NOTE(review): clause order is significant -- the structural
-- re-association clause, the named special cases and the @:.@
-- suffix cases must all precede the generic fall-through clauses at
-- the bottom.  The final clauses take the first letter of the
-- constructor name; @head (show x)@ is safe since 'show' never
-- produces an empty string, but the @Arbitrary t@ clause assumes @t@
-- is non-empty -- TODO confirm.
shortName :: TokenType -> String
-- Re-associate left-nested compositions so suffix rules apply.
shortName ((_ :. a) :. b) =
  shortName a ++ shortName b
-- Special-cased Name subtypes.
shortName (Name :. Constant) = "no"
shortName (Name :. Entity) = "ni"
shortName (Name :. Property) = "py"
shortName (Arbitrary "Name" :. Arbitrary "Constant") = "no"
shortName (Arbitrary "Name" :. Arbitrary "Entity") = "ni"
shortName (Arbitrary "Name" :. Arbitrary "Property") = "py"
-- The Literal prefix is dropped entirely.
shortName (Literal :. b) =
  shortName b
shortName (Arbitrary "Literal" :. b) =
  shortName b
-- Suffix abbreviations.
shortName (a :. Other) =
  shortName a ++ "x"
shortName (a :. Error) =
  shortName a ++ "r"
shortName (a :. Single) =
  shortName a ++ "1"
shortName (a :. Double) =
  shortName a ++ "2"
shortName (a :. Arbitrary "Other") =
  shortName a ++ "x"
shortName (a :. Arbitrary "Error") =
  shortName a ++ "r"
shortName (a :. Arbitrary "Single") =
  shortName a ++ "1"
shortName (a :. Arbitrary "Double") =
  shortName a ++ "2"
-- Generic composition: concatenate the parts' abbreviations.
shortName (a :. b) =
  shortName a ++ shortName b
-- Special-cased simple types.
shortName Error = "err"
shortName (Arbitrary "Error") = "err"
shortName Number = "m"
shortName (Arbitrary "Number") = "m"
-- Fall-through: first letter, lower-cased.
shortName (Arbitrary t) =
  [toLower (head t)]
shortName x = [toLower (head (show x))]
| chemist/highlighter | src/Text/Highlighter/Types.hs | bsd-3-clause | 3,966 | 0 | 10 | 1,234 | 1,127 | 640 | 487 | 166 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
-----------------------------------------------------------------------------
-- |
-- Copyright : Andrew Martin
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Andrew Martin <[email protected]>
-- Stability : experimental
-- Portability : non-portable
--
-----------------------------------------------------------------------------
module Data.Vector.Vinyl.Default.NonEmpty.Tagged
( Vector, MVector
-- * Accessors
-- ** Length information
, length, null
-- ** Indexing
, (!), (!?), head, last
, unsafeIndex, unsafeHead, unsafeLast
-- ** Monadic indexing
, indexM, headM, lastM
, unsafeIndexM, unsafeHeadM, unsafeLastM
-- ** Extracting subvectors (slicing)
, slice, init, tail, take, drop, splitAt
, unsafeSlice, unsafeInit, unsafeTail, unsafeTake, unsafeDrop
-- * Construction
-- ** Initialisation
, empty, singleton, replicate, generate, iterateN
-- ** Monadic initialisation
, replicateM, generateM, create
-- ** Unfolding
, unfoldr, unfoldrN
, constructN, constructrN
-- -- ** Enumeration
-- , enumFromN, enumFromStepN, enumFromTo, enumFromThenTo
-- ** Concatenation
, cons, snoc, (++), concat
-- ** Restricting memory usage
, force
-- * Modifying vectors
-- ** Bulk updates
, (//)
, unsafeUpd
-- , update_, unsafeUpdate_
-- ** Accumulations
, accum, unsafeAccum
-- , accumulate_, unsafeAccumulate_
-- ** Permutations
, reverse
-- , backpermute, unsafeBackpermute
-- ** Safe destructive updates
, modify
-- * Elementwise operations
-- ** Mapping
, map, imap, concatMap
-- ** Monadic mapping
, mapM, mapM_, forM, forM_
-- ** Zipping - Omitted due to me being lazy
-- , zipWith, zipWith3, zipWith4, zipWith5, zipWith6
-- , izipWith, izipWith3, izipWith4, izipWith5, izipWith6
-- ** Monadic zipping
, zipWithM, zipWithM_
-- * Working with predicates
-- ** Filtering
, filter, ifilter, filterM
, takeWhile, dropWhile
-- ** Partitioning
, partition, unstablePartition, span, break
-- ** Searching
, elem, notElem, find, findIndex
, elemIndex
-- , findIndices, elemIndices
-- * Folding
, foldl, foldl1, foldl', foldl1', foldr, foldr1, foldr', foldr1'
, ifoldl, ifoldl', ifoldr, ifoldr'
-- ** Specialised folds
, all, any
-- , sum, product
, maximum, maximumBy, minimum, minimumBy
, minIndex, minIndexBy, maxIndex, maxIndexBy
-- ** Monadic folds
, foldM, foldM', fold1M, fold1M'
, foldM_, foldM'_, fold1M_, fold1M'_
-- * Prefix sums (scans)
, prescanl, prescanl'
, postscanl, postscanl'
, scanl, scanl', scanl1, scanl1'
, prescanr, prescanr'
, postscanr, postscanr'
, scanr, scanr', scanr1, scanr1'
-- ** Lists
, toList, fromList, fromListN
-- ** Other vector types
, G.convert
-- ** Record Conversion
, toRec
, fromRec
-- ** Mutable vectors
, freeze, thaw, copy, unsafeFreeze, unsafeThaw, unsafeCopy
) where
import Control.Monad.Primitive
import Control.Monad.ST
import Data.Proxy
import Data.Tagged.Functor (TaggedFunctor (..))
import Data.Tuple.TypeLevel (Snd)
import qualified Data.Vector.Generic as G
import Data.Vector.Vinyl.Default.NonEmpty.Tagged.Internal
import Data.Vector.Vinyl.Default.Types (VectorVal)
import Data.Vinyl.Core
import Data.Vinyl.Functor (Identity (..))
import Prelude hiding (all,
any, break,
concat,
concatMap,
drop,
dropWhile,
elem, enumFromThenTo,
enumFromTo,
filter,
foldl,
foldl1,
foldr,
foldr1,
head, init,
last,
length,
map, mapM,
mapM_,
maximum,
minimum,
notElem,
null,
product,
replicate,
reverse,
scanl,
scanl1,
scanr,
scanr1,
span,
splitAt,
sum, tail,
take,
takeWhile,
unzip,
unzip3,
zip, zip3,
zipWith,
zipWith3,
(++))
-- Length
-- ------
-- | /O(1)/ Yield the length of the vector.
--
-- Only the first column of the record of vectors is consulted
-- (presumably all columns are kept at the same length by the
-- construction functions -- confirm in the Internal module).
length :: forall (r :: (a,*)) (k :: KProxy a) rs. G.Vector VectorVal (Snd r) => (Vector k) (Rec (TaggedFunctor Identity) (r ': rs)) -> Int
length (V (TaggedFunctor r :& _)) = G.length r
{-# INLINE length #-}
-- | /O(1)/ Test whether a vector is empty.
--
-- As with 'length', only the first column is inspected.
null :: forall (r :: (a,*)) (k :: KProxy a) rs. G.Vector VectorVal (Snd r) => (Vector k) (Rec (TaggedFunctor Identity) (r ': rs)) -> Bool
null (V (TaggedFunctor r :& _)) = G.null r
{-# INLINE null #-}
-- Indexing
-- --------
-- These wrappers simply delegate to "Data.Vector.Generic" at the
-- record-of-vectors element type.
-- | O(1) Indexing
(!) :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
    => (Vector k) (Rec (TaggedFunctor Identity) rs) -> Int -> Rec (TaggedFunctor Identity) rs
(!) = (G.!)
{-# INLINE (!) #-}
-- | O(1) Safe indexing: 'Nothing' for out-of-bounds indices.
(!?) :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
     => (Vector k) (Rec (TaggedFunctor Identity) rs) -> Int -> Maybe (Rec (TaggedFunctor Identity) rs)
(!?) = (G.!?)
{-# INLINE (!?) #-}
-- | /O(1)/ First element
head :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
     => (Vector k) (Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs
head = G.head
{-# INLINE head #-}
-- | /O(1)/ Last element
last :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
     => (Vector k) (Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs
last = G.last
{-# INLINE last #-}
-- | /O(1)/ Unsafe indexing without bounds checking
unsafeIndex :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
            => (Vector k) (Rec (TaggedFunctor Identity) rs) -> Int -> Rec (TaggedFunctor Identity) rs
{-# INLINE unsafeIndex #-}
unsafeIndex = G.unsafeIndex
-- | /O(1)/ First element without checking if the vector is empty
unsafeHead :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
           => (Vector k) (Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs
{-# INLINE unsafeHead #-}
unsafeHead = G.unsafeHead
-- | /O(1)/ Last element without checking if the vector is empty
unsafeLast :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
           => (Vector k) (Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs
{-# INLINE unsafeLast #-}
unsafeLast = G.unsafeLast
-- Monadic indexing
-- ----------------
-- | /O(1)/ Indexing in a monad.
--
-- The monad allows operations to be strict in the vector when necessary.
-- Suppose vector copying is implemented like this:
--
-- > copy mv v = ... write mv i (v ! i) ...
--
-- For lazy vectors, @v ! i@ would not be evaluated which means that @mv@
-- would unnecessarily retain a reference to @v@ in each element written.
--
-- With 'indexM', copying can be implemented like this instead:
--
-- > copy mv v = ... do
-- >   x <- indexM v i
-- >   write mv i x
--
-- Here, no references to @v@ are retained because indexing (but /not/ the
-- elements) is evaluated eagerly.
--
indexM :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
       => (Vector k) (Rec (TaggedFunctor Identity) rs) -> Int -> m (Rec (TaggedFunctor Identity) rs)
indexM = G.indexM
{-# INLINE indexM #-}
-- | /O(1)/ First element of a vector in a monad. See 'indexM' for an
-- explanation of why this is useful.
headM :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
      => (Vector k) (Rec (TaggedFunctor Identity) rs) -> m (Rec (TaggedFunctor Identity) rs)
headM = G.headM
{-# INLINE headM #-}
-- | /O(1)/ Last element of a vector in a monad. See 'indexM' for an
-- explanation of why this is useful.
lastM :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
      => (Vector k) (Rec (TaggedFunctor Identity) rs) -> m (Rec (TaggedFunctor Identity) rs)
lastM = G.lastM
{-# INLINE lastM #-}
-- | /O(1)/ Indexing in a monad without bounds checks. See 'indexM' for an
-- explanation of why this is useful.
unsafeIndexM :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
             => (Vector k) (Rec (TaggedFunctor Identity) rs) -> Int -> m (Rec (TaggedFunctor Identity) rs)
unsafeIndexM = G.unsafeIndexM
{-# INLINE unsafeIndexM #-}
-- | /O(1)/ First element in a monad without checking for empty vectors.
-- See 'indexM' for an explanation of why this is useful.
unsafeHeadM :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
            => (Vector k) (Rec (TaggedFunctor Identity) rs) -> m (Rec (TaggedFunctor Identity) rs)
unsafeHeadM = G.unsafeHeadM
{-# INLINE unsafeHeadM #-}
-- | /O(1)/ Last element in a monad without checking for empty vectors.
-- See 'indexM' for an explanation of why this is useful.
unsafeLastM :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
            => (Vector k) (Rec (TaggedFunctor Identity) rs) -> m (Rec (TaggedFunctor Identity) rs)
unsafeLastM = G.unsafeLastM
{-# INLINE unsafeLastM #-}
-- Extracting subvectors (slicing)
-- -------------------------------
-- | /O(1)/ Yield a slice of the vector without copying it. The vector must
-- contain at least @i+n@ elements.
slice :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
      => Int   -- ^ @i@ starting index
      -> Int   -- ^ @n@ length
      -> (Vector k) (Rec (TaggedFunctor Identity) rs)
      -> (Vector k) (Rec (TaggedFunctor Identity) rs)
slice = G.slice
{-# INLINE slice #-}
-- | /O(1)/ Yield all but the last element without copying. The vector may not
-- be empty.
init :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
     => (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
init = G.init
{-# INLINE init #-}
-- | /O(1)/ Yield all but the first element without copying. The vector may not
-- be empty.
tail :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
     => (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
tail = G.tail
{-# INLINE tail #-}
-- | /O(1)/ Yield at most the first @n@ elements without copying. The vector may
-- contain less than @n@ elements in which case it is returned unchanged.
take :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
     => Int -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
take = G.take
{-# INLINE take #-}
-- | /O(1)/ Yield all but the first @n@ elements without copying. The vector may
-- contain less than @n@ elements in which case an empty vector is returned.
drop :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
     => Int -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
drop = G.drop
{-# INLINE drop #-}
-- | /O(1)/ Yield the first @n@ elements paired with the remainder without copying.
--
-- Note that @'splitAt' n v@ is equivalent to @('take' n v, 'drop' n v)@
-- but slightly more efficient.
splitAt :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
        => Int -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> ((Vector k) (Rec (TaggedFunctor Identity) rs), (Vector k) (Rec (TaggedFunctor Identity) rs))
splitAt = G.splitAt
{-# INLINE splitAt #-}
-- | /O(1)/ Yield a slice of the vector without copying. The vector must
-- contain at least @i+n@ elements but this is not checked.
unsafeSlice :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
            => Int   -- ^ @i@ starting index
            -> Int   -- ^ @n@ length
            -> (Vector k) (Rec (TaggedFunctor Identity) rs)
            -> (Vector k) (Rec (TaggedFunctor Identity) rs)
unsafeSlice = G.unsafeSlice
{-# INLINE unsafeSlice #-}
-- | /O(1)/ Yield all but the last element without copying. The vector may not
-- be empty but this is not checked.
unsafeInit :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
           => (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
unsafeInit = G.unsafeInit
{-# INLINE unsafeInit #-}
-- | /O(1)/ Yield all but the first element without copying. The vector may not
-- be empty but this is not checked.
unsafeTail :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
           => (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
unsafeTail = G.unsafeTail
{-# INLINE unsafeTail #-}
-- | /O(1)/ Yield the first @n@ elements without copying. The vector must
-- contain at least @n@ elements but this is not checked.
unsafeTake :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
           => Int -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
unsafeTake = G.unsafeTake
{-# INLINE unsafeTake #-}
-- | /O(1)/ Yield all but the first @n@ elements without copying. The vector
-- must contain at least @n@ elements but this is not checked.
unsafeDrop :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
           => Int -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
unsafeDrop = G.unsafeDrop
{-# INLINE unsafeDrop #-}
-- Initialisation
-- --------------
-- | /O(1)/ Empty vector
empty :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
      => (Vector k) (Rec (TaggedFunctor Identity) rs)
empty = G.empty
{-# INLINE empty #-}
-- | /O(1)/ Vector with exactly one element
singleton :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
          => Rec (TaggedFunctor Identity) rs -> (Vector k) (Rec (TaggedFunctor Identity) rs)
singleton = G.singleton
{-# INLINE singleton #-}
-- | /O(n)/ Vector of the given length with the same value in each position
replicate :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
          => Int -> Rec (TaggedFunctor Identity) rs -> (Vector k) (Rec (TaggedFunctor Identity) rs)
replicate = G.replicate
{-# INLINE replicate #-}
-- | /O(n)/ Construct a vector of the given length by applying the function to
-- each index
generate :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
         => Int -> (Int -> Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
generate = G.generate
{-# INLINE generate #-}
-- | /O(n)/ Apply function n times to value. Zeroth element is original value.
iterateN :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
         => Int -> (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs -> (Vector k) (Rec (TaggedFunctor Identity) rs)
iterateN = G.iterateN
{-# INLINE iterateN #-}
-- Unfolding
-- ---------
-- | /O(n)/ Construct a vector by repeatedly applying the generator function
-- to a seed. The generator function yields 'Just' the next element and the
-- new seed or 'Nothing' if there are no more elements.
--
-- > unfoldr (\n -> if n == 0 then Nothing else Just (n,n-1)) 10
-- >  = <10,9,8,7,6,5,4,3,2,1>
unfoldr :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
        => (c -> Maybe (Rec (TaggedFunctor Identity) rs, c)) -> c -> (Vector k) (Rec (TaggedFunctor Identity) rs)
unfoldr = G.unfoldr
{-# INLINE unfoldr #-}
-- | /O(n)/ Construct a vector with at most @n@ elements by repeatedly
-- applying the generator function to a seed. The generator function yields
-- 'Just' the next element and the new seed or 'Nothing' if there are no more
-- elements.
--
-- > unfoldrN 3 (\n -> Just (n,n-1)) 10 = <10,9,8>
unfoldrN :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
         => Int -> (c -> Maybe (Rec (TaggedFunctor Identity) rs, c)) -> c -> (Vector k) (Rec (TaggedFunctor Identity) rs)
unfoldrN = G.unfoldrN
{-# INLINE unfoldrN #-}
-- | /O(n)/ Construct a vector with @n@ elements by repeatedly applying the
-- generator function to the already constructed part of the vector.
--
-- > constructN 3 f = let a = f <> ; b = f <a> ; c = f <a,b> in <a,b,c>
--
constructN :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
           => Int -> ((Vector k) (Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
constructN = G.constructN
{-# INLINE constructN #-}
-- | /O(n)/ Construct a vector with @n@ elements from right to left by
-- repeatedly applying the generator function to the already constructed part
-- of the vector.
--
-- > constructrN 3 f = let a = f <> ; b = f <a> ; c = f <b,a> in <c,b,a>
--
constructrN :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
            => Int -> ((Vector k) (Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
constructrN = G.constructrN
{-# INLINE constructrN #-}
-- Concatenation
-- -------------
-- | /O(n)/ Prepend an element
cons :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
     => Rec (TaggedFunctor Identity) rs -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
{-# INLINE cons #-}
cons = G.cons
-- | /O(n)/ Append an element
snoc :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
     => (Vector k) (Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs -> (Vector k) (Rec (TaggedFunctor Identity) rs)
{-# INLINE snoc #-}
snoc = G.snoc
infixr 5 ++
-- | /O(m+n)/ Concatenate two vectors
(++) :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
     => (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
{-# INLINE (++) #-}
(++) = (G.++)
-- | /O(n)/ Concatenate all vectors in the list
concat :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
       => [(Vector k) (Rec (TaggedFunctor Identity) rs)] -> (Vector k) (Rec (TaggedFunctor Identity) rs)
{-# INLINE concat #-}
concat = G.concat
-- Monadic initialisation
-- ----------------------
-- | /O(n)/ Execute the monadic action the given number of times and store the
-- results in a vector.
replicateM :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
           => Int -> m (Rec (TaggedFunctor Identity) rs) -> m ((Vector k) (Rec (TaggedFunctor Identity) rs))
replicateM = G.replicateM
{-# INLINE replicateM #-}
-- | /O(n)/ Construct a vector of the given length by applying the monadic
-- action to each index
generateM :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
          => Int -> (Int -> m (Rec (TaggedFunctor Identity) rs)) -> m ((Vector k) (Rec (TaggedFunctor Identity) rs))
generateM = G.generateM
{-# INLINE generateM #-}
-- | Execute the monadic action and freeze the resulting vector.
--
-- @
-- create (do { v \<- new 2; write v 0 \'a\'; write v 1 \'b\'; return v }) = \<'a','b'\>
-- @
create :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
       => (forall s. ST s (G.Mutable (Vector k) s (Rec (TaggedFunctor Identity) rs))) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
-- NOTE: eta-expanded due to http://hackage.haskell.org/trac/ghc/ticket/4120
create p = G.create p
{-# INLINE create #-}
-- Restricting memory usage
-- ------------------------
-- | /O(n)/ Yield the argument but force it not to retain any extra memory,
-- possibly by copying it.
--
-- This is especially useful when dealing with slices. For example:
--
-- > force (slice 0 2 <huge vector>)
--
-- Here, the slice retains a reference to the huge vector. Forcing it creates
-- a copy of just the elements that belong to the slice and allows the huge
-- vector to be garbage collected.
force :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
      => (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
force = G.force
{-# INLINE force #-}
-- Bulk updates
-- ------------
-- | /O(m+n)/ For each pair @(i,a)@ from the list, replace the vector
-- element at position @i@ by @a@.
--
-- > <5,9,2,7> // [(2,1),(0,3),(2,8)] = <3,9,8,7>
--
(//) :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
     => (Vector k) (Rec (TaggedFunctor Identity) rs)   -- ^ initial vector (of length @m@)
     -> [(Int, Rec (TaggedFunctor Identity) rs)]   -- ^ list of index/value pairs (of length @n@)
     -> (Vector k) (Rec (TaggedFunctor Identity) rs)
(//) = (G.//)
{-# INLINE (//) #-}
-- | Same as ('//') but without bounds checking.
unsafeUpd :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
          => (Vector k) (Rec (TaggedFunctor Identity) rs) -> [(Int, Rec (TaggedFunctor Identity) rs)] -> (Vector k) (Rec (TaggedFunctor Identity) rs)
unsafeUpd = G.unsafeUpd
{-# INLINE unsafeUpd #-}
-- Accumulations
-- -------------
-- | /O(m+n)/ For each pair @(i,c)@ from the list, replace the vector element
-- @a@ at position @i@ by @f a c@.
--
-- > accum (+) <5,9,2> [(2,4),(1,6),(0,3),(1,7)] = <5+3, 9+6+7, 2+4>
accum :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
      => (Rec (TaggedFunctor Identity) rs -> c -> Rec (TaggedFunctor Identity) rs)   -- ^ accumulating function @f@
      -> (Vector k) (Rec (TaggedFunctor Identity) rs)   -- ^ initial vector (of length @m@)
      -> [(Int,c)]   -- ^ list of index/value pairs (of length @n@)
      -> (Vector k) (Rec (TaggedFunctor Identity) rs)
accum = G.accum
{-# INLINE accum #-}
-- | Same as 'accum' but without bounds checking.
unsafeAccum :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
            => (Rec (TaggedFunctor Identity) rs -> c -> Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> [(Int,c)] -> (Vector k) (Rec (TaggedFunctor Identity) rs)
unsafeAccum = G.unsafeAccum
{-# INLINE unsafeAccum #-}
-- Permutations
-- ------------
-- | /O(n)/ Reverse a vector
reverse :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
        => (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
{-# INLINE reverse #-}
reverse = G.reverse
-- Safe destructive updates
-- ------------------------
-- | Apply a destructive operation to a vector. The operation will be
-- performed in place if it is safe to do so and will modify a copy of the
-- vector otherwise.
--
-- @
-- modify (\\v -> write v 0 \'x\') ('replicate' 3 \'a\') = \<\'x\',\'a\',\'a\'\>
-- @
modify :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
       => (forall s. G.Mutable (Vector k) s (Rec (TaggedFunctor Identity) rs) -> ST s ())
       -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
{-# INLINE modify #-}
-- NOTE: eta-expanded; see 'create' for the reason.
modify p = G.modify p
-- Mapping
-- -------
-- | /O(n)/ Map a function over a vector
map :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), G.Vector (Vector k) (Rec (TaggedFunctor Identity) ss))
    => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) ss) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) ss)
map = G.map
{-# INLINE map #-}
-- | /O(n)/ Apply a function to every element of a vector and its index
imap :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), G.Vector (Vector k) (Rec (TaggedFunctor Identity) ss))
     => (Int -> Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) ss)
     -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) ss)
imap = G.imap
{-# INLINE imap #-}
-- | Map a function over a vector and concatenate the results.
concatMap :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), G.Vector (Vector k) (Rec (TaggedFunctor Identity) ss))
          => (Rec (TaggedFunctor Identity) rs -> (Vector k) (Rec (TaggedFunctor Identity) ss)) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) ss)
concatMap = G.concatMap
{-# INLINE concatMap #-}
-- Monadic mapping
-- ---------------
-- | /O(n)/ Apply the monadic action to all elements of the vector, yielding a
-- vector of results
mapM :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), G.Vector (Vector k) (Rec (TaggedFunctor Identity) ss))
     => (Rec (TaggedFunctor Identity) rs -> m (Rec (TaggedFunctor Identity) ss)) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> m ((Vector k) (Rec (TaggedFunctor Identity) ss))
mapM = G.mapM
{-# INLINE mapM #-}
-- | /O(n)/ Apply the monadic action to all elements of a vector and ignore the
-- results
mapM_ :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
      => (Rec (TaggedFunctor Identity) rs -> m b) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> m ()
mapM_ = G.mapM_
{-# INLINE mapM_ #-}
-- | /O(n)/ Apply the monadic action to all elements of the vector, yielding a
-- vector of results. Equivalent to @flip 'mapM'@.
forM :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), G.Vector (Vector k) (Rec (TaggedFunctor Identity) ss))
     => (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Rec (TaggedFunctor Identity) rs -> m (Rec (TaggedFunctor Identity) ss)) -> m ((Vector k) (Rec (TaggedFunctor Identity) ss))
forM = G.forM
{-# INLINE forM #-}
-- | /O(n)/ Apply the monadic action to all elements of a vector and ignore the
-- results. Equivalent to @flip 'mapM_'@.
forM_ :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
      => (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Rec (TaggedFunctor Identity) rs -> m b) -> m ()
forM_ = G.forM_
{-# INLINE forM_ #-}
-- Zipping
-- -------
-- -- | /O(min(m,n))/ Zip two vectors with the given function.
-- zipWith :: ( G.Vector u a, G.Vector v a'
-- , G.Vector u b, G.Vector v b'
-- , G.Vector u c, G.Vector v c'
-- ) => ((a,a') -> (b,b') -> (c,c'))
-- -> (Vector k) u v (a,a') -> (Vector k) u v (b,b') -> (Vector k) u v (c,c')
-- zipWith = G.zipWith
-- {-# INLINE zipWith #-}
--
-- -- | Zip three vectors with the given function.
--
-- zipWith3 :: ( G.Vector u a, G.Vector v a'
-- , G.Vector u b, G.Vector v b'
-- , G.Vector u c, G.Vector v c'
-- , G.Vector u d, G.Vector v d'
-- ) => ((a,a') -> (b,b') -> (c,c') -> (d, d'))
-- -> (Vector k) u v (a,a') -> (Vector k) u v (b,b') -> (Vector k) u v (c,c') -> (Vector k) u v (d,d')
-- zipWith3 = G.zipWith3
-- {-# INLINE zipWith3 #-}
--
-- zipWith4 :: ( G.Vector u a, G.Vector v a'
-- , G.Vector u b, G.Vector v b'
-- , G.Vector u c, G.Vector v c'
-- , G.Vector u d, G.Vector v d'
-- , G.Vector u e, G.Vector v e'
-- ) => ((a,a') -> (b,b') -> (c,c') -> (d, d') -> (e,e'))
-- -> (Vector k) u v (a,a') -> (Vector k) u v (b,b') -> (Vector k) u v (c,c') -> (Vector k) u v (d,d') -> (Vector k) u v (e,e')
-- zipWith4 = G.zipWith4
-- {-# INLINE zipWith4 #-}
--
-- zipWith5 :: ( G.Vector u a, G.Vector v a'
-- , G.Vector u b, G.Vector v b'
-- , G.Vector u c, G.Vector v c'
-- , G.Vector u d, G.Vector v d'
-- , G.Vector u e, G.Vector v e'
-- , G.Vector u f, G.Vector v f'
-- ) => ((a,a') -> (b,b') -> (c,c') -> (d, d') -> (e,e') -> (f,f'))
-- -> (Vector k) u v (a,a') -> (Vector k) u v (b,b') -> (Vector k) u v (c,c') -> (Vector k) u v (d,d') -> (Vector k) u v (e,e') -> (Vector k) u v (f,f')
-- zipWith5 = G.zipWith5
-- {-# INLINE zipWith5 #-}
--
-- zipWith6 :: ( G.Vector u a, G.Vector v a'
-- , G.Vector u b, G.Vector v b'
-- , G.Vector u c, G.Vector v c'
-- , G.Vector u d, G.Vector v d'
-- , G.Vector u e, G.Vector v e'
-- , G.Vector u f, G.Vector v f'
-- , G.Vector u g, G.Vector v g'
-- ) => ((a,a') -> (b,b') -> (c,c') -> (d, d') -> (e,e') -> (f,f') -> (g,g'))
-- -> (Vector k) u v (a,a') -> (Vector k) u v (b,b') -> (Vector k) u v (c,c') -> (Vector k) u v (d,d') -> (Vector k) u v (e,e') -> (Vector k) u v (f,f') -> (Vector k) u v (g,g')
-- zipWith6 = G.zipWith6
-- {-# INLINE zipWith6 #-}
--
-- -- | /O(min(m,n))/ Zip two vectors with a function that also takes the
-- -- elements' indices.
-- izipWith :: ( G.Vector u a, G.Vector v a'
-- , G.Vector u b, G.Vector v b'
-- , G.Vector u c, G.Vector v c'
-- ) => (Int -> (a,a') -> (b,b') -> (c,c'))
-- -> (Vector k) u v (a,a') -> (Vector k) u v (b,b') -> (Vector k) u v (c,c')
-- izipWith = G.izipWith
-- {-# INLINE izipWith #-}
--
-- -- | Zip three vectors and their indices with the given function.
-- izipWith3 :: ( G.Vector u a, G.Vector v a'
-- , G.Vector u b, G.Vector v b'
-- , G.Vector u c, G.Vector v c'
-- , G.Vector u d, G.Vector v d'
-- ) => (Int -> (a,a') -> (b,b') -> (c,c') -> (d, d'))
-- -> (Vector k) u v (a,a') -> (Vector k) u v (b,b') -> (Vector k) u v (c,c') -> (Vector k) u v (d,d')
-- izipWith3 = G.izipWith3
-- {-# INLINE izipWith3 #-}
--
-- izipWith4 :: ( G.Vector u a, G.Vector v a'
-- , G.Vector u b, G.Vector v b'
-- , G.Vector u c, G.Vector v c'
-- , G.Vector u d, G.Vector v d'
-- , G.Vector u e, G.Vector v e'
-- ) => (Int -> (a,a') -> (b,b') -> (c,c') -> (d, d') -> (e,e'))
-- -> (Vector k) u v (a,a') -> (Vector k) u v (b,b') -> (Vector k) u v (c,c') -> (Vector k) u v (d,d') -> (Vector k) u v (e,e')
-- izipWith4 = G.izipWith4
-- {-# INLINE izipWith4 #-}
--
-- izipWith5 :: ( G.Vector u a, G.Vector v a'
-- , G.Vector u b, G.Vector v b'
-- , G.Vector u c, G.Vector v c'
-- , G.Vector u d, G.Vector v d'
-- , G.Vector u e, G.Vector v e'
-- , G.Vector u f, G.Vector v f'
-- ) => (Int -> (a,a') -> (b,b') -> (c,c') -> (d, d') -> (e,e') -> (f,f'))
-- -> (Vector k) u v (a,a') -> (Vector k) u v (b,b') -> (Vector k) u v (c,c') -> (Vector k) u v (d,d') -> (Vector k) u v (e,e') -> (Vector k) u v (f,f')
-- izipWith5 = G.izipWith5
-- {-# INLINE izipWith5 #-}
--
-- izipWith6 :: ( G.Vector u a, G.Vector v a'
-- , G.Vector u b, G.Vector v b'
-- , G.Vector u c, G.Vector v c'
-- , G.Vector u d, G.Vector v d'
-- , G.Vector u e, G.Vector v e'
-- , G.Vector u f, G.Vector v f'
-- , G.Vector u g, G.Vector v g'
-- ) => (Int -> (a,a') -> (b,b') -> (c,c') -> (d, d') -> (e,e') -> (f,f') -> (g,g'))
-- -> (Vector k) u v (a,a') -> (Vector k) u v (b,b') -> (Vector k) u v (c,c') -> (Vector k) u v (d,d') -> (Vector k) u v (e,e') -> (Vector k) u v (f,f') -> (Vector k) u v (g,g')
-- izipWith6 = G.izipWith6
-- {-# INLINE izipWith6 #-}
-- Monadic zipping
-- ---------------
-- | /O(min(m,n))/ Zip the two vectors with the monadic action and yield a
-- vector of results
zipWithM :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), G.Vector (Vector k) (Rec (TaggedFunctor Identity) ss), G.Vector (Vector k) (Rec (TaggedFunctor Identity) ts))
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) ss -> m (Rec (TaggedFunctor Identity) ts)) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) ss) -> m ((Vector k) (Rec (TaggedFunctor Identity) ts))
zipWithM = G.zipWithM
{-# INLINE zipWithM #-}
-- | /O(min(m,n))/ Zip the two vectors with the monadic action and ignore the
-- results
zipWithM_ :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), G.Vector (Vector k) (Rec (TaggedFunctor Identity) ss))
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) ss -> m e) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) ss)-> m ()
zipWithM_ = G.zipWithM_
{-# INLINE zipWithM_ #-}
-- Filtering
-- ---------
-- All predicates below operate on whole rows (records), not on
-- individual columns.
-- | /O(n)/ Drop elements that do not satisfy the predicate
filter :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Bool) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
filter = G.filter
{-# INLINE filter #-}
-- | /O(n)/ Drop elements that do not satisfy the predicate which is applied to
-- values and their indices
ifilter :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Int -> Rec (TaggedFunctor Identity) rs -> Bool) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
ifilter = G.ifilter
{-# INLINE ifilter #-}
-- | /O(n)/ Drop elements that do not satisfy the monadic predicate
filterM :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
  => (Rec (TaggedFunctor Identity) rs -> m Bool) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> m ((Vector k) (Rec (TaggedFunctor Identity) rs))
filterM = G.filterM
{-# INLINE filterM #-}
-- | /O(n)/ Yield the longest prefix of elements satisfying the predicate
-- without copying.
takeWhile :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Bool) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
takeWhile = G.takeWhile
{-# INLINE takeWhile #-}
-- | /O(n)/ Drop the longest prefix of elements that satisfy the predicate
-- without copying.
dropWhile :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Bool) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
dropWhile = G.dropWhile
{-# INLINE dropWhile #-}
-- Partitioning
-- ------------
-- | /O(n)/ Split the vector in two parts, the first one containing those
-- elements that satisfy the predicate and the second one those that don't. The
-- relative order of the elements is preserved at the cost of a sometimes
-- reduced performance compared to 'unstablePartition'.
partition :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Bool) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> ((Vector k) (Rec (TaggedFunctor Identity) rs), (Vector k) (Rec (TaggedFunctor Identity) rs))
{-# INLINE partition #-}
partition = G.partition
-- | /O(n)/ Split the vector in two parts, the first one containing those
-- elements that satisfy the predicate and the second one those that don't.
-- The order of the elements is not preserved but the operation is often
-- faster than 'partition'.
unstablePartition :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Bool) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> ((Vector k) (Rec (TaggedFunctor Identity) rs), (Vector k) (Rec (TaggedFunctor Identity) rs))
{-# INLINE unstablePartition #-}
unstablePartition = G.unstablePartition
-- | /O(n)/ Split the vector into the longest prefix of elements that satisfy
-- the predicate and the rest without copying.
span :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Bool) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> ((Vector k) (Rec (TaggedFunctor Identity) rs), (Vector k) (Rec (TaggedFunctor Identity) rs))
{-# INLINE span #-}
span = G.span
-- | /O(n)/ Split the vector into the longest prefix of elements that do not
-- satisfy the predicate and the rest without copying.
break :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Bool) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> ((Vector k) (Rec (TaggedFunctor Identity) rs), (Vector k) (Rec (TaggedFunctor Identity) rs))
{-# INLINE break #-}
break = G.break
-- Searching
-- ---------
infix 4 `elem`
-- | /O(n)/ Check if the vector contains an element
elem :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), Eq (Rec (TaggedFunctor Identity) rs))
  => Rec (TaggedFunctor Identity) rs -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> Bool
elem = G.elem
{-# INLINE elem #-}
infix 4 `notElem`
-- | /O(n)/ Check if the vector does not contain an element (inverse of 'elem')
notElem :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), Eq (Rec (TaggedFunctor Identity) rs))
  => Rec (TaggedFunctor Identity) rs -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> Bool
notElem = G.notElem
{-# INLINE notElem #-}
-- | /O(n)/ Yield 'Just' the first element matching the predicate or 'Nothing'
-- if no such element exists.
find :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Bool) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> Maybe (Rec (TaggedFunctor Identity) rs)
find = G.find
{-# INLINE find #-}
-- | /O(n)/ Yield 'Just' the index of the first element matching the predicate
-- or 'Nothing' if no such element exists.
findIndex :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Bool) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> Maybe Int
findIndex = G.findIndex
{-# INLINE findIndex #-}
{-
-- | /O(n)/ Yield the indices of elements satisfying the predicate in ascending
-- order.  (Disabled: needs a vector-of-Int instance for this representation.)
findIndices :: ((a, b) -> Bool) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) u v Int
findIndices = G.findIndices
{-# INLINE findIndices #-}
-}
-- | /O(n)/ Yield 'Just' the index of the first occurrence of the given element or
-- 'Nothing' if the vector does not contain the element. This is a specialised
-- version of 'findIndex'.
elemIndex :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), Eq (Rec (TaggedFunctor Identity) rs))
  => Rec (TaggedFunctor Identity) rs -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> Maybe Int
elemIndex = G.elemIndex
{-# INLINE elemIndex #-}
{-
-- | /O(n)/ Yield the indices of all occurrences of the given element in
-- ascending order. This is a specialised version of 'findIndices'.
elemIndices :: (a, b) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) Int
elemIndices = G.elemIndices
{-# INLINE elemIndices #-}
-}
-- Folding
-- -------
-- | /O(n)/ Left fold
-- (Lazy in the accumulator; see 'foldl'' for the strict variant.)
foldl :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (r -> Rec (TaggedFunctor Identity) rs -> r) -> r -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> r
foldl = G.foldl
{-# INLINE foldl #-}
-- | /O(n)/ Left fold on non-empty vectors
foldl1 :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs
foldl1 = G.foldl1
{-# INLINE foldl1 #-}
-- | /O(n)/ Left fold with strict accumulator
foldl' :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (r -> Rec (TaggedFunctor Identity) rs -> r) -> r -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> r
foldl' = G.foldl'
{-# INLINE foldl' #-}
-- | /O(n)/ Left fold on non-empty vectors with strict accumulator
foldl1' :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs
foldl1' = G.foldl1'
{-# INLINE foldl1' #-}
-- | /O(n)/ Right fold
foldr :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> r -> r) -> r -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> r
foldr = G.foldr
{-# INLINE foldr #-}
-- | /O(n)/ Right fold on non-empty vectors
foldr1 :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs
foldr1 = G.foldr1
{-# INLINE foldr1 #-}
-- | /O(n)/ Right fold with a strict accumulator
foldr' :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> r -> r) -> r -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> r
foldr' = G.foldr'
{-# INLINE foldr' #-}
-- | /O(n)/ Right fold on non-empty vectors with strict accumulator
foldr1' :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs
foldr1' = G.foldr1'
{-# INLINE foldr1' #-}
-- | /O(n)/ Left fold (function applied to each element and its index)
ifoldl :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (r -> Int -> Rec (TaggedFunctor Identity) rs -> r) -> r -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> r
ifoldl = G.ifoldl
{-# INLINE ifoldl #-}
-- | /O(n)/ Left fold with strict accumulator (function applied to each element
-- and its index)
ifoldl' :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (r -> Int -> Rec (TaggedFunctor Identity) rs -> r) -> r -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> r
ifoldl' = G.ifoldl'
{-# INLINE ifoldl' #-}
-- | /O(n)/ Right fold (function applied to each element and its index)
ifoldr :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Int -> Rec (TaggedFunctor Identity) rs -> r -> r) -> r -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> r
ifoldr = G.ifoldr
{-# INLINE ifoldr #-}
-- | /O(n)/ Right fold with strict accumulator (function applied to each
-- element and its index)
ifoldr' :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Int -> Rec (TaggedFunctor Identity) rs -> r -> r) -> r -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> r
ifoldr' = G.ifoldr'
{-# INLINE ifoldr' #-}
-- Specialised folds
-- -----------------
-- NOTE: the maximum/minimum/index functions below are partial -- they
-- inherit the "vector may not be empty" precondition from
-- "Data.Vector.Generic" and will raise an error on an empty vector.
-- | /O(n)/ Check if all elements satisfy the predicate.
all :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Bool) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> Bool
{-# INLINE all #-}
all = G.all
-- | /O(n)/ Check if any element satisfies the predicate.
any :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Bool) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> Bool
{-# INLINE any #-}
any = G.any
{-
-- | /O(n)/ Compute the sum of the elements
sum :: (Vector k) (Rec (TaggedFunctor Identity) rs) -> (a, b)
{-# INLINE sum #-}
sum = G.sum
-- | /O(n)/ Compute the product of the elements
product :: (Vector k) (Rec (TaggedFunctor Identity) rs) -> (a, b)
{-# INLINE product #-}
product = G.product
-}
-- | /O(n)/ Yield the maximum element of the vector. The vector may not be
-- empty.
maximum :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), Ord (Rec (TaggedFunctor Identity) rs))
  => (Vector k) (Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs
{-# INLINE maximum #-}
maximum = G.maximum
-- | /O(n)/ Yield the maximum element of the vector according to the given
-- comparison function. The vector may not be empty.
maximumBy :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs -> Ordering) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs
{-# INLINE maximumBy #-}
maximumBy = G.maximumBy
-- | /O(n)/ Yield the minimum element of the vector. The vector may not be
-- empty.
minimum :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), Ord (Rec (TaggedFunctor Identity) rs))
  => (Vector k) (Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs
{-# INLINE minimum #-}
minimum = G.minimum
-- | /O(n)/ Yield the minimum element of the vector according to the given
-- comparison function. The vector may not be empty.
minimumBy :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs -> Ordering) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs
{-# INLINE minimumBy #-}
minimumBy = G.minimumBy
-- | /O(n)/ Yield the index of the maximum element of the vector. The vector
-- may not be empty.
maxIndex :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), Ord (Rec (TaggedFunctor Identity) rs))
  => (Vector k) (Rec (TaggedFunctor Identity) rs) -> Int
{-# INLINE maxIndex #-}
maxIndex = G.maxIndex
-- | /O(n)/ Yield the index of the maximum element of the vector according to
-- the given comparison function. The vector may not be empty.
maxIndexBy :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs -> Ordering) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> Int
{-# INLINE maxIndexBy #-}
maxIndexBy = G.maxIndexBy
-- | /O(n)/ Yield the index of the minimum element of the vector. The vector
-- may not be empty.
minIndex :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), Ord (Rec (TaggedFunctor Identity) rs))
  => (Vector k) (Rec (TaggedFunctor Identity) rs) -> Int
{-# INLINE minIndex #-}
minIndex = G.minIndex
-- | /O(n)/ Yield the index of the minimum element of the vector according to
-- the given comparison function. The vector may not be empty.
minIndexBy :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs -> Ordering) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> Int
{-# INLINE minIndexBy #-}
minIndexBy = G.minIndexBy
-- Monadic folds
-- -------------
-- Naming scheme: a prime (') marks a strict accumulator, a trailing
-- underscore (_) marks a variant that discards its result, and "1"
-- marks a variant restricted to non-empty vectors.
-- | /O(n)/ Monadic fold
foldM :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
  => (r -> Rec (TaggedFunctor Identity) rs -> m r) -> r -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> m r
foldM = G.foldM
{-# INLINE foldM #-}
-- | /O(n)/ Monadic fold over non-empty vectors
fold1M :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs -> m (Rec (TaggedFunctor Identity) rs)) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> m (Rec (TaggedFunctor Identity) rs)
{-# INLINE fold1M #-}
fold1M = G.fold1M
-- | /O(n)/ Monadic fold with strict accumulator
foldM' :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
  => (r -> Rec (TaggedFunctor Identity) rs -> m r) -> r -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> m r
{-# INLINE foldM' #-}
foldM' = G.foldM'
-- | /O(n)/ Monadic fold over non-empty vectors with strict accumulator
fold1M' :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs -> m (Rec (TaggedFunctor Identity) rs)) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> m (Rec (TaggedFunctor Identity) rs)
{-# INLINE fold1M' #-}
fold1M' = G.fold1M'
-- | /O(n)/ Monadic fold that discards the result
foldM_ :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
  => (r -> Rec (TaggedFunctor Identity) rs -> m r) -> r -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> m ()
{-# INLINE foldM_ #-}
foldM_ = G.foldM_
-- | /O(n)/ Monadic fold over non-empty vectors that discards the result
fold1M_ :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs -> m (Rec (TaggedFunctor Identity) rs)) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> m ()
{-# INLINE fold1M_ #-}
fold1M_ = G.fold1M_
-- | /O(n)/ Monadic fold with strict accumulator that discards the result
foldM'_ :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
  => (r -> Rec (TaggedFunctor Identity) rs -> m r) -> r -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> m ()
{-# INLINE foldM'_ #-}
foldM'_ = G.foldM'_
-- | /O(n)/ Monadic fold over non-empty vectors with strict accumulator
-- that discards the result
fold1M'_ :: (Monad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs -> m (Rec (TaggedFunctor Identity) rs)) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> m ()
{-# INLINE fold1M'_ #-}
fold1M'_ = G.fold1M'_
-- Prefix sums (scans)
-- -------------------
-- | /O(n)/ Prescan
--
-- @
-- prescanl f z = 'init' . 'scanl' f z
-- @
--
-- Example: @prescanl (+) 0 \<1,2,3,4\> = \<0,1,3,6\>@
--
prescanl :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), G.Vector (Vector k) (Rec (TaggedFunctor Identity) ss))
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) ss -> Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs -> (Vector k) (Rec (TaggedFunctor Identity) ss) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
prescanl = G.prescanl
{-# INLINE prescanl #-}
-- | /O(n)/ Prescan with strict accumulator
prescanl' :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), G.Vector (Vector k) (Rec (TaggedFunctor Identity) ss))
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) ss -> Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs -> (Vector k) (Rec (TaggedFunctor Identity) ss) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
prescanl' = G.prescanl'
{-# INLINE prescanl' #-}
-- | /O(n)/ Scan
--
-- @
-- postscanl f z = 'tail' . 'scanl' f z
-- @
--
-- Example: @postscanl (+) 0 \<1,2,3,4\> = \<1,3,6,10\>@
--
postscanl :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), G.Vector (Vector k) (Rec (TaggedFunctor Identity) ss))
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) ss -> Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs -> (Vector k) (Rec (TaggedFunctor Identity) ss) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
postscanl = G.postscanl
{-# INLINE postscanl #-}
-- | /O(n)/ Scan with strict accumulator
postscanl' :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), G.Vector (Vector k) (Rec (TaggedFunctor Identity) ss))
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) ss -> Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs -> (Vector k) (Rec (TaggedFunctor Identity) ss) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
postscanl' = G.postscanl'
{-# INLINE postscanl' #-}
-- | /O(n)/ Haskell-style scan
--
-- > scanl f z <x1,...,xn> = <y1,...,y(n+1)>
-- >   where y1 = z
-- >         yi = f y(i-1) x(i-1)
--
-- Example: @scanl (+) 0 \<1,2,3,4\> = \<0,1,3,6,10\>@
--
scanl :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), G.Vector (Vector k) (Rec (TaggedFunctor Identity) ss))
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) ss -> Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs -> (Vector k) (Rec (TaggedFunctor Identity) ss) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
scanl = G.scanl
{-# INLINE scanl #-}
-- | /O(n)/ Haskell-style scan with strict accumulator
scanl' :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), G.Vector (Vector k) (Rec (TaggedFunctor Identity) ss))
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) ss -> Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor Identity) rs -> (Vector k) (Rec (TaggedFunctor Identity) ss) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
scanl' = G.scanl'
{-# INLINE scanl' #-}
-- | /O(n)/ Scan over a non-empty vector
--
-- > scanl1 f <x1,...,xn> = <y1,...,yn>
-- >   where y1 = x1
-- >         yi = f y(i-1) xi
--
scanl1 :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
scanl1 = G.scanl1
{-# INLINE scanl1 #-}
-- | /O(n)/ Scan over a non-empty vector with a strict accumulator
scanl1' :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
scanl1' = G.scanl1'
{-# INLINE scanl1' #-}
-- | /O(n)/ Right-to-left prescan
--
-- @
-- prescanr f z = 'reverse' . 'prescanl' (flip f) z . 'reverse'
-- @
--
prescanr :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), G.Vector (Vector k) (Rec (TaggedFunctor Identity) ss))
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) ss -> Rec (TaggedFunctor Identity) ss) -> Rec (TaggedFunctor Identity) ss -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) ss)
{-# INLINE prescanr #-}
prescanr = G.prescanr
-- | /O(n)/ Right-to-left prescan with strict accumulator
prescanr' :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), G.Vector (Vector k) (Rec (TaggedFunctor Identity) ss))
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) ss -> Rec (TaggedFunctor Identity) ss) -> Rec (TaggedFunctor Identity) ss -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) ss)
prescanr' = G.prescanr'
{-# INLINE prescanr' #-}
-- | /O(n)/ Right-to-left scan
postscanr :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), G.Vector (Vector k) (Rec (TaggedFunctor Identity) ss))
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) ss -> Rec (TaggedFunctor Identity) ss) -> Rec (TaggedFunctor Identity) ss -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) ss)
postscanr = G.postscanr
{-# INLINE postscanr #-}
-- | /O(n)/ Right-to-left scan with strict accumulator
postscanr' :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), G.Vector (Vector k) (Rec (TaggedFunctor Identity) ss))
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) ss -> Rec (TaggedFunctor Identity) ss) -> Rec (TaggedFunctor Identity) ss -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) ss)
postscanr' = G.postscanr'
{-# INLINE postscanr' #-}
-- | /O(n)/ Right-to-left Haskell-style scan
scanr :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), G.Vector (Vector k) (Rec (TaggedFunctor Identity) ss))
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) ss -> Rec (TaggedFunctor Identity) ss) -> Rec (TaggedFunctor Identity) ss -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) ss)
scanr = G.scanr
{-# INLINE scanr #-}
-- | /O(n)/ Right-to-left Haskell-style scan with strict accumulator
scanr' :: (G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs), G.Vector (Vector k) (Rec (TaggedFunctor Identity) ss))
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) ss -> Rec (TaggedFunctor Identity) ss) -> Rec (TaggedFunctor Identity) ss -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) ss)
scanr' = G.scanr'
{-# INLINE scanr' #-}
-- | /O(n)/ Right-to-left scan over a non-empty vector
scanr1 :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
{-# INLINE scanr1 #-}
scanr1 = G.scanr1
-- | /O(n)/ Right-to-left scan over a non-empty vector with a strict
-- accumulator
scanr1' :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs -> Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs)
{-# INLINE scanr1' #-}
scanr1' = G.scanr1'
-- | /O(n)/ Convert a vector to a list
toList :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => (Vector k) (Rec (TaggedFunctor Identity) rs) -> [Rec (TaggedFunctor Identity) rs]
toList = G.toList
{-# INLINE toList #-}
-- | /O(n)/ Convert a list to a vector
fromList :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => [Rec (TaggedFunctor Identity) rs] -> (Vector k) (Rec (TaggedFunctor Identity) rs)
fromList = G.fromList
{-# INLINE fromList #-}
-- | /O(n)/ Convert the first @n@ elements of a list to a vector
--
-- @
-- fromListN n xs = 'fromList' ('take' n xs)
-- @
fromListN :: G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs)
  => Int -> [Rec (TaggedFunctor Identity) rs] -> (Vector k) (Rec (TaggedFunctor Identity) rs)
fromListN = G.fromListN
{-# INLINE fromListN #-}
-- Conversions - Mutable vectors
-- -----------------------------
-- | /O(1)/ Unsafely convert a mutable vector to an immutable one without
-- copying. The mutable vector may not be used after this operation.
unsafeFreeze :: (PrimMonad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
  => G.Mutable (Vector k) (PrimState m) (Rec (TaggedFunctor Identity) rs) -> m ((Vector k) (Rec (TaggedFunctor Identity) rs))
unsafeFreeze = G.unsafeFreeze
{-# INLINE unsafeFreeze #-}
-- | /O(1)/ Unsafely convert an immutable vector to a mutable one without
-- copying. The immutable vector may not be used after this operation.
unsafeThaw :: (PrimMonad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
  => (Vector k) (Rec (TaggedFunctor Identity) rs) -> m (G.Mutable (Vector k) (PrimState m) (Rec (TaggedFunctor Identity) rs))
unsafeThaw = G.unsafeThaw
{-# INLINE unsafeThaw #-}
-- | /O(n)/ Yield a mutable copy of the immutable vector.
thaw :: (PrimMonad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
  => (Vector k) (Rec (TaggedFunctor Identity) rs) -> m (G.Mutable (Vector k) (PrimState m) (Rec (TaggedFunctor Identity) rs))
thaw = G.thaw
{-# INLINE thaw #-}
-- | /O(n)/ Yield an immutable copy of the mutable vector.
freeze :: (PrimMonad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
  => G.Mutable (Vector k) (PrimState m) (Rec (TaggedFunctor Identity) rs) -> m ((Vector k) (Rec (TaggedFunctor Identity) rs))
freeze = G.freeze
{-# INLINE freeze #-}
-- | /O(n)/ Copy an immutable vector into a mutable one. The two vectors must
-- have the same length. This is not checked.
unsafeCopy :: (PrimMonad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
  => G.Mutable (Vector k) (PrimState m) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> m ()
unsafeCopy = G.unsafeCopy
{-# INLINE unsafeCopy #-}
-- | /O(n)/ Copy an immutable vector into a mutable one. The two vectors must
-- have the same length.
copy :: (PrimMonad m, G.Vector (Vector k) (Rec (TaggedFunctor Identity) rs))
  => G.Mutable (Vector k) (PrimState m) (Rec (TaggedFunctor Identity) rs) -> (Vector k) (Rec (TaggedFunctor Identity) rs) -> m ()
copy = G.copy
{-# INLINE copy #-}
-- | /O(1)/ Convert the vector to a record of vectors. This conversion is
-- trivial since this vector type is internally represented as a structure
-- of arrays; it merely unwraps the @V@ constructor.
toRec :: forall (k :: KProxy a) (rs :: [(a,*)]). (k ~ 'KProxy) => Vector k (Rec (TaggedFunctor Identity) rs) -> Rec (TaggedFunctor VectorVal) rs
toRec (V rs) = rs
{-# INLINE toRec #-}
-- | /O(1)/ Convert a record of vectors to a vector. Inverse of 'toRec'.
fromRec :: forall (k :: KProxy a) (rs :: [(a,*)]). (k ~ 'KProxy) => Rec (TaggedFunctor VectorVal) rs -> Vector k (Rec (TaggedFunctor Identity) rs)
fromRec rs = V rs
{-# INLINE fromRec #-}
-- proxyKey :: forall (k :: KProxy a) (rs :: [(a,*)]). proxy k
| andrewthad/vinyl-vectors | src/Data/Vector/Vinyl/Default/NonEmpty/Tagged.hs | bsd-3-clause | 62,017 | 0 | 15 | 14,355 | 16,895 | 8,879 | 8,016 | -1 | -1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
module Mars.Query
( GlobItem (..),
Query (..),
QueryItem (..),
normalizeQuery,
querySeparator,
parseQuery, -- TODO these parsers could be part of a `Parsable` typeclass perhaps?
query,
queryItem,
globItems,
globItem,
globKeys,
globIndices,
)
where
import Control.Applicative ((<|>))
import Data.Aeson hiding ((<?>))
import Data.Functor.Identity
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as Map
import Data.Ix
import Data.List.NonEmpty (NonEmpty, toList)
import qualified Data.List.NonEmpty as NonEmpty
import Data.Maybe
import Data.String.Conv
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Vector (Vector)
import GHC.Generics
import Mars.Renderable
import Test.QuickCheck
import qualified Test.QuickCheck.Modifiers as Modifiers
import Text.Parsec (Parsec)
import Text.Parsec.Prim (ParsecT)
import Text.ParserCombinators.Parsec hiding ((<|>))
-- | The datatype representing the queries possible for commands that select
-- items: either the root ('DefaultLocation') or a non-empty sequence of
-- path components.
data Query
  = DefaultLocation
  | Query (NonEmpty QueryItem)
  deriving (Generic, Show, Eq)
-- Path concatenation.  NOTE(review): @Query _ <> DefaultLocation =
-- DefaultLocation@ breaks the Monoid right-identity law (@a <> mempty /= a@).
-- It looks intentional -- appending the root path resets to the root, like
-- @cd /@ -- but confirm before relying on Monoid laws for 'Query'.
instance Semigroup Query where
  DefaultLocation <> b = b
  Query _ <> DefaultLocation = DefaultLocation
  (Query a) <> (Query b) = Query (a <> b)
instance Monoid Query where
  mempty = DefaultLocation
instance Arbitrary Query where
  arbitrary = Query . NonEmpty.fromList . Modifiers.getNonEmpty <$> arbitrary
instance FromJSON Query
instance ToJSON Query
-- | A single path component; currently only glob patterns.
newtype QueryItem
  = Glob (NonEmpty GlobItem)
  deriving (Generic, Show, Eq)
instance Arbitrary QueryItem where
  arbitrary =
    oneof
      [ Glob <$> genGlob
      ]
instance FromJSON QueryItem
instance ToJSON QueryItem
-- | Generate a glob that contains at least one wildcard, surrounded by
-- arbitrary glob items on both sides.
genGlob :: Gen (NonEmpty GlobItem)
genGlob = do
  startGlob <- Modifiers.getNonEmpty <$> arbitrary
  specialGlob <- oneof [pure AnyChar, pure AnyCharMultiple]
  endGlob <- Modifiers.getNonEmpty <$> arbitrary
  return . NonEmpty.fromList $ (startGlob <> [specialGlob] <> endGlob)
-- | A parsed-but-not-yet-normalized path component: a glob, @..@
-- ('LevelAbove') or @.@ ('CurrentLevel').  'normalizeQuery' eliminates the
-- latter two.
data UnnormalizedQueryItem
  = GlobInput (NonEmpty GlobItem)
  | LevelAbove
  | CurrentLevel
  deriving (Generic, Show, Eq)
-- | One token of a glob pattern: @?@, @*@, or a literal chunk.
data GlobItem
  = AnyChar
  | AnyCharMultiple
  | LiteralString Text
  deriving (Generic, Show, Eq)
instance Arbitrary GlobItem where
  arbitrary =
    oneof [pure AnyChar, pure AnyCharMultiple]
instance FromJSON GlobItem
instance ToJSON GlobItem
-- | The character used to separate query items when entered on the commandline
querySeparator :: Text.Text
querySeparator = "/"
instance Renderable Query where
  render DefaultLocation = "/"
  render (Query l) =
    Text.intercalate querySeparator
      . toList
      $ (render <$> l)
instance Renderable UnnormalizedQueryItem where
  render (GlobInput g) = mconcat . map renderGlob . toList $ g
  render LevelAbove = Text.pack ".."
  render CurrentLevel = Text.pack "."
instance Renderable QueryItem where
  render (Glob g) = mconcat . toList $ renderGlob <$> g
-- | Render one glob token back to its concrete syntax.
renderGlob :: GlobItem -> Text.Text
renderGlob AnyChar = "?"
renderGlob AnyCharMultiple = "*"
renderGlob (LiteralString s) = s
-- | Resolve @.@ and @..@ components, leaving only glob items; returns
-- 'Nothing' when everything cancels out (callers substitute 'mempty').
--
-- Items are processed front-to-back (hence the two 'reverse's around the
-- 'foldr'): @.@ is dropped, and @..@ removes the most recently kept item.
normalizeQuery :: [UnnormalizedQueryItem] -> Maybe Query
normalizeQuery l =
  Query
    <$> ( NonEmpty.nonEmpty
            . reverse
            . fmap unsafeToQI
            . foldr simplify []
            . reverse
            $ l
        )
  where
    -- After 'simplify' only 'GlobInput's remain, so the second equation is
    -- unreachable; it exists to fail loudly if that invariant ever breaks.
    unsafeToQI :: UnnormalizedQueryItem -> QueryItem
    unsafeToQI (GlobInput g) = Glob g
    unsafeToQI i =
      error $
        "`normalizeQuery` did not remove an "
          <> show i
          <> ". This is a bug in `mars`"
    -- The accumulator holds the kept items most-recent-first.
    simplify :: UnnormalizedQueryItem -> [UnnormalizedQueryItem] -> [UnnormalizedQueryItem]
    simplify CurrentLevel processed = processed
    simplify LevelAbove processed = drop 1 processed
    simplify item processed = item : processed
-- | Parse a textual query.  The parser is not required to consume the
-- whole input; trailing unparsable text is silently ignored.
parseQuery :: Text.Text -> Either ParseError Query
parseQuery s = parse query "" $ Text.unpack s
-- | A query is query items separated by 'querySeparator', normalized.
query :: ParsecT String u Identity Query
query =
  do
    items <- queryItem `sepBy` string (Text.unpack querySeparator)
    return $ fromMaybe mempty . normalizeQuery $ items
    <?> "query"
-- | Parse a single (unnormalized) query item.
--
-- The alternatives are ordered longest-prefix-first: @..@ must be tried
-- before @.@, otherwise the @.@ parser consumes the first dot of @..@ and
-- 'LevelAbove' can never be produced (the previous ordering had exactly
-- that bug).
queryItem :: ParsecT String u Identity UnnormalizedQueryItem
queryItem =
  try (LevelAbove <$ string "..")
    <|> try (CurrentLevel <$ string ".")
    <|> try (GlobInput <$> globItems)
    <?> "queryItem"
-- | One or more glob tokens.
globItems :: ParsecT String u Identity (NonEmpty GlobItem)
globItems = do
  items <- many1 globItem
  return . NonEmpty.fromList $ items
-- | A single glob token: @?@, @*@, or a literal run of characters other
-- than the separator, space, and the wildcard characters.
globItem :: ParsecT String u Identity GlobItem
globItem =
  try (AnyChar <$ string "?")
    <|> try (AnyCharMultiple <$ string "*")
    <|> try
      ( do
          str <- many1 . noneOf $ "/ *?"
          return . LiteralString . toS $ str
      )
-- | Keys of the map whose textual form matches the glob.
globKeys :: HashMap Text a -> NonEmpty GlobItem -> [Text]
globKeys obj glob = filter (match glob) . Map.keys $ obj
-- | All valid indices of @vec@ whose decimal rendering matches the glob.
--
-- The candidate range is @0 .. length vec - 1@.  'range' on an 'Int' pair
-- is inclusive at both ends, so the previous upper bound of @length vec@
-- produced one index past the end of the vector (and the bogus index 0 for
-- an empty vector).
globIndices :: Vector a -> NonEmpty GlobItem -> [Int]
globIndices vec glob =
  map (read . toS)
    . filter (match glob)
    . map ((toS . show) :: Int -> Text)
    . range
    $ (0, length vec - 1)
-- | Does the whole text match the glob?  The glob is compiled to a parser
-- that must consume the entire input ('eof').
--
-- NOTE(review): 'AnyCharMultiple' compiles to @many anyToken@, which
-- consumes greedily and is not backtracked into, so a @*@ followed by
-- further pattern (e.g. @a*b@) appears unable to match anything --
-- confirm whether that is intended.
match :: NonEmpty GlobItem -> Text -> Bool
match l v = case parse (mkParser l) "" v of
  Left _ -> False
  Right _ -> True
  where
    -- Chain the token parsers left to right, terminated by 'eof'.
    mkParser :: NonEmpty GlobItem -> Parsec Text u ()
    mkParser = foldr (<*) eof . fmap toParser
    toParser :: GlobItem -> Parsec Text u ()
    toParser (LiteralString s) = () <$ string (toS s)
    toParser AnyChar = () <$ anyToken
    toParser AnyCharMultiple = () <$ many anyToken
| lorcanmcdonald/mars | src/Mars/Query.hs | bsd-3-clause | 5,486 | 0 | 13 | 1,175 | 1,669 | 875 | 794 | 162 | 4 |
{-# LANGUAGE ViewPatterns #-}
-- | a simple module to help display the AO standard environment
module ShowEnv (showEnv) where
import qualified Data.List as L
import ABC.Imperative.Value
-- | print an AO standard environment, or a summary of it, for
-- the user to view.
--
-- The first equation matches the standard layout
-- @(stack, (hand, (powerblock, ((stackName, namedStacks), extEnv))))@,
-- where the stack name must be a text value; anything else falls through
-- to a generic one-line summary.
showEnv :: V cx -> ShowS
showEnv (P s (P h (P pb (P (P (valToText -> Just sn) ns) ex)))) =
    showPower pb . showExt ex . showNamedStacks ns . showHand h .
    showString "---" . showString sn . showString "---\n" .
    showStack 12 s
showEnv v =
    showString "--(non-standard environment)--\n" .
    summarize v . showChar '\n'
-- | Render a value, truncating after 1000 characters so an enormous
-- structure cannot flood the display; a truncated value is marked with
-- a trailing "...chopped" line.
summarize :: V cx -> ShowS
summarize v = showString visible . ellipsis
  where
    (visible, rest) = L.splitAt 1000 (show v)
    ellipsis
      | null rest = id
      | otherwise = showString "\n...chopped"
showPower, showExt, showNamedStacks, showHand :: V cx -> ShowS
-- | Number of elements on a pair-encoded stack; a non-pair, non-unit
-- tail counts as a single element.
stackCount :: V cx -> Int
stackCount U = 0
stackCount (P _e s) = 1 + stackCount s
stackCount _v = 1
-- | Show at most @n@ top elements of the stack (topmost printed last);
-- deeper elements are elided with a "(k more)" line.  The guarded
-- equation must precede the @P@ equation so the cutoff applies.
showStack :: Int -> V cx -> ShowS
showStack _ U = id
showStack n s | (n < 1) =
    showChar '(' .
    shows (stackCount s) .
    showString " more)\n"
showStack n (P v s) =
    showStack (n-1) s .
    showChar '|' . summarize v . showChar '\n'
showStack _ v =
    showString "(non-standard stack): " .
    summarize v
-- List each named stack as "name: depth"; a malformed entry prints with
-- a "???" label.
showNamedStacks U = id
showNamedStacks (P (P (valToText -> Just name) stack) ns') =
    showString name .
    showString ": " . shows (stackCount stack) .
    showChar '\n' . showNamedStacks ns'
showNamedStacks v =
    showString "???: " . shows v . showChar '\n'
-- Report the hand's depth, printing nothing for an empty hand.
showHand h =
    let n = stackCount h in
    if (0 == n) then id else
    showString "hand: " . shows n . showChar '\n'
-- A plain block is the expected shape for the power block; anything else
-- is surprising enough to display.
showPower (B _) = id
showPower v = showString "power block: " . summarize v . showChar '\n'
-- Only show the extended environment when it is non-unit.
showExt U = id
showExt v = showString "extended env: " . summarize v . showChar '\n'
-- TODO: consider switching to a 'summary' value display.
| dmbarbour/awelon | hsrc_util/ShowEnv.hs | bsd-3-clause | 2,012 | 0 | 18 | 475 | 711 | 345 | 366 | 51 | 2 |
module Filter (
Filter,
Rule,
SimpleRule,
BlockType (..),
makeFilter,
rule,
globalRule
) where
import BasicCategories
import Category
import Style
-- | A filter rule: either a single simple rule, or a group of rules that
-- all inherit the given (category, style) global contexts.
data Rule = Simple SimpleRule
          | Global [(Category, Style)] [Rule]
type SimpleRule = (BlockType, Category, Style)
-- | Whether a rule block shows or hides the matching items.
data BlockType = Show | Hide
                 deriving (Show, Eq)
-- | A rendered filter; 'show' yields the raw filter text.
newtype Filter = Filter String
instance Show Filter where
  show (Filter s) = s
emptyFilter :: Filter
emptyFilter = Filter ""
-- | Smart constructor for a simple rule.
rule :: BlockType -> Category -> Style -> Rule
rule t c s = Simple (t, c, s)
globalRule :: [(Category, Style)] -> [Rule] -> Rule
globalRule = Global
-- | Flatten a rule tree into simple rules.
toSimples :: [Rule] -> [SimpleRule]
toSimples = concatMap toSimple
-- TODO: test nesting behaviour
-- For a 'Global', each contained rule is emitted once per global context
-- (category intersected, style combined via '<+') and once unchanged.
toSimple :: Rule -> [SimpleRule]
toSimple (Simple r) = [r]
toSimple (Global gRs rs) = foldr (\r xs -> (addGlobalContext <$> toSimple r <*> gRs) ++ toSimple r ++ xs) [] rs
-- (the second `toSimple r` above keeps the unchanged rule)
addGlobalContext :: SimpleRule -> (Category, Style) -> SimpleRule
addGlobalContext (t,c,s) (gC,gS) = (t,c `intersect` gC, s <+ gS)
-- | Compile a rule set into a single filter: flatten global rules into
-- simple ones, render each, and concatenate the rendered blocks in order.
makeFilter :: [Rule] -> Filter
makeFilter = foldl chainFilter emptyFilter . map implementRule . toSimples
-- | Render one simple rule: every condition group produced for the
-- category is wrapped in its own Show/Hide header and followed by the
-- rendered style.
implementRule :: SimpleRule -> Filter
implementRule (t, c, s) = Filter . concat $
    [ header ++ conditions ++ styleText | conditions <- implementCategory c ]
  where
    header    = show t ++ newline
    styleText = implementStyle s ++ newline
-- | Concatenate two rendered filters.
chainFilter :: Filter -> Filter -> Filter
chainFilter (Filter x) (Filter y) = Filter $ x ++ y
-- Shared line terminator for rendered filter text.
newline = "\n"
-- examples (unexported sample rule sets exercising the combinators)
simples = [rule Show (boots `union` helmets) (borderColor (0,0,0,0)), rule Show rings defaultStyle, rule Show currency defaultStyle]
global = globalRule [(baseType "Coral",borderColor (255,0,0,0)),(currency, alertSound (9, 100))] simples
| owestphal/LootLanguage | src/Filter.hs | bsd-3-clause | 2,120 | 0 | 13 | 649 | 709 | 397 | 312 | 47 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
module Cyoa.Monad where
import Cyoa.PageLang
import Prelude hiding (take, drop)
import Control.Monad.Writer
import Control.Monad.RWS
import Control.Monad.Error
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Array
import Data.Maybe
import Control.Applicative
import System.Random
-- | A clickable link emitted into the rendered page: jump to a page,
-- start a fight, or continue a fight (optionally carrying the round
-- that was just fought).
data Link = PageLink PageNum
          | StartFightLink
          | ContinueFightLink (Maybe FightRound)
          deriving (Show, Read, Eq)
-- | Accumulated output: either a screen-clearing page (title plus items)
-- or a continuation of the current page.
data Output = OutputClear String [OutputItem]
            | OutputContinue [OutputItem]
-- | Concatenation of output: a clear on the right-hand side wipes
-- anything produced before it; otherwise item lists are appended.
-- The previous first equation only accepted a clear after an /empty/
-- continuation, so e.g. @OutputContinue [i] `mappend` OutputClear t is@
-- (and @OutputClear ... `mappend` OutputClear ...@) crashed with a
-- pattern-match failure; the catch-all fixes that.
instance Monoid Output where
  mempty = OutputContinue []
  _ `mappend` output@(OutputClear _ _) = output
  (OutputClear title is) `mappend` (OutputContinue is') = OutputClear title (is `mappend` is')
  (OutputContinue is) `mappend` (OutputContinue is') = OutputContinue (is `mappend` is')
-- | Append output items to the current page.
emit :: (MonadWriter Output m) => [OutputItem] -> m ()
emit = tell . OutputContinue
-- | Sentiment attribute for rendered text.
data OutputAttr = Good
                | Bad
                deriving Show
-- | A single renderable item: (attributed) text, a rolled die, a link,
-- the current enemies, a line break, or an image.
data OutputItem = OutText (Maybe OutputAttr) String
                | OutDie Int
                | OutLink Link String
                | OutEnemies [Enemy]
                | OutBreak
                | OutImage ImageId
-- | Plain text without an attribute.
outText = OutText Nothing
-- | A fight in progress: remaining enemies, the previous round (if any),
-- and the page items to resume with once the fight is over.
data FightState = FS { fight_enemies :: [Enemy]
                     , fight_last_round :: Maybe FightRound
                     , fight_cont :: [PageItem] }
                deriving (Show, Read)
-- | The player's inventory, flags, counters, stats and current page.
data PlayerState = PS { player_carries :: Set Item
                      , player_flags :: Set Flag
                      , player_counters :: Map Counter Int
                      , player_stats :: Map Stat (Int, Int) -- (current, initial)
                      , player_page :: PageNum
                      }
                 deriving (Show, Read)
-- | Complete game state: the player, an optional fight, and the dice
-- rolled on the current page.
data GameState = GS { player_state :: PlayerState
                    , fight_state :: Maybe FightState
                    , page_state :: Map Die Int
                    }
               deriving (Show, Read)
-- | Events that abort normal page evaluation.
data GameEvent = DeathEvent
               | WinEvent
               | FightEvent
               deriving Show
instance Error GameEvent where
  noMsg = error "noMsg"
-- | The game monad: 'GameEvent' short-circuiting, a reader of
-- (page array, current item context), an 'Output' writer, and the
-- mutable 'GameState'.
newtype CyoaT m a = CyoaT { unCyoaT :: ErrorT GameEvent (RWST (Array PageNum Page, [PageItem]) Output GameState m) a }
  deriving (Monad, Functor, MonadIO,
            MonadError GameEvent,
            MonadState GameState, MonadReader (Array PageNum Page, [PageItem]), MonadWriter Output,
            Applicative)
modifyPlayerState :: (Monad m) => (PlayerState -> PlayerState) -> CyoaT m ()
modifyPlayerState f = modify $ \gs -> gs{ player_state = f (player_state gs) }
-- NOTE(review): uses 'fromJust', so this crashes unless a fight is in
-- progress -- callers must guarantee @fight_state@ is a 'Just'.
modifyFightState :: (Monad m) => (FightState -> FightState) -> CyoaT m ()
modifyFightState f = modify $ \gs -> gs{ fight_state = f' (fight_state gs) }
  where f' = Just . f. fromJust
-- | Is the item in the player's inventory?
carries :: (Monad m) => Item -> CyoaT m Bool
carries item =
  gets $ Set.member item . player_carries . player_state
takeItem :: (Monad m) => Item -> CyoaT m ()
takeItem item =
  modifyPlayerState $ \ps -> ps { player_carries = Set.insert item (player_carries ps) }
dropItem :: (Monad m) => Item -> CyoaT m ()
dropItem item =
  modifyPlayerState $ \ps -> ps { player_carries = Set.delete item (player_carries ps) }
flagSet :: (Monad m) => Flag -> CyoaT m Bool
flagSet flag = gets $ Set.member flag . player_flags . player_state
setFlag :: (Monad m) => Flag -> CyoaT m ()
setFlag flag =
  modifyPlayerState $ \ps -> ps { player_flags = Set.insert flag (player_flags ps) }
resetFlag :: (Monad m) => Flag -> CyoaT m ()
resetFlag flag =
  modifyPlayerState $ \ps -> ps { player_flags = Set.delete flag (player_flags ps) }
-- | Counter lookup; a counter that was never set reads as 0.
getCounter :: (Monad m) => Counter -> CyoaT m Int
getCounter counter = do
  lookup <- gets $ Map.lookup counter . player_counters . player_state
  return $ 0 `fromMaybe` lookup
-- | Apply a function to a counter; an unset counter starts from 0.
modifyCounter :: (Monad m) => (Int -> Int) -> Counter -> CyoaT m ()
modifyCounter f counter = do
  modifyPlayerState $ \ps -> ps { player_counters = Map.alter f' counter (player_counters ps) }
  where f' mx = Just $ f $ 0 `fromMaybe` mx
-- | Current value of a stat.  NOTE(review): 'fromJust' assumes every
-- 'Stat' key is present in @player_stats@ (they are initialised in
-- 'mkPlayer') -- confirm if new stats are added.
getStat :: (Monad m) => Stat -> CyoaT m Int
getStat a = gets $ fst . fromJust . Map.lookup a . player_stats . player_state
-- | Value of a die rolled on this page; crashes if it was never stored.
getDice :: (Monad m) => Die -> CyoaT m Int
getDice d = gets (fromJust . Map.lookup d . page_state)
-- | Remember a die rolled on the current page.
addDice :: (Monad m) => Die -> Int -> CyoaT m ()
addDice d value = modify $ \gs -> gs{ page_state = Map.insert d value (page_state gs) }
-- | Forget all per-page dice.
clearDice :: (Monad m) => CyoaT m ()
clearDice = modify $ \gs -> gs{ page_state = Map.empty }
-- | Abort evaluation with the player's death.
die :: (Monad m) => CyoaT m ()
die = do
  throwError DeathEvent
-- | Apply a function to a stat, clamping the result to @[0, initial]@.
-- Dropping Health to 0 prints the death message and ends the adventure.
modifyStat :: (Monad m) => (Int -> Int) -> Stat -> CyoaT m ()
modifyStat f stat = do
  modifyPlayerState $ \ps -> ps { player_stats = Map.alter (Just . f' . fromJust) stat (player_stats ps) }
  when (stat == Health) $ do
    health <- getStat Health
    when (health == 0) $ do
      emit [outText "Életerőpontjaid elfogytak, kalandod itt véget ér."]
      die
  where f' (current, initial) = (new, initial)
          where new = clamp (0, initial) (f current)
                clamp (low, high) x | x < low = low
                                    | x > high = high
                                    | otherwise = x
-- | Who strikes in a fight round.
data Attacker = AttackerPlayer
              | AttackerEnemy
              deriving (Show, Read, Eq)
-- | One round of a fight: the attacker and whether luck was tried.
data FightRound = FightRound Attacker Bool
                deriving (Show, Read, Eq)
-- | Roll a six-sided die and emit it to the output.
roll :: (MonadWriter Output m, MonadIO m) => m Int
roll = do
  d <- liftIO $ randomRIO (1, 6)
  emit [OutDie d]
  return d
-- | Run one game step against the page list.
-- NOTE(review): the page array is hard-coded to bounds (1, 400); a book
-- with a different page count would make lookups partial -- confirm.
stepCyoa :: (Monad m) => CyoaT m a -> [Page] -> GameState -> m (Either GameEvent a, GameState, Output)
stepCyoa f pages gs = runRWST (runErrorT $ unCyoaT f) (pageArray, []) gs
  where pageArray = listArray (1, 400) pages
-- | Fresh game state with a newly rolled player, no fight, no dice.
mkGameState :: WriterT Output IO GameState
mkGameState = do
  ps <- mkPlayer
  return $ GS { player_state = ps,
                fight_state = Nothing,
                page_state = Map.empty}
-- | Roll up a new player (agility 6+1d6, health 12+2d6, luck 6+1d6),
-- narrating each roll, and link to page 1.  (The emitted text is the
-- game's Hungarian prose and must stay as-is.)
mkPlayer :: WriterT Output IO PlayerState
mkPlayer = do
  tell $ OutputClear "Új kaland" []
  emit [outText "Ügyesség pontjaid: 6 + "]
  agility <- (6+) <$> roll
  emit [outText $ " = " ++ show agility, OutBreak]
  emit [outText "Életerő pontjaid: 12 + "]
  health <- (12+) <$> ((+) <$> roll <*> roll)
  emit [outText $ " = " ++ show health, OutBreak]
  emit [outText "Szerencse pontjaid: 6 + "]
  luck <- (6+) <$> roll
  emit [outText $ " = " ++ show luck, OutBreak]
  emit [ OutLink (PageLink 1) "És most lapozz az 1. oldalra..." ]
  return PS { player_carries = Set.empty,
              player_flags = Set.empty,
              player_counters = Map.empty,
              player_stats = Map.fromList [ (Luck, (luck, luck))
                                          , (Agility, (agility, agility))
                                          , (Health, (health, health))],
              player_page = 1 } -- fight test: 73, death test: 323, dice roll: 5, 31
| vikikiss/cyoa | src/Cyoa/Monad.hs | bsd-3-clause | 7,025 | 0 | 15 | 1,947 | 2,447 | 1,307 | 1,140 | 157 | 1 |
import qualified Data.Map as Map
import Control.Monad.State (runState)
import System (getArgs)
import Text.XML.HaXml
import Text.XML.HaXml.Pretty
import GenNew
import GenXml
import ParseXml
-- | Read an OSM XML file (first argument), regenerate its ids, and write
-- the transformed XML to the output path (second argument).
--
-- The previous version indexed @args@ with @!!@, crashing with an opaque
-- "index too large" error when arguments were missing; we now pattern
-- match and report usage instead.
main :: IO ()
main = do
  args <- getArgs
  case args of
    (inFile : outFile : _) -> do
      input <- readFile inFile
      -- Run the id-renaming pass with a fresh (counter, mapping) state
      -- and pretty-print the resulting document.
      let out = render . document . genXml . fst
              . (\f -> runState (genNewIds f) (0, Map.empty))
              $ parseXml input
      writeFile outFile out
    _ -> error "usage: <input.xml> <output.xml>"
| molysgaard/OsmXmlTool | CommandLine.hs | bsd-3-clause | 516 | 0 | 16 | 98 | 164 | 90 | 74 | 14 | 1 |
-- | A description of the platform we're compiling for.
--
module Platform (
Platform(..),
Arch(..),
OS(..),
ArmISA(..),
ArmISAExt(..),
target32Bit,
osElfTarget
)
where
-- | Contains enough information for the native code generator to emit
--      code for this platform.
data Platform
        = Platform {
              platformArch                     :: Arch,
              platformOS                       :: OS,
              -- Word size in bytes: 4 on 32-bit targets, 8 on 64-bit.
              platformWordSize                 :: {-# UNPACK #-} !Int,
              platformHasGnuNonexecStack       :: Bool,
              platformHasSubsectionsViaSymbols :: Bool
          }
        deriving (Read, Show, Eq)
-- | Architectures that the native code generator knows about.
--      TODO: It might be nice to extend these constructors with information
--      about what instruction set extensions an architecture might support.
--
data Arch
        = ArchUnknown
        | ArchX86
        | ArchX86_64
        | ArchPPC
        | ArchPPC_64
        | ArchSPARC
        | ArchARM
          { armISA :: ArmISA
          , armISAExt :: [ArmISAExt] }
        deriving (Read, Show, Eq)
-- | Operating systems that the native code generator knows about.
--      Having OSUnknown should produce a sensible default, but no promises.
data OS
        = OSUnknown
        | OSLinux
        | OSDarwin
        | OSSolaris2
        | OSMinGW32
        | OSFreeBSD
        | OSOpenBSD
        | OSNetBSD
        | OSKFreeBSD
        | OSHaiku
        deriving (Read, Show, Eq)
-- | ARM Instruction Set Architecture and Extensions
--
data ArmISA
    = ARMv5
    | ARMv6
    | ARMv7
    deriving (Read, Show, Eq)
data ArmISAExt
    = VFPv2
    | VFPv3
    | VFPv3D16
    | NEON
    | IWMMX2
    deriving (Read, Show, Eq)
-- | True exactly when the platform's word size is 4 bytes.
target32Bit :: Platform -> Bool
target32Bit = (== 4) . platformWordSize
-- | This predicate tells us whether the OS supports ELF-like shared
-- libraries.
osElfTarget :: OS -> Bool
osElfTarget os = case os of
  OSLinux    -> True
  OSFreeBSD  -> True
  OSOpenBSD  -> True
  OSNetBSD   -> True
  OSSolaris2 -> True
  OSKFreeBSD -> True
  OSHaiku    -> True
  OSDarwin   -> False
  OSMinGW32  -> False
  OSUnknown  -> False
-- Defaulting to False is safe; it means don't rely on any
-- ELF-specific functionality. It is important to have a default for
-- portability, otherwise we have to answer this question for every
-- new platform we compile on (even unreg).
| ilyasergey/GHC-XAppFix | compiler/utils/Platform.hs | bsd-3-clause | 2,539 | 0 | 9 | 810 | 401 | 242 | 159 | 64 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE GADTs #-}
module Main ( main ) where
import Control.Monad.Fix (MonadFix)
import qualified Data.ByteString.Char8 as B8
import qualified Data.Text as T
import Reflex.Dom.Core
import Reflex.Dom.SemanticUI
import Servant.API
import Servant.Reflex
import Zero.Widget
------------------------------------------------------------------------------
-- Main
------------------------------------------------------------------------------
-- | Entry point: install the bundled Semantic UI stylesheet and mount
-- the 'run' widget as the page body.
main :: IO ()
main = do
  mainWidgetWithCss (B8.pack $ T.unpack semanticCSS) $ run
------------------------------------------------------------------------------
-- Runtime
------------------------------------------------------------------------------
-- | Top-level widget: renders an (empty) Semantic UI grid and divider,
-- then the shared 'footer' widget.
run :: forall t m.
  ( SupportsServantReflex t m
  , DomBuilder t m
  , DomBuilderSpace m ~ GhcjsDomSpace
  , MonadFix m
  , PostBuild t m
  , MonadHold t m
  , MonadWidget t m
  ) => m ()
run = do
  -- Both divs are placeholders: their inner widget is @return ()@.
  divClass "ui grid" $ return ()
  divClass "ui divider" $ return ()
  footer
| et4te/zero | frontend/Profile.hs | bsd-3-clause | 1,202 | 0 | 12 | 250 | 228 | 128 | 100 | 30 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
module Data.Succinct
( AsBits(..)
, FromBits(..)
, bitsToString
, bitsShows
, fromBitsDiff
, fromBitsDiffN
, stringToBits
)
where
import Data.Bits
import Data.String
import Data.Word
-- | Bits of @a@, least significant first, from index @n@ (inclusive) to
-- @len@ (exclusive), prepended onto @bs@.  @n > len@ is a programming
-- error.
bitsDiff' :: FiniteBits a => a -> Int -> Int -> [Bool] -> [Bool]
bitsDiff' a n len bs
  | n < len   = testBit a n : bitsDiff' a (n + 1) len bs
  | n == len  = bs
  | otherwise = error "Invalid index"
-- | All bits of a value as a list, least significant bit first.
bits :: AsBits a => a -> [Bool]
bits a = bitsDiff a []
-- | Render a bit list as '1'/'0' characters onto an existing tail.
bitsShows' :: [Bool] -> ShowS
bitsShows' []         s = s
bitsShows' (True :bs) s = '1':bitsShows' bs s
bitsShows' (False:bs) s = '0':bitsShows' bs s
bitsShows :: AsBits a => a -> ShowS
bitsShows = bitsShows' . bits
-- | Render all bits of a value as a 'String' of '1'/'0', LSB first.
bitsToString :: AsBits a => a -> String
bitsToString bs = bitsShows bs ""
-- unbits :: AsBits a => [Bool] -> (a, [Bool])
-- unbits a = _uu
-- bitsUnshows :: AsBits a => String -> (a, String)
-- bitsUnshows = bitsShows' . bits
--
-- stringToBits :: AsBits a => String -> (a, String)
-- stringToBits as = _u
-- | Types whose bits can be prepended onto a bit list
-- (difference-list style).
class AsBits a where
  bitsDiff :: a -> [Bool] -> [Bool]
-- | Types decodable from the front of a bit list: returns the value (if
-- enough bits were available) and the remaining bits.
class FromBits a where
  fromBits1 :: [Bool] -> (Maybe a, [Bool])
--------------------------------------------------------------------------------
instance AsBits Bool where
  bitsDiff = (:)
instance AsBits Word8 where
  bitsDiff a = bitsDiff' a 0 (finiteBitSize a)
instance AsBits a => AsBits [a] where
  bitsDiff []     = id
  bitsDiff (x:xs) = bitsDiff x . bitsDiff xs
instance FromBits Bool where
  fromBits1 []     = (Nothing, [])
  fromBits1 (b:bs) = (Just b, bs)
-- Each @if@ below must be parenthesised: @if a then 0x01 else 0 .|. rest@
-- parses as @if a then 0x01 else (0 .|. rest)@ (a conditional extends as
-- far right as possible), which made the original decoder return only the
-- lowest set bit -- e.g. eight True bits decoded to 0x01 instead of 0xFF.
instance FromBits Word8 where
  fromBits1 (a:b:c:d:e:f:g:h:bs) = (,)
    (Just $ (if a then 0x01 else 0) .|.
            (if b then 0x02 else 0) .|.
            (if c then 0x04 else 0) .|.
            (if d then 0x08 else 0) .|.
            (if e then 0x10 else 0) .|.
            (if f then 0x20 else 0) .|.
            (if g then 0x40 else 0) .|.
            (if h then 0x80 else 0))
    bs
  fromBits1 bs = (Nothing, bs)
-- Same parenthesisation fix as the 'Word8' instance above.
instance FromBits Word16 where
  fromBits1 (a:b:c:d:e:f:g:h:i:j:k:l:m:n:o:p:bs) = (,)
    (Just $ (if a then 0x0001 else 0) .|.
            (if b then 0x0002 else 0) .|.
            (if c then 0x0004 else 0) .|.
            (if d then 0x0008 else 0) .|.
            (if e then 0x0010 else 0) .|.
            (if f then 0x0020 else 0) .|.
            (if g then 0x0040 else 0) .|.
            (if h then 0x0080 else 0) .|.
            (if i then 0x0100 else 0) .|.
            (if j then 0x0200 else 0) .|.
            (if k then 0x0400 else 0) .|.
            (if l then 0x0800 else 0) .|.
            (if m then 0x1000 else 0) .|.
            (if n then 0x2000 else 0) .|.
            (if o then 0x4000 else 0) .|.
            (if p then 0x8000 else 0))
    bs
  fromBits1 bs = (Nothing, bs)
-- | Decode as many values as possible from the bit list, returning them
-- as a difference list together with the bits left over after the first
-- failed decode.
fromBitsDiff :: FromBits a => [Bool] -> ([a] -> [a], [Bool])
fromBitsDiff bs = case fromBits1 bs of
  (Nothing, rs) -> (id     , rs)
  (Just a , rs) -> case fromBitsDiff rs of
    (f, ss) -> ((a:) . f, ss)
-- | Decode at most @n@ values, returning them as a difference list plus
-- the remaining bits; decoding stops early when 'fromBits1' cannot
-- produce a value.  A negative count is a programming error.
--
-- The previous version had two further guards (@null bs@ and
-- @otherwise@) after the @n > 0@ / @n == 0@ / @n < 0@ comparisons; for an
-- 'Int' count those three cases are already exhaustive, so the extra
-- guards were unreachable dead code and have been removed.
fromBitsDiffN :: FromBits a => Int -> [Bool] -> ([a] -> [a], [Bool])
fromBitsDiffN n bs
  | n > 0 = case fromBits1 bs of
      (Nothing, rs) -> (id, rs)
      (Just a , rs) -> case fromBitsDiffN (n - 1) rs of
        (f, ss) -> ((a:) . f, ss)
  | n == 0    = (id, bs)
  | otherwise = error "Invalid count"
-- | Parse a textual bit pattern: '1' and '0' become bits, spaces and
-- newlines are skipped, and any other character is rejected.
stringToBits :: String -> [Bool]
stringToBits = concatMap charToBits
  where
    charToBits '1'  = [True]
    charToBits '0'  = [False]
    charToBits ' '  = []
    charToBits '\n' = []
    charToBits _    = error "Invalid bit"
| haskell-works/conduit-succinct-json | src/Data/Succinct.hs | bsd-3-clause | 3,651 | 0 | 39 | 1,085 | 1,466 | 792 | 674 | 96 | 2 |
Subsets and Splits