code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity
---|---|---|---|---|---|---|---|---|---|---|---|---|
stringlengths 5 to 1.03M | stringlengths 5 to 90 | stringlengths 4 to 158 | stringclasses (15 values) | int64 5 to 1.03M | int64 0 to 53.9k | int64 2 to 4.17k | int64 0 to 365k | int64 3 to 317k | int64 1 to 171k | int64 1 to 146k | int64 -1 to 37.3k | int64 -1 to 1.31k
{-|
Module : Idris.Elab.Data
Description : Code to elaborate data structures.
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE CPP, PatternGuards #-}
module Idris.Elab.Data(elabData) where
import Idris.AbsSyntax
import Idris.ASTUtils
import Idris.Core.Evaluate
import Idris.Core.TT
import Idris.Core.Typecheck
import Idris.Delaborate
import Idris.Docstrings
import Idris.Elab.Rewrite
import Idris.Elab.Type
import Idris.Elab.Utils
import Idris.Elab.Value
import Idris.Error
import Idris.Output (iWarn, sendHighlighting)
import Util.Pretty
#if (MIN_VERSION_base(4,11,0))
import Prelude hiding (id, (.), (<>))
#else
import Prelude hiding (id, (.))
#endif
import Control.Category
import Control.Monad
import Data.List
import qualified Data.Set as S
warnLC :: FC -> Name -> Idris ()
warnLC fc n
= iWarn fc $ annName n <+> text "has a name which may be implicitly bound."
<> line <> text "This is likely to lead to problems!"
elabData :: ElabInfo -> SyntaxInfo -> Docstring (Either Err PTerm)-> [(Name, Docstring (Either Err PTerm))] -> FC -> DataOpts -> PData -> Idris ()
elabData info syn doc argDocs fc opts (PLaterdecl n nfc t_in)
= do logElab 1 (show (fc, doc))
checkUndefined fc n
when (implicitable (nsroot n)) $ warnLC fc n
(cty, _, t, inacc) <- buildType info syn fc [] n t_in
addIBC (IBCDef n)
updateContext (addTyDecl n (TCon 0 0) cty) -- temporary, to check cons
sendHighlighting $ S.fromList [(FC' nfc, AnnName n Nothing Nothing Nothing)]
elabData info syn doc argDocs fc opts (PDatadecl n nfc t_in dcons)
= do let codata = Codata `elem` opts
logElab 1 (show fc)
undef <- isUndefined fc n
when (implicitable (nsroot n)) $ warnLC fc n
(cty, ckind, t, inacc) <- buildType info syn fc [] n t_in
-- if n is defined already, make sure it is just a type declaration
-- with the same type we've just elaborated, and no constructors
-- yet
i <- getIState
checkDefinedAs fc n cty i
-- temporary, to check cons
when undef $ updateContext (addTyDecl n (TCon 0 0) cty)
let cnameinfo = cinfo info (map cname dcons)
unique <- case getRetTy (normalise (tt_ctxt i) [] cty) of
UType UniqueType -> return True
UType _ -> return False
TType _ -> return False
rt -> tclift $ tfail (At fc (Elaborating "type constructor " n Nothing (Msg "Not a valid type constructor")))
cons <- mapM (elabCon cnameinfo syn n codata (getRetTy cty) ckind) dcons
ttag <- getName
ctxt <- getContext
let params = findParams n (normalise ctxt [] cty) (map snd cons)
logElab 2 $ "Parameters : " ++ show params
addParamConstraints fc params cty cons
i <- getIState
-- TI contains information about mutually declared types - this will
-- be updated when the mutual block is complete
putIState (i { idris_datatypes =
addDef n (TI (map fst cons) codata opts params [n]
(any linearArg (map snd cons)))
(idris_datatypes i) })
addIBC (IBCDef n)
addIBC (IBCData n)
checkDocs fc argDocs t
doc' <- elabDocTerms info doc
argDocs' <- mapM (\(n, d) -> do d' <- elabDocTerms info d
return (n, d')) argDocs
addDocStr n doc' argDocs'
addIBC (IBCDoc n)
let metainf = DataMI params
addIBC (IBCMetaInformation n metainf)
-- TMP HACK! Make this a data option
updateContext (addDatatype (Data n ttag cty unique cons))
updateContext (setMetaInformation n metainf)
mapM_ totcheck (zip (repeat fc) (map fst cons))
-- mapM_ (checkPositive n) cons
-- if there's exactly one constructor,
-- mark both the type and the constructor as detaggable
case cons of
[(cn,ct)] -> setDetaggable cn >> setDetaggable n
>> addIBC (IBCOpt cn) >> addIBC (IBCOpt n)
_ -> return ()
-- create a rewriting lemma
when (n /= sUN "=") $
elabRewriteLemma info n cty
-- Emit highlighting info
sendHighlighting $ S.fromList $ [(FC' nfc, AnnName n Nothing Nothing Nothing)] ++
map (\(_, _, n, nfc, _, _, _) ->
(FC' nfc, AnnName n Nothing Nothing Nothing))
dcons
where
checkDefinedAs fc n t i
= let defined = tclift $ tfail (At fc (AlreadyDefined n))
ctxt = tt_ctxt i in
case lookupDef n ctxt of
[] -> return ()
[TyDecl _ ty] ->
case converts ctxt [] t ty of
OK () -> case lookupCtxtExact n (idris_datatypes i) of
Nothing -> return ()
_ -> defined
_ -> defined
_ -> defined
cname (_, _, n, _, _, _, _) = n
-- Abuse of ElabInfo.
-- TODO Contemplate whether the ElabInfo type needs modification.
cinfo :: ElabInfo -> [Name] -> ElabInfo
cinfo info ds
= let newps = params info
dsParams = map (\n -> (n, [])) ds
newb = addAlist dsParams (inblock info) in
info { params = newps,
inblock = newb,
liftname = id -- Is this appropriate?
}
elabCon :: ElabInfo -> SyntaxInfo -> Name -> Bool ->
Type -> -- for unique kind checking
Type -> -- data type's kind
(Docstring (Either Err PTerm), [(Name, Docstring (Either Err PTerm))], Name, FC, PTerm, FC, [Name]) ->
Idris (Name, Type)
elabCon info syn tn codata expkind dkind (doc, argDocs, n, nfc, t_in, fc, forcenames)
= do checkUndefined fc n
when (implicitable (nsroot n)) $ warnLC fc n
logElab 2 $ show fc ++ ":Constructor " ++ show n ++ " : " ++ show t_in
(cty, ckind, t, inacc) <- buildType info syn fc [Constructor] n (if codata then mkLazy t_in else t_in)
ctxt <- getContext
let cty' = normalise ctxt [] cty
checkUniqueKind ckind expkind
-- Check that the constructor type is, in fact, a part of the family being defined
tyIs n cty'
-- Need to calculate forceability from the non-normalised type,
-- because we might not be able to export the definitions that
-- we're normalising, which would change the forceability status!
let force = if tn == sUN "Delayed"
then [] -- TMP HACK! Totality checker needs this info
else forceArgs ctxt tn cty
logElab 5 $ show fc ++ ":Constructor " ++ show n ++ " elaborated : " ++ show t
logElab 5 $ "Inaccessible args: " ++ show inacc
logElab 5 $ "Forceable args: " ++ show force
logElab 2 $ "---> " ++ show n ++ " : " ++ show cty
-- Add to the context (this is temporary, so that later constructors
-- can be indexed by it)
updateContext (addTyDecl n (DCon 0 0 False) cty)
addIBC (IBCDef n)
checkDocs fc argDocs t
doc' <- elabDocTerms info doc
argDocs' <- mapM (\(n, d) -> do d' <- elabDocTerms info d
return (n, d')) argDocs
addDocStr n doc' argDocs'
addIBC (IBCDoc n)
fputState (opt_inaccessible . ist_optimisation n) inacc
fputState (opt_forceable . ist_optimisation n) force
addIBC (IBCOpt n)
return (n, cty)
where
tyIs con (Bind n b sc) = tyIs con (substV (P Bound n Erased) sc)
tyIs con t | (P Bound n' _, _) <- unApply t
= if n' /= tn then
tclift $ tfail (At fc (Elaborating "constructor " con Nothing
(Msg ("Type level variable " ++ show n' ++ " is not " ++ show tn))))
else return ()
tyIs con t | (P _ n' _, _) <- unApply t
= if n' /= tn then tclift $ tfail (At fc (Elaborating "constructor " con Nothing (Msg (show n' ++ " is not " ++ show tn))))
else return ()
tyIs con t = tclift $ tfail (At fc (Elaborating "constructor " con Nothing (Msg (show t ++ " is not " ++ show tn))))
mkLazy (PPi pl n nfc ty sc)
= let ty' = if getTyName ty
then PApp fc (PRef fc [] (sUN "Delayed"))
[pexp (PRef fc [] (sUN "Infinite")),
pexp ty]
else ty in
PPi pl n nfc ty' (mkLazy sc)
mkLazy t = t
getTyName (PApp _ (PRef _ _ n) _) = n == nsroot tn
getTyName (PRef _ _ n) = n == nsroot tn
getTyName _ = False
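-- Illustration (comment added; not from the original source): for a codata
-- declaration such as
--
-- >  codata Stream : Type -> Type where
-- >    (::) : a -> Stream a -> Stream a
--
-- 'mkLazy' rewrites every argument whose head is the type being defined, so
-- the constructor is elaborated as if it had been written
--
-- >    (::) : a -> Delayed Infinite (Stream a) -> Stream a
--
-- while non-recursive arguments and the return type are left untouched.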
-- if the constructor is a UniqueType, the datatype must be too
-- (AnyType is fine, since that is checked for uniqueness too)
-- if the constructor is AnyType, the datatype must be at least AnyType
checkUniqueKind (UType NullType) (UType NullType) = return ()
checkUniqueKind (UType NullType) _
= tclift $ tfail (At fc (UniqueKindError NullType n))
checkUniqueKind (UType UniqueType) (UType UniqueType) = return ()
checkUniqueKind (UType UniqueType) (UType AllTypes) = return ()
checkUniqueKind (UType UniqueType) _
= tclift $ tfail (At fc (UniqueKindError UniqueType n))
checkUniqueKind (UType AllTypes) (UType AllTypes) = return ()
checkUniqueKind (UType AllTypes) (UType UniqueType) = return ()
checkUniqueKind (UType AllTypes) _
= tclift $ tfail (At fc (UniqueKindError AllTypes n))
checkUniqueKind _ _ = return ()
forceArgs :: Context -> Name -> Type -> [Int]
forceArgs ctxt tn ty = forceFrom 0 ty
where
-- for each argument, substitute in MN pos "FF"
-- then when we look at the return type, if we see MN pos name
-- constructor guarded, then 'pos' is a forceable position
forceFrom :: Int -> Type -> [Int]
forceFrom i (Bind n (Pi _ _ _ _) sc)
= forceFrom (i + 1) (substV (P Ref (sMN i "FF") Erased) sc)
forceFrom i sc
-- Go under the top level type application
-- We risk affecting erasure of more complex indices, so we'll only
-- mark something forced if *everything* which appears in an index
-- is forceable
-- (FIXME: Actually the real risk is if we erase something a programmer
-- definitely wants, which is particularly the case with 'views'.
-- So perhaps we need a way of marking that in the source?)
| (P _ ty _, args) <- unApply sc,
ty == tn -- Must be the right top level type!
= if null (concatMap (findNonForcePos True) args)
then nub (concatMap findForcePos args)
else []
forceFrom i sc = []
findForcePos (P _ (MN i ff) _)
| ff == txt "FF" = [i]
-- Only look under constructors in applications
findForcePos ap@(App _ f a)
| (P _ con _, args) <- unApply ap,
isDConName con ctxt
= nub $ concatMap findForcePos args
findForcePos _ = []
findNonForcePos fok (P _ (MN i ff) _)
| ff == txt "FF" = if fok then [] else [i]
-- Look under non-constructors in applications for things which can't
-- be forced
findNonForcePos fok ap@(App _ f a)
| (P _ con _, args) <- unApply ap
= nub $ concatMap (findNonForcePos (fok && isConName con ctxt)) args
findNonForcePos _ _ = []
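-- Worked sketch (comment added; not from the original source): for
--
-- >  data Vect : Nat -> Type -> Type where
-- >    (::) : a -> Vect n a -> Vect (S n) a
--
-- substituting the FF markers for the constructor's arguments gives the
-- return type Vect (S n) a, in which both implicit arguments 'n' and 'a'
-- reappear (directly, or under the constructor 'S') and nothing
-- non-forceable appears alongside them, so both positions are reported as
-- forceable and need not be stored at run time.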
addParamConstraints :: FC -> [Int] -> Type -> [(Name, Type)] -> Idris ()
addParamConstraints fc ps cty cons
= case getRetTy cty of
TType cvar -> mapM_ (addConConstraint ps cvar)
(map getParamNames cons)
_ -> return ()
where
getParamNames (n, ty) = (ty, getPs ty)
getPs (Bind n (Pi _ _ _ _) sc)
= getPs (substV (P Ref n Erased) sc)
getPs t | (f, args) <- unApply t
= paramArgs 0 args
paramArgs i (P _ n _ : args) | i `elem` ps = n : paramArgs (i + 1) args
paramArgs i (_ : args) = paramArgs (i + 1) args
paramArgs i [] = []
addConConstraint ps cvar (ty, pnames) = constraintTy ty
where
constraintTy (Bind n (Pi _ _ ty _) sc)
= case getRetTy ty of
TType avar -> do tit <- typeInType
when (not tit) $ do
ctxt <- getContext
let tv = next_tvar ctxt
let con = if n `elem` pnames
then ULE avar cvar
else ULT avar cvar
addConstraints fc (tv, [con])
addIBC (IBCConstraint fc con)
_ -> return ()
constraintTy t = return ()
| kojiromike/Idris-dev | src/Idris/Elab/Data.hs | bsd-3-clause | 13,166 | 0 | 21 | 4,660 | 4,072 | 2,022 | 2,050 | 223 | 20 |
{-# LINE 1 "Database/MySQL/Base/Types.hsc" #-}
{-# LANGUAGE DeriveDataTypeable, EmptyDataDecls, ForeignFunctionInterface #-}
{-# LINE 2 "Database/MySQL/Base/Types.hsc" #-}
-- |
-- Module: Database.MySQL.Base.Types
-- Copyright: (c) 2011 MailRank, Inc.
-- License: BSD3
-- Maintainer: Bryan O'Sullivan <[email protected]>
-- Stability: experimental
-- Portability: portable
--
-- Types for working with the direct bindings to the C @mysqlclient@
-- API.
module Database.MySQL.Base.Types
(
-- * Types
-- * High-level types
Type(..)
, Seconds
, Protocol(..)
, Option(..)
, Field(..)
, FieldFlag
, FieldFlags
-- * Low-level types
, MYSQL
, MYSQL_RES
, MYSQL_ROW
, MYSQL_ROWS
, MYSQL_ROW_OFFSET
, MyBool
-- * Field flags
, hasAllFlags
, flagNotNull
, flagPrimaryKey
, flagUniqueKey
, flagMultipleKey
, flagUnsigned
, flagZeroFill
, flagBinary
, flagAutoIncrement
, flagNumeric
, flagNoDefaultValue
-- * Connect flags
, toConnectFlag
) where
{-# LINE 49 "Database/MySQL/Base/Types.hsc" #-}
import Control.Applicative ((<$>), (<*>), pure)
import Data.Bits ((.|.), (.&.))
import Data.ByteString hiding (intercalate)
import Data.ByteString.Internal (create, memcpy)
import Data.List (intercalate)
import Data.Maybe (catMaybes)
import Data.Monoid (Monoid(..))
import Data.Typeable (Typeable)
import Data.Word (Word, Word8)
import Foreign.C.Types (CChar, CInt, CUInt, CULong)
import Foreign.Ptr (Ptr)
import Foreign.Storable (Storable(..), peekByteOff)
import qualified Data.IntMap as IntMap
data MYSQL
data MYSQL_RES
data MYSQL_ROWS
type MYSQL_ROW = Ptr (Ptr CChar)
type MYSQL_ROW_OFFSET = Ptr MYSQL_ROWS
type MyBool = CChar
-- | Column types supported by MySQL.
data Type = Decimal
| Tiny
| Short
| Long
| Float
| Double
| Null
| Timestamp
| LongLong
| Int24
| Date
| Time
| DateTime
| Year
| NewDate
| VarChar
| Bit
| NewDecimal
| Enum
| Set
| TinyBlob
| MediumBlob
| LongBlob
| Blob
| VarString
| String
| Geometry
deriving (Enum, Eq, Show, Typeable)
toType :: CInt -> Type
toType v = IntMap.findWithDefault oops (fromIntegral v) typeMap
where
oops = error $ "Database.MySQL: unknown field type " ++ show v
typeMap = IntMap.fromList [
((0), Decimal),
{-# LINE 107 "Database/MySQL/Base/Types.hsc" #-}
((1), Tiny),
{-# LINE 108 "Database/MySQL/Base/Types.hsc" #-}
((2), Short),
{-# LINE 109 "Database/MySQL/Base/Types.hsc" #-}
((3), Long),
{-# LINE 110 "Database/MySQL/Base/Types.hsc" #-}
((4), Float),
{-# LINE 111 "Database/MySQL/Base/Types.hsc" #-}
((5), Double),
{-# LINE 112 "Database/MySQL/Base/Types.hsc" #-}
((6), Null),
{-# LINE 113 "Database/MySQL/Base/Types.hsc" #-}
((7), Timestamp),
{-# LINE 114 "Database/MySQL/Base/Types.hsc" #-}
((8), LongLong),
{-# LINE 115 "Database/MySQL/Base/Types.hsc" #-}
((10), Date),
{-# LINE 116 "Database/MySQL/Base/Types.hsc" #-}
((11), Time),
{-# LINE 117 "Database/MySQL/Base/Types.hsc" #-}
((12), DateTime),
{-# LINE 118 "Database/MySQL/Base/Types.hsc" #-}
((13), Year),
{-# LINE 119 "Database/MySQL/Base/Types.hsc" #-}
((14), NewDate),
{-# LINE 120 "Database/MySQL/Base/Types.hsc" #-}
((15), VarChar),
{-# LINE 121 "Database/MySQL/Base/Types.hsc" #-}
((16), Bit),
{-# LINE 122 "Database/MySQL/Base/Types.hsc" #-}
((246), NewDecimal),
{-# LINE 123 "Database/MySQL/Base/Types.hsc" #-}
((247), Enum),
{-# LINE 124 "Database/MySQL/Base/Types.hsc" #-}
((248), Set),
{-# LINE 125 "Database/MySQL/Base/Types.hsc" #-}
((249), TinyBlob),
{-# LINE 126 "Database/MySQL/Base/Types.hsc" #-}
((250), MediumBlob),
{-# LINE 127 "Database/MySQL/Base/Types.hsc" #-}
((251), LongBlob),
{-# LINE 128 "Database/MySQL/Base/Types.hsc" #-}
((252), Blob),
{-# LINE 129 "Database/MySQL/Base/Types.hsc" #-}
((253), VarString),
{-# LINE 130 "Database/MySQL/Base/Types.hsc" #-}
((254), String),
{-# LINE 131 "Database/MySQL/Base/Types.hsc" #-}
((255), Geometry)
{-# LINE 132 "Database/MySQL/Base/Types.hsc" #-}
]
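-- Usage sketch (comment added; not from the original source): the numeric
-- tags above come from the MySQL client protocol and map directly onto the
-- 'Type' constructors, e.g.
--
-- > toType 0   == Decimal
-- > toType 253 == VarString
--
-- while a tag missing from 'typeMap' raises the "unknown field type" error.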
-- | A description of a field (column) of a table.
data Field = Field {
fieldName :: ByteString -- ^ Name of column.
, fieldOrigName :: ByteString -- ^ Original column name, if an alias.
, fieldTable :: ByteString -- ^ Table of column, if column was a field.
, fieldOrigTable :: ByteString -- ^ Original table name, if table was an alias.
, fieldDB :: ByteString -- ^ Database for table.
, fieldCatalog :: ByteString -- ^ Catalog for table.
, fieldDefault :: Maybe ByteString -- ^ Default value.
, fieldLength :: Word -- ^ Width of column (create length).
, fieldMaxLength :: Word -- ^ Maximum width for selected set.
, fieldFlags :: FieldFlags -- ^ Div flags.
, fieldDecimals :: Word -- ^ Number of decimals in field.
, fieldCharSet :: Word -- ^ Character set number.
, fieldType :: Type
} deriving (Eq, Show, Typeable)
newtype FieldFlags = FieldFlags CUInt
deriving (Eq, Typeable)
instance Show FieldFlags where
show f = '[' : z ++ "]"
where z = intercalate "," . catMaybes $ [
flagNotNull ??? "flagNotNull"
, flagPrimaryKey ??? "flagPrimaryKey"
, flagUniqueKey ??? "flagUniqueKey"
, flagMultipleKey ??? "flagMultipleKey"
, flagUnsigned ??? "flagUnsigned"
, flagZeroFill ??? "flagZeroFill"
, flagBinary ??? "flagBinary"
, flagAutoIncrement ??? "flagAutoIncrement"
, flagNumeric ??? "flagNumeric"
, flagNoDefaultValue ??? "flagNoDefaultValue"
]
flag ??? name | f `hasAllFlags` flag = Just name
| otherwise = Nothing
type FieldFlag = FieldFlags
instance Monoid FieldFlags where
mempty = FieldFlags 0
{-# INLINE mempty #-}
mappend (FieldFlags a) (FieldFlags b) = FieldFlags (a .|. b)
{-# INLINE mappend #-}
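-- Illustrative sketch (comment added; not from the original source): field
-- flags form a bitmask, 'mappend' is bitwise OR and 'hasAllFlags' tests for
-- a subset of bits, so with the constants defined below:
--
-- > (flagNotNull `mappend` flagUnsigned) `hasAllFlags` flagNotNull == True
-- > (flagNotNull `mappend` flagUnsigned) `hasAllFlags` flagBinary  == False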
flagNotNull, flagPrimaryKey, flagUniqueKey, flagMultipleKey :: FieldFlag
flagNotNull = FieldFlags 1
{-# LINE 181 "Database/MySQL/Base/Types.hsc" #-}
flagPrimaryKey = FieldFlags 2
{-# LINE 182 "Database/MySQL/Base/Types.hsc" #-}
flagUniqueKey = FieldFlags 4
{-# LINE 183 "Database/MySQL/Base/Types.hsc" #-}
flagMultipleKey = FieldFlags 8
{-# LINE 184 "Database/MySQL/Base/Types.hsc" #-}
flagUnsigned, flagZeroFill, flagBinary, flagAutoIncrement :: FieldFlag
flagUnsigned = FieldFlags 32
{-# LINE 187 "Database/MySQL/Base/Types.hsc" #-}
flagZeroFill = FieldFlags 64
{-# LINE 188 "Database/MySQL/Base/Types.hsc" #-}
flagBinary = FieldFlags 128
{-# LINE 189 "Database/MySQL/Base/Types.hsc" #-}
flagAutoIncrement = FieldFlags 512
{-# LINE 190 "Database/MySQL/Base/Types.hsc" #-}
flagNumeric, flagNoDefaultValue :: FieldFlag
flagNumeric = FieldFlags 32768
{-# LINE 193 "Database/MySQL/Base/Types.hsc" #-}
flagNoDefaultValue = FieldFlags 4096
{-# LINE 194 "Database/MySQL/Base/Types.hsc" #-}
hasAllFlags :: FieldFlags -> FieldFlags -> Bool
FieldFlags a `hasAllFlags` FieldFlags b = a .&. b == b
{-# INLINE hasAllFlags #-}
peekField :: Ptr Field -> IO Field
peekField ptr = do
flags <- FieldFlags <$> ((\hsc_ptr -> peekByteOff hsc_ptr 100)) ptr
{-# LINE 202 "Database/MySQL/Base/Types.hsc" #-}
Field
<$> peekS (((\hsc_ptr -> peekByteOff hsc_ptr 0))) (((\hsc_ptr -> peekByteOff hsc_ptr 72)))
{-# LINE 204 "Database/MySQL/Base/Types.hsc" #-}
<*> peekS (((\hsc_ptr -> peekByteOff hsc_ptr 8))) (((\hsc_ptr -> peekByteOff hsc_ptr 76)))
{-# LINE 205 "Database/MySQL/Base/Types.hsc" #-}
<*> peekS (((\hsc_ptr -> peekByteOff hsc_ptr 16))) (((\hsc_ptr -> peekByteOff hsc_ptr 80)))
{-# LINE 206 "Database/MySQL/Base/Types.hsc" #-}
<*> peekS (((\hsc_ptr -> peekByteOff hsc_ptr 24))) (((\hsc_ptr -> peekByteOff hsc_ptr 84)))
{-# LINE 207 "Database/MySQL/Base/Types.hsc" #-}
<*> peekS (((\hsc_ptr -> peekByteOff hsc_ptr 32))) (((\hsc_ptr -> peekByteOff hsc_ptr 88)))
{-# LINE 208 "Database/MySQL/Base/Types.hsc" #-}
<*> peekS (((\hsc_ptr -> peekByteOff hsc_ptr 40))) (((\hsc_ptr -> peekByteOff hsc_ptr 92)))
{-# LINE 209 "Database/MySQL/Base/Types.hsc" #-}
<*> (if flags `hasAllFlags` flagNoDefaultValue
then pure Nothing
else Just <$> peekS (((\hsc_ptr -> peekByteOff hsc_ptr 48))) (((\hsc_ptr -> peekByteOff hsc_ptr 96))))
{-# LINE 212 "Database/MySQL/Base/Types.hsc" #-}
<*> (uint <$> ((\hsc_ptr -> peekByteOff hsc_ptr 56)) ptr)
{-# LINE 213 "Database/MySQL/Base/Types.hsc" #-}
<*> (uint <$> ((\hsc_ptr -> peekByteOff hsc_ptr 64)) ptr)
{-# LINE 214 "Database/MySQL/Base/Types.hsc" #-}
<*> pure flags
<*> (uint <$> ((\hsc_ptr -> peekByteOff hsc_ptr 104)) ptr)
{-# LINE 216 "Database/MySQL/Base/Types.hsc" #-}
<*> (uint <$> ((\hsc_ptr -> peekByteOff hsc_ptr 108)) ptr)
{-# LINE 217 "Database/MySQL/Base/Types.hsc" #-}
<*> (toType <$> ((\hsc_ptr -> peekByteOff hsc_ptr 112)) ptr)
{-# LINE 218 "Database/MySQL/Base/Types.hsc" #-}
where
uint = fromIntegral :: CUInt -> Word
peekS :: (Ptr Field -> IO (Ptr Word8)) -> (Ptr Field -> IO CUInt)
-> IO ByteString
peekS peekPtr peekLen = do
p <- peekPtr ptr
l <- peekLen ptr
create (fromIntegral l) $ \d -> memcpy d p (fromIntegral l)
instance Storable Field where
sizeOf _ = (128)
{-# LINE 229 "Database/MySQL/Base/Types.hsc" #-}
alignment _ = alignment (undefined :: Ptr CChar)
peek = peekField
type Seconds = Word
data Protocol = TCP
| Socket
| Pipe
| Memory
deriving (Eq, Read, Show, Enum, Typeable)
data Option =
-- Options accepted by mysql_options.
ConnectTimeout Seconds
| Compress
| NamedPipe
| InitCommand ByteString
| ReadDefaultFile FilePath
| ReadDefaultGroup ByteString
| CharsetDir FilePath
| CharsetName String
| LocalInFile Bool
| Protocol Protocol
| SharedMemoryBaseName ByteString
| ReadTimeout Seconds
| WriteTimeout Seconds
| UseRemoteConnection
| UseEmbeddedConnection
| GuessConnection
| ClientIP ByteString
| SecureAuth Bool
| ReportDataTruncation Bool
| Reconnect Bool
| SSLVerifyServerCert Bool
-- Flags accepted by mysql_real_connect.
| FoundRows
| IgnoreSIGPIPE
| IgnoreSpace
| Interactive
| LocalFiles
| MultiResults
| MultiStatements
| NoSchema
deriving (Eq, Read, Show, Typeable)
toConnectFlag :: Option -> CULong
toConnectFlag Compress = 32
{-# LINE 276 "Database/MySQL/Base/Types.hsc" #-}
toConnectFlag FoundRows = 2
{-# LINE 277 "Database/MySQL/Base/Types.hsc" #-}
toConnectFlag IgnoreSIGPIPE = 4096
{-# LINE 278 "Database/MySQL/Base/Types.hsc" #-}
toConnectFlag IgnoreSpace = 256
{-# LINE 279 "Database/MySQL/Base/Types.hsc" #-}
toConnectFlag Interactive = 1024
{-# LINE 280 "Database/MySQL/Base/Types.hsc" #-}
toConnectFlag LocalFiles = 128
{-# LINE 281 "Database/MySQL/Base/Types.hsc" #-}
toConnectFlag MultiResults = 131072
{-# LINE 282 "Database/MySQL/Base/Types.hsc" #-}
toConnectFlag MultiStatements = 65536
{-# LINE 283 "Database/MySQL/Base/Types.hsc" #-}
toConnectFlag NoSchema = 16
{-# LINE 284 "Database/MySQL/Base/Types.hsc" #-}
toConnectFlag _ = 0
| lhuang7/mysql | dist/dist-sandbox-bd9d9ce/build/Database/MySQL/Base/Types.hs | bsd-3-clause | 12,188 | 56 | 13 | 3,229 | 2,310 | 1,403 | 907 | -1 | -1 |
{-# LANGUAGE CPP, NondecreasingIndentation #-}
{-# OPTIONS -fno-warn-incomplete-patterns -optc-DNON_POSIX_SOURCE #-}
-----------------------------------------------------------------------------
--
-- GHC Driver program
--
-- (c) The University of Glasgow 2005
--
-----------------------------------------------------------------------------
module Main (main) where
-- The official GHC API
import qualified GHC
import GHC ( -- DynFlags(..), HscTarget(..),
-- GhcMode(..), GhcLink(..),
Ghc, GhcMonad(..),
LoadHowMuch(..) )
import CmdLineParser
-- Implementations of the various modes (--show-iface, mkdependHS, etc.)
import LoadIface ( showIface )
import HscMain ( newHscEnv )
import DriverPipeline ( oneShot, compileFile )
import DriverMkDepend ( doMkDependHS )
#ifdef GHCI
import InteractiveUI ( interactiveUI, ghciWelcomeMsg, defaultGhciSettings )
#endif
-- Various other random stuff that we need
import Config
import Constants
import HscTypes
import Packages ( dumpPackages, simpleDumpPackages )
import DriverPhases
import BasicTypes ( failed )
import StaticFlags
import DynFlags
import ErrUtils
import FastString
import Outputable
import SrcLoc
import Util
import Panic
import MonadUtils ( liftIO )
-- Imports for --abi-hash
import LoadIface ( loadUserInterface )
import Module ( mkModuleName )
import Finder ( findImportedModule, cannotFindInterface )
import TcRnMonad ( initIfaceCheck )
import Binary ( openBinMem, put_, fingerprintBinMem )
-- Standard Haskell libraries
import System.IO
import System.Environment
import System.Exit
import System.FilePath
import Control.Monad
import Data.Char
import Data.List
import Data.Maybe
-----------------------------------------------------------------------------
-- ToDo:
-- time commands when run with -v
-- user ways
-- Win32 support: proper signal handling
-- reading the package configuration file is too slow
-- -K<size>
-----------------------------------------------------------------------------
-- GHC's command-line interface
main :: IO ()
main = do
initGCStatistics -- See Note [-Bsymbolic and hooks]
hSetBuffering stdout LineBuffering
hSetBuffering stderr LineBuffering
GHC.defaultErrorHandler defaultFatalMessager defaultFlushOut $ do
-- 1. extract the -B flag from the args
argv0 <- getArgs
let (minusB_args, argv1) = partition ("-B" `isPrefixOf`) argv0
mbMinusB | null minusB_args = Nothing
| otherwise = Just (drop 2 (last minusB_args))
let argv1' = map (mkGeneralLocated "on the commandline") argv1
(argv2, staticFlagWarnings) <- parseStaticFlags argv1'
-- 2. Parse the "mode" flags (--make, --interactive etc.)
(mode, argv3, modeFlagWarnings) <- parseModeFlags argv2
let flagWarnings = staticFlagWarnings ++ modeFlagWarnings
-- If all we want to do is something like showing the version number
-- then do it now, before we start a GHC session etc. This makes
-- getting basic information much more resilient.
-- In particular, if we wait until later before giving the version
-- number then bootstrapping gets confused, as it tries to find out
-- what version of GHC it's using before package.conf exists, so
-- starting the session fails.
case mode of
Left preStartupMode ->
do case preStartupMode of
ShowSupportedExtensions -> showSupportedExtensions
ShowVersion -> showVersion
ShowNumVersion -> putStrLn cProjectVersion
ShowOptions -> showOptions
Right postStartupMode ->
-- start our GHC session
GHC.runGhc mbMinusB $ do
dflags <- GHC.getSessionDynFlags
case postStartupMode of
Left preLoadMode ->
liftIO $ do
case preLoadMode of
ShowInfo -> showInfo dflags
ShowGhcUsage -> showGhcUsage dflags
ShowGhciUsage -> showGhciUsage dflags
PrintWithDynFlags f -> putStrLn (f dflags)
Right postLoadMode ->
main' postLoadMode dflags argv3 flagWarnings
main' :: PostLoadMode -> DynFlags -> [Located String] -> [Located String]
-> Ghc ()
main' postLoadMode dflags0 args flagWarnings = do
-- set the default GhcMode, HscTarget and GhcLink. The HscTarget
-- can be further adjusted on a module by module basis, using only
-- the -fvia-C and -fasm flags. If the default HscTarget is not
-- HscC or HscAsm, -fvia-C and -fasm have no effect.
let dflt_target = hscTarget dflags0
(mode, lang, link)
= case postLoadMode of
DoInteractive -> (CompManager, HscInterpreted, LinkInMemory)
DoEval _ -> (CompManager, HscInterpreted, LinkInMemory)
DoMake -> (CompManager, dflt_target, LinkBinary)
DoMkDependHS -> (MkDepend, dflt_target, LinkBinary)
DoAbiHash -> (OneShot, dflt_target, LinkBinary)
_ -> (OneShot, dflt_target, LinkBinary)
let dflags1 = case lang of
HscInterpreted ->
let platform = targetPlatform dflags0
dflags0a = updateWays $ dflags0 { ways = interpWays }
dflags0b = foldl gopt_set dflags0a
$ concatMap (wayGeneralFlags platform)
interpWays
dflags0c = foldl gopt_unset dflags0b
$ concatMap (wayUnsetGeneralFlags platform)
interpWays
in dflags0c
_ ->
dflags0
dflags2 = dflags1{ ghcMode = mode,
hscTarget = lang,
ghcLink = link,
verbosity = case postLoadMode of
DoEval _ -> 0
_other -> 1
}
-- turn on -fimplicit-import-qualified for GHCi now, so that it
-- can be overridden from the command-line
-- XXX: this should really be in the interactive DynFlags, but
-- we don't set that until later in interactiveUI
dflags3 | DoInteractive <- postLoadMode = imp_qual_enabled
| DoEval _ <- postLoadMode = imp_qual_enabled
| otherwise = dflags2
where imp_qual_enabled = dflags2 `gopt_set` Opt_ImplicitImportQualified
-- The rest of the arguments are "dynamic"
-- Leftover ones are presumably files
(dflags4, fileish_args, dynamicFlagWarnings) <- GHC.parseDynamicFlags dflags3 args
GHC.prettyPrintGhcErrors dflags4 $ do
let flagWarnings' = flagWarnings ++ dynamicFlagWarnings
handleSourceError (\e -> do
GHC.printException e
liftIO $ exitWith (ExitFailure 1)) $ do
liftIO $ handleFlagWarnings dflags4 flagWarnings'
-- make sure we clean up after ourselves
GHC.defaultCleanupHandler dflags4 $ do
liftIO $ showBanner postLoadMode dflags4
let
-- To simplify the handling of filepaths, we normalise all filepaths right
-- away - e.g., for win32 platforms, backslashes are converted
-- into forward slashes.
normal_fileish_paths = map (normalise . unLoc) fileish_args
(srcs, objs) = partition_args normal_fileish_paths [] []
dflags5 = dflags4 { ldInputs = map (FileOption "") objs
++ ldInputs dflags4 }
-- we've finished manipulating the DynFlags, update the session
_ <- GHC.setSessionDynFlags dflags5
dflags6 <- GHC.getSessionDynFlags
hsc_env <- GHC.getSession
---------------- Display configuration -----------
case verbosity dflags6 of
v | v == 4 -> liftIO $ simpleDumpPackages dflags6
| v >= 5 -> liftIO $ dumpPackages dflags6
| otherwise -> return ()
when (verbosity dflags6 >= 3) $ do
liftIO $ hPutStrLn stderr ("Hsc static flags: " ++ unwords staticFlags)
---------------- Final sanity checking -----------
liftIO $ checkOptions postLoadMode dflags6 srcs objs
---------------- Do the business -----------
handleSourceError (\e -> do
GHC.printException e
liftIO $ exitWith (ExitFailure 1)) $ do
case postLoadMode of
ShowInterface f -> liftIO $ doShowIface dflags6 f
DoMake -> doMake srcs
DoMkDependHS -> doMkDependHS (map fst srcs)
StopBefore p -> liftIO (oneShot hsc_env p srcs)
DoInteractive -> ghciUI srcs Nothing
DoEval exprs -> ghciUI srcs $ Just $ reverse exprs
DoAbiHash -> abiHash srcs
liftIO $ dumpFinalStats dflags6
ghciUI :: [(FilePath, Maybe Phase)] -> Maybe [String] -> Ghc ()
#ifndef GHCI
ghciUI _ _ = throwGhcException (CmdLineError "not built for interactive use")
#else
ghciUI = interactiveUI defaultGhciSettings
#endif
-- -----------------------------------------------------------------------------
-- Splitting arguments into source files and object files. This is where we
-- interpret the -x <suffix> option, and attach a (Maybe Phase) to each source
-- file indicating the phase specified by the -x option in force, if any.
partition_args :: [String] -> [(String, Maybe Phase)] -> [String]
-> ([(String, Maybe Phase)], [String])
partition_args [] srcs objs = (reverse srcs, reverse objs)
partition_args ("-x":suff:args) srcs objs
| "none" <- suff = partition_args args srcs objs
| StopLn <- phase = partition_args args srcs (slurp ++ objs)
| otherwise = partition_args rest (these_srcs ++ srcs) objs
where phase = startPhase suff
(slurp,rest) = break (== "-x") args
these_srcs = zip slurp (repeat (Just phase))
partition_args (arg:args) srcs objs
| looks_like_an_input arg = partition_args args ((arg,Nothing):srcs) objs
| otherwise = partition_args args srcs (arg:objs)
{-
We split out the object files (.o, .dll) and add them
to ldInputs for use by the linker.
The following things should be considered compilation manager inputs:
- haskell source files (strings ending in .hs, .lhs or other
haskellish extension),
- module names (not forgetting hierarchical module names),
- things beginning with '-' are flags that were not recognised by
the flag parser, and we want them to generate errors later in
checkOptions, so we class them as source files (#5921)
- and finally we consider everything not containing a '.' to be
a comp manager input, as shorthand for a .hs or .lhs filename.
Everything else is considered to be a linker object, and passed
straight through to the linker.
-}
looks_like_an_input :: String -> Bool
looks_like_an_input m = isSourceFilename m
|| looksLikeModuleName m
|| "-" `isPrefixOf` m
|| '.' `notElem` m
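-- For example (comment added; not from the original source):
--
-- > looks_like_an_input "Foo.hs"         == True   -- Haskell source file
-- > looks_like_an_input "Data.Maybe"     == True   -- module name
-- > looks_like_an_input "-funknown-flag" == True   -- unparsed flag, reported in checkOptions
-- > looks_like_an_input "Foo.o"          == False  -- linker object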
-- -----------------------------------------------------------------------------
-- Option sanity checks
-- | Ensure sanity of options.
--
-- Throws 'UsageError' or 'CmdLineError' if not.
checkOptions :: PostLoadMode -> DynFlags -> [(String,Maybe Phase)] -> [String] -> IO ()
-- Final sanity checking before kicking off a compilation (pipeline).
checkOptions mode dflags srcs objs = do
-- Complain about any unknown flags
let unknown_opts = [ f | (f@('-':_), _) <- srcs ]
when (notNull unknown_opts) (unknownFlagsErr unknown_opts)
when (notNull (filter wayRTSOnly (ways dflags))
&& isInterpretiveMode mode) $
hPutStrLn stderr ("Warning: -debug, -threaded and -ticky are ignored by GHCi")
-- -prof and --interactive are not a good combination
when ((filter (not . wayRTSOnly) (ways dflags) /= interpWays)
&& isInterpretiveMode mode) $
do throwGhcException (UsageError
"--interactive can't be used with -prof or -unreg.")
-- -ohi sanity check
if (isJust (outputHi dflags) &&
(isCompManagerMode mode || srcs `lengthExceeds` 1))
then throwGhcException (UsageError "-ohi can only be used when compiling a single source file")
else do
-- -o sanity checking
if (srcs `lengthExceeds` 1 && isJust (outputFile dflags)
&& not (isLinkMode mode))
then throwGhcException (UsageError "can't apply -o to multiple source files")
else do
let not_linking = not (isLinkMode mode) || isNoLink (ghcLink dflags)
when (not_linking && not (null objs)) $
hPutStrLn stderr ("Warning: the following files would be used as linker inputs, but linking is not being done: " ++ unwords objs)
-- Check that there are some input files
-- (except in the interactive case)
if null srcs && (null objs || not_linking) && needsInputsMode mode
then throwGhcException (UsageError "no input files")
else do
-- Verify that output files point somewhere sensible.
verifyOutputFiles dflags
-- Compiler output options
-- called to verify that the output files & directories
-- point somewhere valid.
--
-- The assumption is that the directory portion of these output
-- options will have to exist by the time 'verifyOutputFiles'
-- is invoked.
--
verifyOutputFiles :: DynFlags -> IO ()
verifyOutputFiles dflags = do
-- not -odir: we create the directory for -odir if it doesn't exist (#2278).
let ofile = outputFile dflags
when (isJust ofile) $ do
let fn = fromJust ofile
flg <- doesDirNameExist fn
when (not flg) (nonExistentDir "-o" fn)
let ohi = outputHi dflags
when (isJust ohi) $ do
let hi = fromJust ohi
flg <- doesDirNameExist hi
when (not flg) (nonExistentDir "-ohi" hi)
where
nonExistentDir flg dir =
throwGhcException (CmdLineError ("error: directory portion of " ++
show dir ++ " does not exist (used with " ++
show flg ++ " option.)"))
-----------------------------------------------------------------------------
-- GHC modes of operation
type Mode = Either PreStartupMode PostStartupMode
type PostStartupMode = Either PreLoadMode PostLoadMode
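-- Informal reading (comment added): a parsed mode is a pair of nested
-- Eithers, e.g. ShowVersion becomes 'Left ShowVersion', ShowInfo becomes
-- 'Right (Left ShowInfo)' and DoMake becomes 'Right (Right DoMake)'; see
-- mkPreStartupMode, mkPreLoadMode and mkPostLoadMode below.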
data PreStartupMode
= ShowVersion -- ghc -V/--version
| ShowNumVersion -- ghc --numeric-version
| ShowSupportedExtensions -- ghc --supported-extensions
| ShowOptions -- ghc --show-options
showVersionMode, showNumVersionMode, showSupportedExtensionsMode, showOptionsMode :: Mode
showVersionMode = mkPreStartupMode ShowVersion
showNumVersionMode = mkPreStartupMode ShowNumVersion
showSupportedExtensionsMode = mkPreStartupMode ShowSupportedExtensions
showOptionsMode = mkPreStartupMode ShowOptions
mkPreStartupMode :: PreStartupMode -> Mode
mkPreStartupMode = Left
isShowVersionMode :: Mode -> Bool
isShowVersionMode (Left ShowVersion) = True
isShowVersionMode _ = False
isShowNumVersionMode :: Mode -> Bool
isShowNumVersionMode (Left ShowNumVersion) = True
isShowNumVersionMode _ = False
data PreLoadMode
= ShowGhcUsage -- ghc -?
| ShowGhciUsage -- ghci -?
| ShowInfo -- ghc --info
| PrintWithDynFlags (DynFlags -> String) -- ghc --print-foo
showGhcUsageMode, showGhciUsageMode, showInfoMode :: Mode
showGhcUsageMode = mkPreLoadMode ShowGhcUsage
showGhciUsageMode = mkPreLoadMode ShowGhciUsage
showInfoMode = mkPreLoadMode ShowInfo
printSetting :: String -> Mode
printSetting k = mkPreLoadMode (PrintWithDynFlags f)
where f dflags = fromMaybe (panic ("Setting not found: " ++ show k))
$ lookup k (compilerInfo dflags)
mkPreLoadMode :: PreLoadMode -> Mode
mkPreLoadMode = Right . Left
isShowGhcUsageMode :: Mode -> Bool
isShowGhcUsageMode (Right (Left ShowGhcUsage)) = True
isShowGhcUsageMode _ = False
isShowGhciUsageMode :: Mode -> Bool
isShowGhciUsageMode (Right (Left ShowGhciUsage)) = True
isShowGhciUsageMode _ = False
data PostLoadMode
= ShowInterface FilePath -- ghc --show-iface
| DoMkDependHS -- ghc -M
| StopBefore Phase -- ghc -E | -C | -S
-- StopBefore StopLn is the default
| DoMake -- ghc --make
| DoInteractive -- ghc --interactive
| DoEval [String] -- ghc -e foo -e bar => DoEval ["bar", "foo"]
| DoAbiHash -- ghc --abi-hash
doMkDependHSMode, doMakeMode, doInteractiveMode, doAbiHashMode :: Mode
doMkDependHSMode = mkPostLoadMode DoMkDependHS
doMakeMode = mkPostLoadMode DoMake
doInteractiveMode = mkPostLoadMode DoInteractive
doAbiHashMode = mkPostLoadMode DoAbiHash
showInterfaceMode :: FilePath -> Mode
showInterfaceMode fp = mkPostLoadMode (ShowInterface fp)
stopBeforeMode :: Phase -> Mode
stopBeforeMode phase = mkPostLoadMode (StopBefore phase)
doEvalMode :: String -> Mode
doEvalMode str = mkPostLoadMode (DoEval [str])
mkPostLoadMode :: PostLoadMode -> Mode
mkPostLoadMode = Right . Right
isDoInteractiveMode :: Mode -> Bool
isDoInteractiveMode (Right (Right DoInteractive)) = True
isDoInteractiveMode _ = False
isStopLnMode :: Mode -> Bool
isStopLnMode (Right (Right (StopBefore StopLn))) = True
isStopLnMode _ = False
isDoMakeMode :: Mode -> Bool
isDoMakeMode (Right (Right DoMake)) = True
isDoMakeMode _ = False
#ifdef GHCI
isInteractiveMode :: PostLoadMode -> Bool
isInteractiveMode DoInteractive = True
isInteractiveMode _ = False
#endif
-- isInterpretiveMode: byte-code compiler involved
isInterpretiveMode :: PostLoadMode -> Bool
isInterpretiveMode DoInteractive = True
isInterpretiveMode (DoEval _) = True
isInterpretiveMode _ = False
needsInputsMode :: PostLoadMode -> Bool
needsInputsMode DoMkDependHS = True
needsInputsMode (StopBefore _) = True
needsInputsMode DoMake = True
needsInputsMode _ = False
-- True if we are going to attempt to link in this mode.
-- (we might not actually link, depending on the GhcLink flag)
isLinkMode :: PostLoadMode -> Bool
isLinkMode (StopBefore StopLn) = True
isLinkMode DoMake = True
isLinkMode DoInteractive = True
isLinkMode (DoEval _) = True
isLinkMode _ = False
isCompManagerMode :: PostLoadMode -> Bool
isCompManagerMode DoMake = True
isCompManagerMode DoInteractive = True
isCompManagerMode (DoEval _) = True
isCompManagerMode _ = False
-- -----------------------------------------------------------------------------
-- Parsing the mode flag
parseModeFlags :: [Located String]
-> IO (Mode,
[Located String],
[Located String])
parseModeFlags args = do
let ((leftover, errs1, warns), (mModeFlag, errs2, flags')) =
runCmdLine (processArgs mode_flags args)
(Nothing, [], [])
mode = case mModeFlag of
Nothing -> doMakeMode
Just (m, _) -> m
errs = errs1 ++ map (mkGeneralLocated "on the commandline") errs2
when (not (null errs)) $ throwGhcException $ errorsToGhcException errs
return (mode, flags' ++ leftover, warns)
type ModeM = CmdLineP (Maybe (Mode, String), [String], [Located String])
-- mode flags sometimes give rise to new DynFlags (eg. -C, see below)
-- so we collect the new ones and return them.
mode_flags :: [Flag ModeM]
mode_flags =
[ ------- help / version ----------------------------------------------
Flag "?" (PassFlag (setMode showGhcUsageMode))
, Flag "-help" (PassFlag (setMode showGhcUsageMode))
, Flag "V" (PassFlag (setMode showVersionMode))
, Flag "-version" (PassFlag (setMode showVersionMode))
, Flag "-numeric-version" (PassFlag (setMode showNumVersionMode))
, Flag "-info" (PassFlag (setMode showInfoMode))
, Flag "-show-options" (PassFlag (setMode showOptionsMode))
, Flag "-supported-languages" (PassFlag (setMode showSupportedExtensionsMode))
, Flag "-supported-extensions" (PassFlag (setMode showSupportedExtensionsMode))
] ++
[ Flag k' (PassFlag (setMode (printSetting k)))
| k <- ["Project version",
"Booter version",
"Stage",
"Build platform",
"Host platform",
"Target platform",
"Have interpreter",
"Object splitting supported",
"Have native code generator",
"Support SMP",
"Unregisterised",
"Tables next to code",
"RTS ways",
"Leading underscore",
"Debug on",
"LibDir",
"Global Package DB",
"C compiler flags",
"Gcc Linker flags",
"Ld Linker flags"],
let k' = "-print-" ++ map (replaceSpace . toLower) k
replaceSpace ' ' = '-'
replaceSpace c = c
] ++
------- interfaces ----------------------------------------------------
[ Flag "-show-iface" (HasArg (\f -> setMode (showInterfaceMode f)
"--show-iface"))
------- primary modes ------------------------------------------------
, Flag "c" (PassFlag (\f -> do setMode (stopBeforeMode StopLn) f
addFlag "-no-link" f))
, Flag "M" (PassFlag (setMode doMkDependHSMode))
, Flag "E" (PassFlag (setMode (stopBeforeMode anyHsc)))
, Flag "C" (PassFlag (setMode (stopBeforeMode HCc)))
, Flag "S" (PassFlag (setMode (stopBeforeMode (As False))))
, Flag "-make" (PassFlag (setMode doMakeMode))
, Flag "-interactive" (PassFlag (setMode doInteractiveMode))
, Flag "-abi-hash" (PassFlag (setMode doAbiHashMode))
, Flag "e" (SepArg (\s -> setMode (doEvalMode s) "-e"))
]
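-- Example (comment added; not from the original source): the generated
-- "-print-*" entries above turn a setting name such as "Project version"
-- into the flag "--print-project-version", whose action simply looks the
-- setting up in 'compilerInfo' via 'printSetting'.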
setMode :: Mode -> String -> EwM ModeM ()
setMode newMode newFlag = liftEwM $ do
(mModeFlag, errs, flags') <- getCmdLineState
let (modeFlag', errs') =
case mModeFlag of
Nothing -> ((newMode, newFlag), errs)
Just (oldMode, oldFlag) ->
case (oldMode, newMode) of
-- -c/--make are allowed together, and mean --make -no-link
_ | isStopLnMode oldMode && isDoMakeMode newMode
|| isStopLnMode newMode && isDoMakeMode oldMode ->
((doMakeMode, "--make"), [])
-- If we have both --help and --interactive then we
-- want showGhciUsage
_ | isShowGhcUsageMode oldMode &&
isDoInteractiveMode newMode ->
((showGhciUsageMode, oldFlag), [])
| isShowGhcUsageMode newMode &&
isDoInteractiveMode oldMode ->
((showGhciUsageMode, newFlag), [])
-- Otherwise, --help/--version/--numeric-version always win
| isDominantFlag oldMode -> ((oldMode, oldFlag), [])
| isDominantFlag newMode -> ((newMode, newFlag), [])
-- We need to accumulate eval flags like "-e foo -e bar"
(Right (Right (DoEval esOld)),
Right (Right (DoEval [eNew]))) ->
((Right (Right (DoEval (eNew : esOld))), oldFlag),
errs)
-- Saying e.g. --interactive --interactive is OK
_ | oldFlag == newFlag -> ((oldMode, oldFlag), errs)
-- Otherwise, complain
_ -> let err = flagMismatchErr oldFlag newFlag
in ((oldMode, oldFlag), err : errs)
putCmdLineState (Just modeFlag', errs', flags')
where isDominantFlag f = isShowGhcUsageMode f ||
isShowGhciUsageMode f ||
isShowVersionMode f ||
isShowNumVersionMode f
flagMismatchErr :: String -> String -> String
flagMismatchErr oldFlag newFlag
= "cannot use `" ++ oldFlag ++ "' with `" ++ newFlag ++ "'"
addFlag :: String -> String -> EwM ModeM ()
addFlag s flag = liftEwM $ do
(m, e, flags') <- getCmdLineState
putCmdLineState (m, e, mkGeneralLocated loc s : flags')
where loc = "addFlag by " ++ flag ++ " on the commandline"
-- ----------------------------------------------------------------------------
-- Run --make mode
doMake :: [(String,Maybe Phase)] -> Ghc ()
doMake srcs = do
let (hs_srcs, non_hs_srcs) = partition haskellish srcs
haskellish (f,Nothing) =
looksLikeModuleName f || isHaskellUserSrcFilename f || '.' `notElem` f
haskellish (_,Just phase) =
phase `notElem` [ As True, As False, Cc, Cobjc, Cobjcpp, CmmCpp, Cmm
, StopLn]
hsc_env <- GHC.getSession
-- if we have no haskell sources from which to do a dependency
-- analysis, then just do one-shot compilation and/or linking.
-- This means that "ghc Foo.o Bar.o -o baz" links the program as
-- we expect.
if (null hs_srcs)
then liftIO (oneShot hsc_env StopLn srcs)
else do
o_files <- mapM (\x -> liftIO $ compileFile hsc_env StopLn x)
non_hs_srcs
dflags <- GHC.getSessionDynFlags
let dflags' = dflags { ldInputs = map (FileOption "") o_files
++ ldInputs dflags }
_ <- GHC.setSessionDynFlags dflags'
targets <- mapM (uncurry GHC.guessTarget) hs_srcs
GHC.setTargets targets
ok_flag <- GHC.load LoadAllTargets
when (failed ok_flag) (liftIO $ exitWith (ExitFailure 1))
return ()
-- ---------------------------------------------------------------------------
-- --show-iface mode
doShowIface :: DynFlags -> FilePath -> IO ()
doShowIface dflags file = do
hsc_env <- newHscEnv dflags
showIface hsc_env file
-- ---------------------------------------------------------------------------
-- Various banners and verbosity output.
showBanner :: PostLoadMode -> DynFlags -> IO ()
showBanner _postLoadMode dflags = do
let verb = verbosity dflags
#ifdef GHCI
-- Show the GHCi banner
when (isInteractiveMode _postLoadMode && verb >= 1) $ putStrLn ghciWelcomeMsg
#endif
-- Display details of the configuration in verbose mode
when (verb >= 2) $
do hPutStr stderr "Glasgow Haskell Compiler, Version "
hPutStr stderr cProjectVersion
hPutStr stderr ", stage "
hPutStr stderr cStage
hPutStr stderr " booted by GHC version "
hPutStrLn stderr cBooterVersion
-- We print out a Read-friendly string, but a prettier one than the
-- Show instance gives us
showInfo :: DynFlags -> IO ()
showInfo dflags = do
let sq x = " [" ++ x ++ "\n ]"
putStrLn $ sq $ intercalate "\n ," $ map show $ compilerInfo dflags
showSupportedExtensions :: IO ()
showSupportedExtensions = mapM_ putStrLn supportedLanguagesAndExtensions
showVersion :: IO ()
showVersion = putStrLn (cProjectName ++ ", version " ++ cProjectVersion)
showOptions :: IO ()
showOptions = putStr (unlines availableOptions)
where
availableOptions = map ((:) '-') $
getFlagNames mode_flags ++
getFlagNames flagsDynamic ++
(filterUnwantedStatic . getFlagNames $ flagsStatic) ++
flagsStaticNames
getFlagNames opts = map getFlagName opts
getFlagName (Flag name _) = name
-- this is a hack to get rid of two unwanted entries that get listed
-- as static flags. Hopefully this hack will disappear one day together
-- with static flags
filterUnwantedStatic = filter (\x -> not (x `elem` ["f", "fno-"]))
showGhcUsage :: DynFlags -> IO ()
showGhcUsage = showUsage False
showGhciUsage :: DynFlags -> IO ()
showGhciUsage = showUsage True
showUsage :: Bool -> DynFlags -> IO ()
showUsage ghci dflags = do
let usage_path = if ghci then ghciUsagePath dflags
else ghcUsagePath dflags
usage <- readFile usage_path
dump usage
where
dump "" = return ()
dump ('$':'$':s) = putStr progName >> dump s
dump (c:s) = putChar c >> dump s
dumpFinalStats :: DynFlags -> IO ()
dumpFinalStats dflags =
when (gopt Opt_D_faststring_stats dflags) $ dumpFastStringStats dflags
dumpFastStringStats :: DynFlags -> IO ()
dumpFastStringStats dflags = do
buckets <- getFastStringTable
let (entries, longest, has_z) = countFS 0 0 0 buckets
msg = text "FastString stats:" $$
nest 4 (vcat [text "size: " <+> int (length buckets),
text "entries: " <+> int entries,
text "longest chain: " <+> int longest,
text "has z-encoding: " <+> (has_z `pcntOf` entries)
])
-- we usually get more "has z-encoding" than "z-encoded", because
-- when we z-encode a string it might hash to the exact same string,
-- which is not counted as "z-encoded". Only strings whose
-- Z-encoding is different from the original string are counted in
-- the "z-encoded" total.
putMsg dflags msg
where
x `pcntOf` y = int ((x * 100) `quot` y) <> char '%'
countFS :: Int -> Int -> Int -> [[FastString]] -> (Int, Int, Int)
countFS entries longest has_z [] = (entries, longest, has_z)
countFS entries longest has_z (b:bs) =
let
len = length b
longest' = max len longest
entries' = entries + len
has_zs = length (filter hasZEncoding b)
in
countFS entries' longest' (has_z + has_zs) bs
-- -----------------------------------------------------------------------------
-- ABI hash support
{-
ghc --abi-hash Data.Foo System.Bar
Generates a combined hash of the ABI for modules Data.Foo and
System.Bar. The modules must already be compiled, and appropriate -i
options may be necessary in order to find the .hi files.
This is used by Cabal for generating the InstalledPackageId for a
package. The InstalledPackageId must change when the visible ABI of
the package changes, so during registration Cabal calls ghc --abi-hash
to get a hash of the package's ABI.
-}
abiHash :: [(String, Maybe Phase)] -> Ghc ()
abiHash strs = do
hsc_env <- getSession
let dflags = hsc_dflags hsc_env
liftIO $ do
let find_it str = do
let modname = mkModuleName str
r <- findImportedModule hsc_env modname Nothing
case r of
Found _ m -> return m
_error -> throwGhcException $ CmdLineError $ showSDoc dflags $
cannotFindInterface dflags modname r
mods <- mapM find_it (map fst strs)
let get_iface modl = loadUserInterface False (text "abiHash") modl
ifaces <- initIfaceCheck hsc_env $ mapM get_iface mods
bh <- openBinMem (3*1024) -- just less than a block
put_ bh hiVersion
-- package hashes change when the compiler version changes (for now)
-- see #5328
mapM_ (put_ bh . mi_mod_hash) ifaces
f <- fingerprintBinMem bh
putStrLn (showPpr dflags f)
-- -----------------------------------------------------------------------------
-- Util
unknownFlagsErr :: [String] -> a
unknownFlagsErr fs = throwGhcException $ UsageError $ concatMap oneError fs
where
oneError f =
"unrecognised flag: " ++ f ++ "\n" ++
(case fuzzyMatch f (nub allFlags) of
[] -> ""
suggs -> "did you mean one of:\n" ++ unlines (map (" " ++) suggs))
{- Note [-Bsymbolic and hooks]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Bsymbolic is a flag that prevents the binding of references to global
symbols to symbols outside the shared library being compiled (see `man
ld`). When dynamically linking, we don't use -Bsymbolic on the RTS
package: that is because we want hooks to be overridden by the user,
we don't want to constrain them to the RTS package.
Unfortunately this seems to have broken somehow on OS X: as a result,
defaultHooks (in hschooks.c) is not called, which does not initialize
the GC stats. As a result, this breaks things like `:set +s` in GHCi
(#8754). As a hacky workaround, we instead call 'defaultHooks'
directly to initialize the flags in the RTS.
A byproduct of this, I believe, is that hooks are likely broken on OS
X when dynamically linking. But this probably doesn't affect most
people since we're linking GHC dynamically, but most things themselves
link statically.
-}
foreign import ccall safe "initGCStatistics"
initGCStatistics :: IO ()
| frantisekfarka/ghc-dsi | ghc/Main.hs | bsd-3-clause | 33,164 | 0 | 27 | 9,300 | 6,810 | 3,512 | 3,298 | 522 | 14 |
{-@ LIQUID "--totality" @-}
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__
-- LIQUID {- LANGUAGE DeriveDataTypeable, StandaloneDeriving -}
#endif
#if !defined(TESTING) && __GLASGOW_HASKELL__ >= 703
{-# LANGUAGE Trustworthy #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Data.Map.Base
-- Copyright : (c) Daan Leijen 2002
-- (c) Andriy Palamarchuk 2008
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- An efficient implementation of maps from keys to values (dictionaries).
--
-- Since many function names (but not the type name) clash with
-- "Prelude" names, this module is usually imported @qualified@, e.g.
--
-- > import Data.Map (Map)
-- > import qualified Data.Map as Map
--
-- The implementation of 'Map' is based on /size balanced/ binary trees (or
-- trees of /bounded balance/) as described by:
--
-- * Stephen Adams, \"/Efficient sets: a balancing act/\",
-- Journal of Functional Programming 3(4):553-562, October 1993,
-- <http://www.swiss.ai.mit.edu/~adams/BB/>.
--
-- * J. Nievergelt and E.M. Reingold,
-- \"/Binary search trees of bounded balance/\",
-- SIAM journal of computing 2(1), March 1973.
--
-- Note that the implementation is /left-biased/ -- the elements of a
-- first argument are always preferred to the second, for example in
-- 'union' or 'insert'.
--
-- Operation comments contain the operation time complexity in
-- the Big-O notation <http://en.wikipedia.org/wiki/Big_O_notation>.
-----------------------------------------------------------------------------
-- [Note: Using INLINABLE]
-- ~~~~~~~~~~~~~~~~~~~~~~~
-- It is crucial to the performance that the functions specialize on the Ord
-- type when possible. GHC 7.0 and higher does this by itself when it sees th
-- unfolding of a function -- that is why all public functions are marked
-- INLINABLE (that exposes the unfolding).
-- [Note: Using INLINE]
-- ~~~~~~~~~~~~~~~~~~~~
-- For other compilers and GHC pre 7.0, we mark some of the functions INLINE.
-- We mark the functions that just navigate down the tree (lookup, insert,
-- delete and similar). That navigation code gets inlined and thus specialized
-- when possible. There is a price to pay -- code growth. The code INLINED is
-- therefore only the tree navigation, all the real work (rebalancing) is not
-- INLINED by using a NOINLINE.
--
-- All methods marked INLINE have to be nonrecursive -- a 'go' function doing
-- the real work is provided.
-- [Note: Type of local 'go' function]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- If the local 'go' function uses an Ord class, it sometimes heap-allocates
-- the Ord dictionary when the 'go' function does not have an explicit type.
-- In that case we give 'go' an explicit type. But this slightly decreases
-- performance, as the resulting 'go' function can float out to top level.
-- [Note: Local 'go' functions and capturing]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- As opposed to IntMap, when the 'go' function captures an argument, increased
-- heap-allocation can occur: sometimes in a polymorphic function, the 'go'
-- floats out of its enclosing function and then it heap-allocates the
-- dictionary and the argument. Maybe it floats out too late and the strictness
-- analyzer cannot see that these could be passed on the stack.
--
-- For example, change 'member' so that its local 'go' function is not passing
-- argument k and then look at the resulting code for hedgeInt.
-- [Note: Order of constructors]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- The order of constructors of Map matters when considering performance.
-- Currently in GHC 7.0, when a type has 2 constructors, a forward conditional
-- jump is made when successfully matching the second constructor. A successful
-- match of the first constructor results in the forward jump not being taken.
-- On GHC 7.0, reordering constructors from Tip | Bin to Bin | Tip
-- improves the benchmark by up to 10% on x86.
module Data.Map.Base (
-- * Map type
Map(..) -- instance Eq,Show,Read
-- * Operators
, (!), (\\)
-- * Query
, null
, size
, member
, notMember
, lookup
, findWithDefault
, lookupLT
, lookupGT
, lookupLE
, lookupGE
-- * Construction
, empty
, singleton
-- ** Insertion
, insert
, insertWith
, insertWithKey
, insertLookupWithKey
-- ** Delete\/Update
, delete
, adjust
, adjustWithKey
, update
, updateWithKey
, updateLookupWithKey
, alter
-- * Combine
-- ** Union
, union
, unionWith
, unionWithKey
, unions
, unionsWith
-- ** Difference
, difference
, differenceWith
, differenceWithKey
-- ** Intersection
, intersection
, intersectionWith
, intersectionWithKey
-- ** Universal combining function
, mergeWithKey
-- * Traversal
-- ** Map
, map
, mapWithKey
-- LIQUID, traverseWithKey
, mapAccum
, mapAccumWithKey
, mapAccumRWithKey
, mapKeys
, mapKeysWith
, mapKeysMonotonic
-- * Folds
, foldr
, foldl
, foldrWithKey
, foldlWithKey
-- ** Strict folds
, foldr'
, foldl'
, foldrWithKey'
, foldlWithKey'
-- * Conversion
, elems
, keys
, assocs
-- LIQUID, keysSet
-- LIQUID, fromSet
-- ** Lists
, toList
, fromList
, fromListWith
, fromListWithKey
-- ** Ordered lists
, toAscList
, toDescList
, fromAscList
, fromAscListWith
, fromAscListWithKey
, fromDistinctAscList
-- * Filter
, filter
, filterWithKey
, partition
, partitionWithKey
, mapMaybe
, mapMaybeWithKey
, mapEither
, mapEitherWithKey
, split
, splitLookup
-- * Submap
, isSubmapOf, isSubmapOfBy
, isProperSubmapOf, isProperSubmapOfBy
-- * Indexed
, lookupIndex
, findIndex
, elemAt
, updateAt
, deleteAt
-- * Min\/Max
, findMin
, findMax
, deleteMin
, deleteMax
, deleteFindMin
, deleteFindMax
, updateMin
, updateMax
, updateMinWithKey
, updateMaxWithKey
, minView
, maxView
, minViewWithKey
, maxViewWithKey
-- * Debugging
, showTree
, showTreeWith
, valid
-- Used by the strict version
, bin
, balance
, balanced
, balanceL
, balanceR
, delta
, join
, merge
, glue
, trim, zoo1, zoo2
, trimLookupLo
, foldlStrict
, MaybeS(..)
, filterGt
, filterLt
) where
import Prelude hiding (lookup,map,filter,foldr,foldl,null)
-- LIQUID import qualified Data.Set.Base as Set
-- LIQUID import Data.StrictPair
import Data.Monoid (Monoid(..))
-- LIQUID import Control.Applicative (Applicative(..), (<$>))
import Data.Traversable (Traversable(traverse))
import qualified Data.Foldable as Foldable
-- import Data.Typeable
import Control.DeepSeq (NFData(rnf))
#if __GLASGOW_HASKELL__
import GHC.Exts ( build )
import Text.Read
import Data.Data
#endif
-- Use macros to define strictness of functions.
-- STRICT_x_OF_y denotes an y-ary function strict in the x-th parameter.
-- We do not use BangPatterns, because they are not in any standard and we
-- want the compilers to be compiled by as many compilers as possible.
#define STRICT_1_OF_2(fn) fn arg _ | arg `seq` False = undefined
#define STRICT_1_OF_3(fn) fn arg _ _ | arg `seq` False = undefined
#define STRICT_2_OF_3(fn) fn _ arg _ | arg `seq` False = undefined
#define STRICT_1_OF_4(fn) fn arg _ _ _ | arg `seq` False = undefined
#define STRICT_2_OF_4(fn) fn _ arg _ _ | arg `seq` False = undefined
{--------------------------------------------------------------------
Operators
--------------------------------------------------------------------}
infixl 9 !,\\ --
-- | /O(log n)/. Find the value at a key.
-- Calls 'error' when the element can not be found.
--
-- > fromList [(5,'a'), (3,'b')] ! 1 Error: element not in the map
-- > fromList [(5,'a'), (3,'b')] ! 5 == 'a'
{-@ Data.Map.Base.! :: (Ord k) => OMap k a -> k -> a @-}
(!) :: Ord k => Map k a -> k -> a
m ! k = find k m
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE (!) #-}
#endif
-- | Same as 'difference'.
{-@ Data.Map.Base.\\ :: Ord k => OMap k a -> OMap k b -> OMap k a @-}
(\\) :: Ord k => Map k a -> Map k b -> Map k a
m1 \\ m2 = difference m1 m2
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE (\\) #-}
#endif
{--------------------------------------------------------------------
Size balanced trees.
--------------------------------------------------------------------}
-- | A Map from keys @k@ to values @a@.
-- See Note: Order of constructors
data Map k a = Bin Size k a (Map k a) (Map k a)
| Tip
type Size = Int
{-@ include <Base.hquals> @-}
{-@ data Map [mlen] k a <l :: root:k -> k -> Prop, r :: root:k -> k -> Prop>
= Bin (sz :: Size)
(key :: k)
(value :: a)
(left :: Map <l, r> (k <l key>) a)
(right :: Map <l, r> (k <r key>) a)
| Tip
@-}
{-@ measure mlen :: (Map k a) -> Int
mlen(Tip) = 0
mlen(Bin s k v l r) = 1 + (mlen l) + (mlen r)
@-}
{-@ type SumMLen A B = {v:Nat | v = (mlen A) + (mlen B)} @-}
{-@ invariant {v:Map k a | (mlen v) >= 0} @-}
{-@ mlen :: m:Map k a -> {v:Nat | v = (mlen m)} @-}
mlen :: Map k a -> Int
mlen Tip = 0
mlen (Bin s k v l r) = 1 + mlen l + mlen r
{-@ type OMap k a = Map <{\root v -> v < root}, {\root v -> v > root}> k a @-}
{-@ measure isJustS :: forall a. MaybeS a -> Prop
isJustS (JustS x) = true
isJustS (NothingS) = false
@-}
{-@ measure fromJustS :: forall a. MaybeS a -> a
fromJustS (JustS x) = x
@-}
{-@ measure isBin :: Map k a -> Prop
isBin (Bin sz kx x l r) = true
isBin (Tip) = false
@-}
{-@ invariant {v0: MaybeS {v: a | ((isJustS v0) && (v = (fromJustS v0)))} | true} @-}
{-@ predicate IfDefLe X Y = ((isJustS X) => ((fromJustS X) < Y)) @-}
{-@ predicate IfDefLt X Y = ((isJustS X) => ((fromJustS X) < Y)) @-}
{-@ predicate IfDefGt X Y = ((isJustS X) => (Y < (fromJustS X))) @-}
{-@ predicate RootLt Lo V = ((isBin V) => (IfDefLt Lo (key V))) @-}
{-@ predicate RootGt Hi V = ((isBin V) => (IfDefGt Hi (key V))) @-}
{-@ predicate RootBetween Lo Hi V = ((RootLt Lo V) && (RootGt Hi V)) @-}
{-@ predicate KeyBetween Lo Hi V = ((IfDefLt Lo V) && (IfDefGt Hi V)) @-}
-- LIQUID instance (Ord k) => Monoid (Map k v) where
-- mempty = empty
-- mappend = union
-- mconcat = unions
#if __GLASGOW_HASKELL__
{--------------------------------------------------------------------
A Data instance
--------------------------------------------------------------------}
-- This instance preserves data abstraction at the cost of inefficiency.
-- We omit reflection services for the sake of data abstraction.
-- LIQUID instance (Data k, Data a, Ord k) => Data (Map k a) where
-- LIQUID gfoldl f z m = z fromList `f` toList m
-- LIQUID toConstr _ = error "toConstr"
-- LIQUID gunfold _ _ = error "gunfold"
-- LIQUID dataTypeOf _ = mkNoRepType "Data.Map.Map"
-- LIQUID dataCast2 f = gcast2 f
#endif
{--------------------------------------------------------------------
Query
--------------------------------------------------------------------}
-- | /O(1)/. Is the map empty?
--
-- > Data.Map.null (empty) == True
-- > Data.Map.null (singleton 1 'a') == False
null :: Map k a -> Bool
null Tip = True
null (Bin {}) = False
{-# INLINE null #-}
-- | /O(1)/. The number of elements in the map.
--
-- > size empty == 0
-- > size (singleton 1 'a') == 1
-- > size (fromList([(1,'a'), (2,'c'), (3,'b')])) == 3
size :: Map k a -> Int
size Tip = 0
size (Bin sz _ _ _ _) = sz
{-# INLINE size #-}
-- | /O(log n)/. Lookup the value at a key in the map.
--
-- The function will return the corresponding value as @('Just' value)@,
-- or 'Nothing' if the key isn't in the map.
--
-- An example of using @lookup@:
--
-- > import Prelude hiding (lookup)
-- > import Data.Map
-- >
-- > employeeDept = fromList([("John","Sales"), ("Bob","IT")])
-- > deptCountry = fromList([("IT","USA"), ("Sales","France")])
-- > countryCurrency = fromList([("USA", "Dollar"), ("France", "Euro")])
-- >
-- > employeeCurrency :: String -> Maybe String
-- > employeeCurrency name = do
-- > dept <- lookup name employeeDept
-- > country <- lookup dept deptCountry
-- > lookup country countryCurrency
-- >
-- > main = do
-- > putStrLn $ "John's currency: " ++ (show (employeeCurrency "John"))
-- > putStrLn $ "Pete's currency: " ++ (show (employeeCurrency "Pete"))
--
-- The output of this program:
--
-- > John's currency: Just "Euro"
-- > Pete's currency: Nothing
{-@ lookup :: (Ord k) => k -> OMap k a -> Maybe a @-}
lookup :: Ord k => k -> Map k a -> Maybe a
lookup = go
where
STRICT_1_OF_2(go)
go _ Tip = Nothing
go k (Bin _ kx x l r) = case compare k kx of
LT -> go k l
GT -> go k r
EQ -> Just x
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE lookup #-}
#else
{-# INLINE lookup #-}
#endif
-- | /O(log n)/. Is the key a member of the map? See also 'notMember'.
--
-- > member 5 (fromList [(5,'a'), (3,'b')]) == True
-- > member 1 (fromList [(5,'a'), (3,'b')]) == False
{-@ member :: (Ord k) => k -> OMap k a -> Bool @-}
member :: Ord k => k -> Map k a -> Bool
member = go
where
STRICT_1_OF_2(go)
go _ Tip = False
go k (Bin _ kx _ l r) = case compare k kx of
LT -> go k l
GT -> go k r
EQ -> True
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE member #-}
#else
{-# INLINE member #-}
#endif
-- | /O(log n)/. Is the key not a member of the map? See also 'member'.
--
-- > notMember 5 (fromList [(5,'a'), (3,'b')]) == False
-- > notMember 1 (fromList [(5,'a'), (3,'b')]) == True
{-@ notMember :: (Ord k) => k -> OMap k a -> Bool @-}
notMember :: Ord k => k -> Map k a -> Bool
notMember k m = not $ member k m
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE notMember #-}
#else
{-# INLINE notMember #-}
#endif
-- | /O(log n)/. Find the value at a key.
-- Calls 'error' when the element can not be found.
{-@ find :: (Ord k) => k -> OMap k a -> a @-}
find :: Ord k => k -> Map k a -> a
find = go
where
STRICT_1_OF_2(go)
go _ Tip = error "Map.!: given key is not an element in the map"
go k (Bin _ kx x l r) = case compare k kx of
LT -> go k l
GT -> go k r
EQ -> x
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE find #-}
#else
{-# INLINE find #-}
#endif
-- | /O(log n)/. The expression @('findWithDefault' def k map)@ returns
-- the value at key @k@ or returns default value @def@
-- when the key is not in the map.
--
-- > findWithDefault 'x' 1 (fromList [(5,'a'), (3,'b')]) == 'x'
-- > findWithDefault 'x' 5 (fromList [(5,'a'), (3,'b')]) == 'a'
{-@ findWithDefault :: (Ord k) => a -> k -> OMap k a -> a @-}
findWithDefault :: Ord k => a -> k -> Map k a -> a
findWithDefault = go
where
STRICT_2_OF_3(go)
go def _ Tip = def
go def k (Bin _ kx x l r) = case compare k kx of
LT -> go def k l
GT -> go def k r
EQ -> x
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE findWithDefault #-}
#else
{-# INLINE findWithDefault #-}
#endif
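-- A small illustrative sketch (the name 'queryDemo' is hypothetical, not part
-- of the public API): it ties 'lookup', 'member' and 'findWithDefault'
-- together on one map, assuming the behaviour documented above.
queryDemo :: Bool
queryDemo = lookup 5 m == Just 'a'          -- present key
         && not (member 1 m)                -- absent key
         && findWithDefault 'x' 1 m == 'x'  -- default used for an absent key
  where
    m = fromList [(3,'b'), (5,'a')] :: Map Int Char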
-- | /O(log n)/. Find largest key smaller than the given one and return the
-- corresponding (key, value) pair.
--
-- > lookupLT 3 (fromList [(3,'a'), (5,'b')]) == Nothing
-- > lookupLT 4 (fromList [(3,'a'), (5,'b')]) == Just (3, 'a')
{-@ lookupLT :: (Ord k) => k -> OMap k v -> Maybe (k, v) @-}
lookupLT :: Ord k => k -> Map k v -> Maybe (k, v)
lookupLT = goNothing
where
STRICT_1_OF_2(goNothing)
goNothing _ Tip = Nothing
goNothing k (Bin _ kx x l r) | k <= kx = goNothing k l
| otherwise = goJust k kx x r
STRICT_1_OF_4(goJust)
goJust _ kx' x' Tip = Just (kx', x')
goJust k kx' x' (Bin _ kx x l r) | k <= kx = goJust k kx' x' l
| otherwise = goJust k kx x r
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE lookupLT #-}
#else
{-# INLINE lookupLT #-}
#endif
-- | /O(log n)/. Find smallest key greater than the given one and return the
-- corresponding (key, value) pair.
--
-- > lookupGT 4 (fromList [(3,'a'), (5,'b')]) == Just (5, 'b')
-- > lookupGT 5 (fromList [(3,'a'), (5,'b')]) == Nothing
{-@ lookupGT :: (Ord k) => k -> OMap k v -> Maybe (k, v) @-}
lookupGT :: Ord k => k -> Map k v -> Maybe (k, v)
lookupGT = goNothing
where
STRICT_1_OF_2(goNothing)
goNothing _ Tip = Nothing
goNothing k (Bin _ kx x l r) | k < kx = goJust k kx x l
| otherwise = goNothing k r
STRICT_1_OF_4(goJust)
goJust _ kx' x' Tip = Just (kx', x')
goJust k kx' x' (Bin _ kx x l r) | k < kx = goJust k kx x l
| otherwise = goJust k kx' x' r
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE lookupGT #-}
#else
{-# INLINE lookupGT #-}
#endif
-- | /O(log n)/. Find largest key smaller or equal to the given one and return
-- the corresponding (key, value) pair.
--
-- > lookupLE 2 (fromList [(3,'a'), (5,'b')]) == Nothing
-- > lookupLE 4 (fromList [(3,'a'), (5,'b')]) == Just (3, 'a')
-- > lookupLE 5 (fromList [(3,'a'), (5,'b')]) == Just (5, 'b')
{-@ lookupLE :: (Ord k) => k -> OMap k v -> Maybe (k, v) @-}
lookupLE :: Ord k => k -> Map k v -> Maybe (k, v)
lookupLE = goNothing
where
STRICT_1_OF_2(goNothing)
goNothing _ Tip = Nothing
goNothing k (Bin _ kx x l r) = case compare k kx of LT -> goNothing k l
EQ -> Just (kx, x)
GT -> goJust k kx x r
STRICT_1_OF_4(goJust)
goJust _ kx' x' Tip = Just (kx', x')
goJust k kx' x' (Bin _ kx x l r) = case compare k kx of LT -> goJust k kx' x' l
EQ -> Just (kx, x)
GT -> goJust k kx x r
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE lookupLE #-}
#else
{-# INLINE lookupLE #-}
#endif
-- | /O(log n)/. Find smallest key greater or equal to the given one and return
-- the corresponding (key, value) pair.
--
-- > lookupGE 3 (fromList [(3,'a'), (5,'b')]) == Just (3, 'a')
-- > lookupGE 4 (fromList [(3,'a'), (5,'b')]) == Just (5, 'b')
-- > lookupGE 6 (fromList [(3,'a'), (5,'b')]) == Nothing
{-@ lookupGE :: (Ord k) => k -> OMap k v -> Maybe (k, v) @-}
lookupGE :: Ord k => k -> Map k v -> Maybe (k, v)
lookupGE = goNothing
where
STRICT_1_OF_2(goNothing)
goNothing _ Tip = Nothing
goNothing k (Bin _ kx x l r) = case compare k kx of LT -> goJust k kx x l
EQ -> Just (kx, x)
GT -> goNothing k r
STRICT_1_OF_4(goJust)
goJust _ kx' x' Tip = Just (kx', x')
goJust k kx' x' (Bin _ kx x l r) = case compare k kx of LT -> goJust k kx x l
EQ -> Just (kx, x)
GT -> goJust k kx' x' r
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE lookupGE #-}
#else
{-# INLINE lookupGE #-}
#endif
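-- An illustrative sketch (hypothetical name 'boundsDemo'): the four ordered
-- lookups on the same map, assuming the behaviour documented above.
boundsDemo :: Bool
boundsDemo = lookupLT 3 m == Nothing       -- nothing strictly below the minimum
          && lookupLE 3 m == Just (3,'a')  -- the key itself is allowed
          && lookupGT 5 m == Nothing       -- nothing strictly above the maximum
          && lookupGE 4 m == Just (5,'b')  -- smallest key at least 4
  where
    m = fromList [(3,'a'), (5,'b')] :: Map Int Char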
{--------------------------------------------------------------------
Construction
--------------------------------------------------------------------}
-- | /O(1)/. The empty map.
--
-- > empty == fromList []
-- > size empty == 0
{-@ empty :: OMap k a @-}
empty :: Map k a
empty = Tip
{-# INLINE empty #-}
-- | /O(1)/. A map with a single element.
--
-- > singleton 1 'a' == fromList [(1, 'a')]
-- > size (singleton 1 'a') == 1
{-@ singleton :: k -> a -> OMap k a @-}
singleton :: k -> a -> Map k a
singleton k x = Bin 1 k x Tip Tip
{-# INLINE singleton #-}
{--------------------------------------------------------------------
Insertion
--------------------------------------------------------------------}
-- | /O(log n)/. Insert a new key and value in the map.
-- If the key is already present in the map, the associated value is
-- replaced with the supplied value. 'insert' is equivalent to
-- @'insertWith' 'const'@.
--
-- > insert 5 'x' (fromList [(5,'a'), (3,'b')]) == fromList [(3, 'b'), (5, 'x')]
-- > insert 7 'x' (fromList [(5,'a'), (3,'b')]) == fromList [(3, 'b'), (5, 'a'), (7, 'x')]
-- > insert 5 'x' empty == singleton 5 'x'
-- See Note: Type of local 'go' function
{-@ insert :: (Ord k) => k -> a -> OMap k a -> OMap k a @-}
insert :: Ord k => k -> a -> Map k a -> Map k a
insert = insert_go
--LIQUID insert = go
--LIQUID where
--LIQUID go :: Ord k => k -> a -> Map k a -> Map k a
--LIQUID STRICT_1_OF_3(go)
--LIQUID go kx x Tip = singleton kx x
--LIQUID go kx x (Bin sz ky y l r) =
--LIQUID case compare kx ky of
--LIQUID -- Bin ky y (go kx x l) r
--LIQUID LT -> balanceL ky y (go kx x l) r
--LIQUID GT -> balanceR ky y l (go kx x r)
--LIQUID EQ -> Bin sz kx x l r
{-@ insert_go :: (Ord k) => k -> a -> OMap k a -> OMap k a @-}
insert_go :: Ord k => k -> a -> Map k a -> Map k a
STRICT_1_OF_3(insert_go)
insert_go kx x Tip = singleton kx x
insert_go kx x (Bin sz ky y l r) =
case compare kx ky of
-- Bin ky y (insert_go kx x l) r
LT -> balanceL ky y (insert_go kx x l) r
GT -> balanceR ky y l (insert_go kx x r)
EQ -> Bin sz kx x l r
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE insert #-}
#else
{-# INLINE insert #-}
#endif
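-- An illustrative sketch (hypothetical name 'insertDemo'): 'insert'
-- overwrites an existing binding, while inserting a fresh key grows the map.
insertDemo :: Bool
insertDemo = toList (insert 5 'x' m) == [(3,'b'), (5,'x')]           -- replaced
          && toList (insert 7 'x' m) == [(3,'b'), (5,'a'), (7,'x')]  -- added
  where
    m = fromList [(3,'b'), (5,'a')] :: Map Int Char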
-- Insert a new key and value in the map if the key is not already present.
-- Used by `union`.
-- See Note: Type of local 'go' function
insertR :: Ord k => k -> a -> Map k a -> Map k a
insertR = insertR_go
--LIQUID insertR = go
--LIQUID where
--LIQUID go :: Ord k => k -> a -> Map k a -> Map k a
--LIQUID STRICT_1_OF_3(go)
--LIQUID go kx x Tip = singleton kx x
--LIQUID go kx x t@(Bin _ ky y l r) =
--LIQUID case compare kx ky of
--LIQUID LT -> balanceL ky y (go kx x l) r
--LIQUID GT -> balanceR ky y l (go kx x r)
--LIQUID EQ -> t
insertR_go :: Ord k => k -> a -> Map k a -> Map k a
STRICT_1_OF_3(insertR_go)
insertR_go kx x Tip = singleton kx x
insertR_go kx x t@(Bin _ ky y l r) =
case compare kx ky of
LT -> balanceL ky y (insertR_go kx x l) r
GT -> balanceR ky y l (insertR_go kx x r)
EQ -> t
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE insertR #-}
#else
{-# INLINE insertR #-}
#endif
-- | /O(log n)/. Insert with a function, combining new value and old value.
-- @'insertWith' f key value mp@
-- will insert the pair (key, value) into @mp@ if key does
-- not exist in the map. If the key does exist, the function will
-- insert the pair @(key, f new_value old_value)@.
--
-- > insertWith (++) 5 "xxx" (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "xxxa")]
-- > insertWith (++) 7 "xxx" (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a"), (7, "xxx")]
-- > insertWith (++) 5 "xxx" empty == singleton 5 "xxx"
{-@ insertWith :: (Ord k) => (a -> a -> a) -> k -> a -> OMap k a -> OMap k a @-}
insertWith :: Ord k => (a -> a -> a) -> k -> a -> Map k a -> Map k a
insertWith f = insertWithKey (\_ x' y' -> f x' y')
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE insertWith #-}
#else
{-# INLINE insertWith #-}
#endif
-- | /O(log n)/. Insert with a function, combining key, new value and old value.
-- @'insertWithKey' f key value mp@
-- will insert the pair (key, value) into @mp@ if key does
-- not exist in the map. If the key does exist, the function will
-- insert the pair @(key,f key new_value old_value)@.
-- Note that the key passed to f is the same key passed to 'insertWithKey'.
--
-- > let f key new_value old_value = (show key) ++ ":" ++ new_value ++ "|" ++ old_value
-- > insertWithKey f 5 "xxx" (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "5:xxx|a")]
-- > insertWithKey f 7 "xxx" (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a"), (7, "xxx")]
-- > insertWithKey f 5 "xxx" empty == singleton 5 "xxx"
-- See Note: Type of local 'go' function
{-@ insertWithKey :: (Ord k) => (k -> a -> a -> a) -> k -> a -> OMap k a -> OMap k a @-}
insertWithKey :: Ord k => (k -> a -> a -> a) -> k -> a -> Map k a -> Map k a
insertWithKey = insertWithKey_go
--LIQUID insertWithKey = go
--LIQUID where
--LIQUID go :: Ord k => (k -> a -> a -> a) -> k -> a -> Map k a -> Map k a
--LIQUID STRICT_2_OF_4(go)
--LIQUID go _ kx x Tip = singleton kx x
--LIQUID go f kx x (Bin sy ky y l r) =
--LIQUID case compare kx ky of
--LIQUID LT -> balanceL ky y (go f kx x l) r
--LIQUID GT -> balanceR ky y l (go f kx x r)
--LIQUID EQ -> Bin sy kx (f kx x y) l r
{-@ insertWithKey_go :: (Ord k) => (k -> a -> a -> a) -> k -> a -> OMap k a -> OMap k a @-}
insertWithKey_go :: Ord k => (k -> a -> a -> a) -> k -> a -> Map k a -> Map k a
STRICT_2_OF_4(insertWithKey_go)
insertWithKey_go _ kx x Tip = singleton kx x
insertWithKey_go f kx x (Bin sy ky y l r) =
case compare kx ky of
LT -> balanceL ky y (insertWithKey_go f kx x l) r
GT -> balanceR ky y l (insertWithKey_go f kx x r)
EQ -> Bin sy kx (f kx x y) l r
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE insertWithKey #-}
#else
{-# INLINE insertWithKey #-}
#endif
-- | /O(log n)/. Combines insert operation with old value retrieval.
-- The expression (@'insertLookupWithKey' f k x map@)
-- is a pair where the first element is equal to (@'lookup' k map@)
-- and the second element equal to (@'insertWithKey' f k x map@).
--
-- > let f key new_value old_value = (show key) ++ ":" ++ new_value ++ "|" ++ old_value
-- > insertLookupWithKey f 5 "xxx" (fromList [(5,"a"), (3,"b")]) == (Just "a", fromList [(3, "b"), (5, "5:xxx|a")])
-- > insertLookupWithKey f 7 "xxx" (fromList [(5,"a"), (3,"b")]) == (Nothing, fromList [(3, "b"), (5, "a"), (7, "xxx")])
-- > insertLookupWithKey f 5 "xxx" empty == (Nothing, singleton 5 "xxx")
--
-- This is how to define @insertLookup@ using @insertLookupWithKey@:
--
-- > let insertLookup kx x t = insertLookupWithKey (\_ a _ -> a) kx x t
-- > insertLookup 5 "x" (fromList [(5,"a"), (3,"b")]) == (Just "a", fromList [(3, "b"), (5, "x")])
-- > insertLookup 7 "x" (fromList [(5,"a"), (3,"b")]) == (Nothing, fromList [(3, "b"), (5, "a"), (7, "x")])
-- See Note: Type of local 'go' function
{-@ insertLookupWithKey :: Ord k => (k -> a -> a -> a) -> k -> a -> OMap k a -> (Maybe a, OMap k a) @-}
insertLookupWithKey :: Ord k => (k -> a -> a -> a) -> k -> a -> Map k a -> (Maybe a, Map k a)
insertLookupWithKey = insertLookupWithKey_go
--LIQUID insertLookupWithKey = go
--LIQUID where
--LIQUID go :: Ord k => (k -> a -> a -> a) -> k -> a -> Map k a -> (Maybe a, Map k a)
--LIQUID STRICT_2_OF_4(go)
--LIQUID go _ kx x Tip = (Nothing, singleton kx x)
--LIQUID go f kx x (Bin sy ky y l r) =
--LIQUID case compare kx ky of
--LIQUID LT -> let (found, l') = go f kx x l
--LIQUID in (found, balanceL ky y l' r)
--LIQUID GT -> let (found, r') = go f kx x r
--LIQUID in (found, balanceR ky y l r')
--LIQUID EQ -> (Just y, Bin sy kx (f kx x y) l r)
{-@ insertLookupWithKey_go :: Ord k => (k -> a -> a -> a) -> k -> a -> OMap k a -> (Maybe a, OMap k a) @-}
insertLookupWithKey_go :: Ord k => (k -> a -> a -> a) -> k -> a -> Map k a -> (Maybe a, Map k a)
STRICT_2_OF_4(insertLookupWithKey_go)
insertLookupWithKey_go _ kx x Tip = (Nothing, singleton kx x)
insertLookupWithKey_go f kx x (Bin sy ky y l r) =
case compare kx ky of
LT -> let (found, l') = insertLookupWithKey_go f kx x l
in (found, balanceL ky y l' r)
GT -> let (found, r') = insertLookupWithKey_go f kx x r
in (found, balanceR ky y l r')
EQ -> (Just y, Bin sy kx (f kx x y) l r)
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE insertLookupWithKey #-}
#else
{-# INLINE insertLookupWithKey #-}
#endif
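-- A sketch of the @insertLookup@ helper suggested in the comment above; the
-- name 'insertLookupSketch' is hypothetical and not part of the public API.
insertLookupSketch :: Ord k => k -> a -> Map k a -> (Maybe a, Map k a)
insertLookupSketch kx x t = insertLookupWithKey (\_ a _ -> a) kx x t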
{--------------------------------------------------------------------
Deletion
--------------------------------------------------------------------}
-- | /O(log n)/. Delete a key and its value from the map. When the key is not
-- a member of the map, the original map is returned.
--
-- > delete 5 (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
-- > delete 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > delete 5 empty == empty
-- See Note: Type of local 'go' function
{-@ delete :: (Ord k) => k -> OMap k a -> OMap k a @-}
delete :: Ord k => k -> Map k a -> Map k a
delete = delete_go
--LIQUID delete = go
--LIQUID where
--LIQUID go :: Ord k => k -> Map k a -> Map k a
--LIQUID STRICT_1_OF_2(go)
--LIQUID go _ Tip = Tip
--LIQUID go k (Bin _ kx x l r) =
--LIQUID case compare k kx of
--LIQUID LT -> balanceR kx x (go k l) r
--LIQUID GT -> balanceL kx x l (go k r)
--LIQUID EQ -> glue kx l r
{-@ delete_go :: (Ord k) => k -> OMap k a -> OMap k a @-}
delete_go :: Ord k => k -> Map k a -> Map k a
STRICT_1_OF_2(delete_go)
delete_go _ Tip = Tip
delete_go k (Bin _ kx x l r) =
case compare k kx of
LT -> balanceR kx x (delete_go k l) r
GT -> balanceL kx x l (delete_go k r)
EQ -> glue kx l r
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE delete #-}
#else
{-# INLINE delete #-}
#endif
-- | /O(log n)/. Update a value at a specific key with the result of the provided function.
-- When the key is not
-- a member of the map, the original map is returned.
--
-- > adjust ("new " ++) 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "new a")]
-- > adjust ("new " ++) 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > adjust ("new " ++) 7 empty == empty
{-@ adjust :: (Ord k) => (a -> a) -> k -> OMap k a -> OMap k a @-}
adjust :: Ord k => (a -> a) -> k -> Map k a -> Map k a
adjust f = adjustWithKey (\_ x -> f x)
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE adjust #-}
#else
{-# INLINE adjust #-}
#endif
-- | /O(log n)/. Adjust a value at a specific key. When the key is not
-- a member of the map, the original map is returned.
--
-- > let f key x = (show key) ++ ":new " ++ x
-- > adjustWithKey f 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "5:new a")]
-- > adjustWithKey f 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > adjustWithKey f 7 empty == empty
{-@ adjustWithKey :: (Ord k) => (k -> a -> a) -> k -> OMap k a -> OMap k a @-}
adjustWithKey :: Ord k => (k -> a -> a) -> k -> Map k a -> Map k a
adjustWithKey f = updateWithKey (\k' x' -> Just (f k' x'))
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE adjustWithKey #-}
#else
{-# INLINE adjustWithKey #-}
#endif
-- | /O(log n)/. The expression (@'update' f k map@) updates the value @x@
-- at @k@ (if it is in the map). If (@f x@) is 'Nothing', the element is
-- deleted. If it is (@'Just' y@), the key @k@ is bound to the new value @y@.
--
-- > let f x = if x == "a" then Just "new a" else Nothing
-- > update f 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "new a")]
-- > update f 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > update f 3 (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
{-@ update :: (Ord k) => (a -> Maybe a) -> k -> OMap k a -> OMap k a @-}
update :: Ord k => (a -> Maybe a) -> k -> Map k a -> Map k a
update f = updateWithKey (\_ x -> f x)
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE update #-}
#else
{-# INLINE update #-}
#endif
-- | /O(log n)/. The expression (@'updateWithKey' f k map@) updates the
-- value @x@ at @k@ (if it is in the map). If (@f k x@) is 'Nothing',
-- the element is deleted. If it is (@'Just' y@), the key @k@ is bound
-- to the new value @y@.
--
-- > let f k x = if x == "a" then Just ((show k) ++ ":new a") else Nothing
-- > updateWithKey f 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "5:new a")]
-- > updateWithKey f 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > updateWithKey f 3 (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
-- See Note: Type of local 'go' function
{-@ updateWithKey :: (Ord k) => (k -> a -> Maybe a) -> k -> OMap k a -> OMap k a @-}
updateWithKey :: Ord k => (k -> a -> Maybe a) -> k -> Map k a -> Map k a
updateWithKey = updateWithKey_go
--LIQUID updateWithKey = go
--LIQUID where
--LIQUID go :: Ord k => (k -> a -> Maybe a) -> k -> Map k a -> Map k a
--LIQUID STRICT_2_OF_3(go)
--LIQUID go _ _ Tip = Tip
--LIQUID go f k(Bin sx kx x l r) =
--LIQUID case compare k kx of
--LIQUID LT -> balanceR kx x (go f k l) r
--LIQUID GT -> balanceL kx x l (go f k r)
--LIQUID EQ -> case f kx x of
--LIQUID Just x' -> Bin sx kx x' l r
--LIQUID Nothing -> glue kx l r
{-@ updateWithKey_go :: (Ord k) => (k -> a -> Maybe a) -> k -> OMap k a -> OMap k a @-}
updateWithKey_go :: Ord k => (k -> a -> Maybe a) -> k -> Map k a -> Map k a
STRICT_2_OF_3(updateWithKey_go)
updateWithKey_go _ _ Tip = Tip
updateWithKey_go f k (Bin sx kx x l r) =
case compare k kx of
LT -> balanceR kx x (updateWithKey_go f k l) r
GT -> balanceL kx x l (updateWithKey_go f k r)
EQ -> case f kx x of
Just x' -> Bin sx kx x' l r
Nothing -> glue kx l r
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE updateWithKey #-}
#else
{-# INLINE updateWithKey #-}
#endif
-- | /O(log n)/. Lookup and update. See also 'updateWithKey'.
-- The function returns the changed value if the entry is updated, or the
-- original value if the entry is deleted.
--
-- > let f k x = if x == "a" then Just ((show k) ++ ":new a") else Nothing
-- > updateLookupWithKey f 5 (fromList [(5,"a"), (3,"b")]) == (Just "5:new a", fromList [(3, "b"), (5, "5:new a")])
-- > updateLookupWithKey f 7 (fromList [(5,"a"), (3,"b")]) == (Nothing, fromList [(3, "b"), (5, "a")])
-- > updateLookupWithKey f 3 (fromList [(5,"a"), (3,"b")]) == (Just "b", singleton 5 "a")
-- See Note: Type of local 'go' function
{-@ updateLookupWithKey :: (Ord k) => (k -> a -> Maybe a) -> k -> OMap k a -> (Maybe a, OMap k a) @-}
updateLookupWithKey :: Ord k => (k -> a -> Maybe a) -> k -> Map k a -> (Maybe a,Map k a)
updateLookupWithKey = updateLookupWithKey_go
--LIQUID updateLookupWithKey = go
--LIQUID where
--LIQUID go :: Ord k => (k -> a -> Maybe a) -> k -> Map k a -> (Maybe a,Map k a)
--LIQUID STRICT_2_OF_3(go)
--LIQUID go _ _ Tip = (Nothing,Tip)
--LIQUID go f k (Bin sx kx x l r) =
--LIQUID case compare k kx of
--LIQUID LT -> let (found,l') = go f k l in (found,balanceR kx x l' r)
--LIQUID GT -> let (found,r') = go f k r in (found,balanceL kx x l r')
--LIQUID EQ -> case f kx x of
--LIQUID Just x' -> (Just x',Bin sx kx x' l r)
--LIQUID Nothing -> (Just x,glue kx l r)
{-@ updateLookupWithKey_go :: (Ord k) => (k -> a -> Maybe a) -> k -> OMap k a -> (Maybe a, OMap k a) @-}
updateLookupWithKey_go :: Ord k => (k -> a -> Maybe a) -> k -> Map k a -> (Maybe a,Map k a)
STRICT_2_OF_3(updateLookupWithKey_go)
updateLookupWithKey_go _ _ Tip = (Nothing,Tip)
updateLookupWithKey_go f k (Bin sx kx x l r) =
case compare k kx of
LT -> let (found,l') = updateLookupWithKey_go f k l in (found,balanceR kx x l' r)
GT -> let (found,r') = updateLookupWithKey_go f k r in (found,balanceL kx x l r')
EQ -> case f kx x of
Just x' -> (Just x',Bin sx kx x' l r)
Nothing -> (Just x,glue kx l r)
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE updateLookupWithKey #-}
#else
{-# INLINE updateLookupWithKey #-}
#endif
-- | /O(log n)/. The expression (@'alter' f k map@) alters the value @x@ at @k@, or absence thereof.
-- 'alter' can be used to insert, delete, or update a value in a 'Map'.
-- In short : @'lookup' k ('alter' f k m) = f ('lookup' k m)@.
--
-- > let f _ = Nothing
-- > alter f 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a")]
-- > alter f 5 (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
-- >
-- > let f _ = Just "c"
-- > alter f 7 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "a"), (7, "c")]
-- > alter f 5 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "c")]
-- See Note: Type of local 'go' function
{-@ alter :: (Ord k) => (Maybe a -> Maybe a) -> k -> OMap k a -> OMap k a @-}
alter :: Ord k => (Maybe a -> Maybe a) -> k -> Map k a -> Map k a
alter = alter_go
--LIQUID alter = go
--LIQUID where
--LIQUID go :: Ord k => (Maybe a -> Maybe a) -> k -> Map k a -> Map k a
--LIQUID STRICT_2_OF_3(go)
--LIQUID go f k Tip = case f Nothing of
--LIQUID Nothing -> Tip
--LIQUID Just x -> singleton k x
--LIQUID
--LIQUID go f k (Bin sx kx x l r) = case compare k kx of
--LIQUID LT -> balance kx x (go f k l) r
--LIQUID GT -> balance kx x l (go f k r)
--LIQUID EQ -> case f (Just x) of
--LIQUID Just x' -> Bin sx kx x' l r
--LIQUID Nothing -> glue kx l r
alter_go :: Ord k => (Maybe a -> Maybe a) -> k -> Map k a -> Map k a
STRICT_2_OF_3(alter_go)
alter_go f k Tip = case f Nothing of
Nothing -> Tip
Just x -> singleton k x
alter_go f k (Bin sx kx x l r) = case compare k kx of
LT -> balance kx x (alter_go f k l) r
GT -> balance kx x l (alter_go f k r)
EQ -> case f (Just x) of
Just x' -> Bin sx kx x' l r
Nothing -> glue kx l r
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE alter #-}
#else
{-# INLINE alter #-}
#endif
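-- An illustrative sketch (hypothetical name 'alterDemo'): 'alter' subsumes
-- insertion and deletion, depending on what the supplied function returns.
alterDemo :: Bool
alterDemo = toList (alter (const Nothing) 5 m) == [(3,'b')]                      -- delete
         && toList (alter (const (Just 'c')) 7 m) == [(3,'b'), (5,'a'), (7,'c')] -- insert
  where
    m = fromList [(3,'b'), (5,'a')] :: Map Int Char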
{--------------------------------------------------------------------
Indexing
--------------------------------------------------------------------}
-- | /O(log n)/. Return the /index/ of a key. The index is a number from
-- /0/ up to, but not including, the 'size' of the map. Calls 'error' when
-- the key is not a 'member' of the map.
--
-- > findIndex 2 (fromList [(5,"a"), (3,"b")]) Error: element is not in the map
-- > findIndex 3 (fromList [(5,"a"), (3,"b")]) == 0
-- > findIndex 5 (fromList [(5,"a"), (3,"b")]) == 1
-- > findIndex 6 (fromList [(5,"a"), (3,"b")]) Error: element is not in the map
-- See Note: Type of local 'go' function
{-@ findIndex :: (Ord k) => k -> OMap k a -> GHC.Types.Int @-}
findIndex :: Ord k => k -> Map k a -> Int
findIndex = findIndex_go 0
--LIQUID findIndex = go 0
--LIQUID where
--LIQUID go :: Ord k => Int -> k -> Map k a -> Int
--LIQUID STRICT_1_OF_3(go)
--LIQUID STRICT_2_OF_3(go)
--LIQUID go _ _ Tip = error "Map.findIndex: element is not in the map"
--LIQUID go idx k (Bin _ kx _ l r) = case compare k kx of
--LIQUID LT -> go idx k l
--LIQUID GT -> go (idx + size l + 1) k r
--LIQUID EQ -> idx + size l
{-@ findIndex_go :: (Ord k) => Int -> k -> OMap k a -> GHC.Types.Int @-}
{-@ Decrease findIndex_go 4 @-}
findIndex_go :: Ord k => Int -> k -> Map k a -> Int
STRICT_1_OF_3(findIndex_go)
STRICT_2_OF_3(findIndex_go)
findIndex_go _ _ Tip = error "Map.findIndex: element is not in the map"
findIndex_go idx k (Bin _ kx _ l r) = case compare k kx of
LT -> findIndex_go idx k l
GT -> findIndex_go (idx + size l + 1) k r
EQ -> idx + size l
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE findIndex #-}
#endif
-- | /O(log n)/. Lookup the /index/ of a key. The index is a number from
-- /0/ up to, but not including, the 'size' of the map.
--
-- > isJust (lookupIndex 2 (fromList [(5,"a"), (3,"b")])) == False
-- > fromJust (lookupIndex 3 (fromList [(5,"a"), (3,"b")])) == 0
-- > fromJust (lookupIndex 5 (fromList [(5,"a"), (3,"b")])) == 1
-- > isJust (lookupIndex 6 (fromList [(5,"a"), (3,"b")])) == False
-- See Note: Type of local 'go' function
{-@ lookupIndex :: (Ord k) => k -> OMap k a -> Maybe GHC.Types.Int @-}
lookupIndex :: Ord k => k -> Map k a -> Maybe Int
lookupIndex = lookupIndex_go 0
--LIQUID lookupIndex = go 0
--LIQUID where
--LIQUID go :: Ord k => Int -> k -> Map k a -> Maybe Int
--LIQUID STRICT_1_OF_3(go)
--LIQUID STRICT_2_OF_3(go)
--LIQUID go _ _ Tip = Nothing
--LIQUID go idx k (Bin _ kx _ l r) = case compare k kx of
--LIQUID LT -> go idx k l
--LIQUID GT -> go (idx + size l + 1) k r
--LIQUID EQ -> Just $! idx + size l
{-@ lookupIndex_go :: (Ord k) => Int -> k -> OMap k a -> Maybe GHC.Types.Int @-}
{-@ Decrease lookupIndex_go 4 @-}
lookupIndex_go :: Ord k => Int -> k -> Map k a -> Maybe Int
STRICT_1_OF_3(lookupIndex_go)
STRICT_2_OF_3(lookupIndex_go)
lookupIndex_go _ _ Tip = Nothing
lookupIndex_go idx k (Bin _ kx _ l r) = case compare k kx of
LT -> lookupIndex_go idx k l
GT -> lookupIndex_go (idx + size l + 1) k r
EQ -> Just $! idx + size l
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE lookupIndex #-}
#endif
-- | /O(log n)/. Retrieve an element by /index/. Calls 'error' when an
-- invalid index is used.
--
-- > elemAt 0 (fromList [(5,"a"), (3,"b")]) == (3,"b")
-- > elemAt 1 (fromList [(5,"a"), (3,"b")]) == (5, "a")
-- > elemAt 2 (fromList [(5,"a"), (3,"b")]) Error: index out of range
{-@ elemAt :: GHC.Types.Int -> OMap k a -> (k, a) @-}
{-@ Decrease elemAt 2 @-}
elemAt :: Int -> Map k a -> (k,a)
STRICT_1_OF_2(elemAt)
elemAt _ Tip = error "Map.elemAt: index out of range"
elemAt i (Bin _ kx x l r)
= case compare i sizeL of
LT -> elemAt i l
GT -> elemAt (i-sizeL-1) r
EQ -> (kx,x)
where
sizeL = size l
-- | /O(log n)/. Update the element at /index/. Calls 'error' when an
-- invalid index is used.
--
-- > updateAt (\ _ _ -> Just "x") 0 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "x"), (5, "a")]
-- > updateAt (\ _ _ -> Just "x") 1 (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "x")]
-- > updateAt (\ _ _ -> Just "x") 2 (fromList [(5,"a"), (3,"b")]) Error: index out of range
-- > updateAt (\ _ _ -> Just "x") (-1) (fromList [(5,"a"), (3,"b")]) Error: index out of range
-- > updateAt (\_ _ -> Nothing) 0 (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
-- > updateAt (\_ _ -> Nothing) 1 (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
-- > updateAt (\_ _ -> Nothing) 2 (fromList [(5,"a"), (3,"b")]) Error: index out of range
-- > updateAt (\_ _ -> Nothing) (-1) (fromList [(5,"a"), (3,"b")]) Error: index out of range
{-@ updateAt :: (k -> a -> Maybe a) -> GHC.Types.Int -> OMap k a -> OMap k a @-}
{-@ Decrease updateAt 3 @-}
updateAt :: (k -> a -> Maybe a) -> Int -> Map k a -> Map k a
updateAt f i t = i `seq`
case t of
Tip -> error "Map.updateAt: index out of range"
Bin sx kx x l r -> case compare i sizeL of
LT -> balanceR kx x (updateAt f i l) r
GT -> balanceL kx x l (updateAt f (i-sizeL-1) r)
EQ -> case f kx x of
Just x' -> Bin sx kx x' l r
Nothing -> glue kx l r
where
sizeL = size l
-- | /O(log n)/. Delete the element at /index/.
-- Defined as (@'deleteAt' i map = 'updateAt' (\k x -> 'Nothing') i map@).
--
-- > deleteAt 0 (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
-- > deleteAt 1 (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
-- > deleteAt 2 (fromList [(5,"a"), (3,"b")]) Error: index out of range
-- > deleteAt (-1) (fromList [(5,"a"), (3,"b")]) Error: index out of range
{-@ deleteAt :: GHC.Types.Int -> OMap k a -> OMap k a @-}
{-@ Decrease deleteAt 2 @-}
deleteAt :: Int -> Map k a -> Map k a
deleteAt i t = i `seq`
case t of
Tip -> error "Map.deleteAt: index out of range"
Bin _ kx x l r -> case compare i sizeL of
LT -> balanceR kx x (deleteAt i l) r
GT -> balanceL kx x l (deleteAt (i-sizeL-1) r)
EQ -> glue kx l r
where
sizeL = size l
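-- An illustrative sketch (hypothetical name 'indexDemo'): indices are
-- positions in ascending key order, so 'findIndex' and 'elemAt' invert each
-- other on valid inputs, while 'lookupIndex' is the total variant.
indexDemo :: Bool
indexDemo = findIndex 5 m == 1
         && elemAt 1 m == (5,'a')
         && lookupIndex 2 m == Nothing
  where
    m = fromList [(3,'b'), (5,'a')] :: Map Int Char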
{--------------------------------------------------------------------
Minimal, Maximal
--------------------------------------------------------------------}
-- | /O(log n)/. The minimal key of the map. Calls 'error' if the map is empty.
--
-- > findMin (fromList [(5,"a"), (3,"b")]) == (3,"b")
-- > findMin empty Error: empty map has no minimal element
{-@ findMin :: OMap k a -> (k, a) @-}
findMin :: Map k a -> (k,a)
findMin (Bin _ kx x Tip _) = (kx,x)
findMin (Bin _ _ _ l _) = findMin l
findMin Tip = error "Map.findMin: empty map has no minimal element"
-- | /O(log n)/. The maximal key of the map. Calls 'error' if the map is empty.
--
-- > findMax (fromList [(5,"a"), (3,"b")]) == (5,"a")
-- > findMax empty Error: empty map has no maximal element
{-@ findMax :: OMap k a -> (k, a) @-}
findMax :: Map k a -> (k,a)
findMax (Bin _ kx x _ Tip) = (kx,x)
findMax (Bin _ _ _ _ r) = findMax r
findMax Tip = error "Map.findMax: empty map has no maximal element"
-- | /O(log n)/. Delete the minimal key. Returns an empty map if the map is empty.
--
-- > deleteMin (fromList [(5,"a"), (3,"b"), (7,"c")]) == fromList [(5,"a"), (7,"c")]
-- > deleteMin empty == empty
{-@ deleteMin :: OMap k a -> OMap k a @-}
deleteMin :: Map k a -> Map k a
deleteMin (Bin _ _ _ Tip r) = r
deleteMin (Bin _ kx x l r) = balanceR kx x (deleteMin l) r
deleteMin Tip = Tip
-- | /O(log n)/. Delete the maximal key. Returns an empty map if the map is empty.
--
-- > deleteMax (fromList [(5,"a"), (3,"b"), (7,"c")]) == fromList [(3,"b"), (5,"a")]
-- > deleteMax empty == empty
{-@ deleteMax :: OMap k a -> OMap k a @-}
deleteMax :: Map k a -> Map k a
deleteMax (Bin _ _ _ l Tip) = l
deleteMax (Bin _ kx x l r) = balanceL kx x l (deleteMax r)
deleteMax Tip = Tip
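-- An illustrative sketch (hypothetical name 'minMaxDemo'): the extremal
-- queries agree with deleting the corresponding key.
minMaxDemo :: Bool
minMaxDemo = findMin m == (3,'b')
          && findMax m == (7,'c')
          && toList (deleteMin m) == [(5,'a'), (7,'c')]
          && toList (deleteMax m) == [(3,'b'), (5,'a')]
  where
    m = fromList [(3,'b'), (5,'a'), (7,'c')] :: Map Int Char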
-- | /O(log n)/. Update the value at the minimal key.
--
-- > updateMin (\ a -> Just ("X" ++ a)) (fromList [(5,"a"), (3,"b")]) == fromList [(3, "Xb"), (5, "a")]
-- > updateMin (\ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
{-@ updateMin :: (a -> Maybe a) -> OMap k a -> OMap k a @-}
updateMin :: (a -> Maybe a) -> Map k a -> Map k a
updateMin f m
= updateMinWithKey (\_ x -> f x) m
-- | /O(log n)/. Update the value at the maximal key.
--
-- > updateMax (\ a -> Just ("X" ++ a)) (fromList [(5,"a"), (3,"b")]) == fromList [(3, "b"), (5, "Xa")]
-- > updateMax (\ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
{-@ updateMax :: (a -> Maybe a) -> OMap k a -> OMap k a @-}
updateMax :: (a -> Maybe a) -> Map k a -> Map k a
updateMax f m
= updateMaxWithKey (\_ x -> f x) m
-- | /O(log n)/. Update the value at the minimal key.
--
-- > updateMinWithKey (\ k a -> Just ((show k) ++ ":" ++ a)) (fromList [(5,"a"), (3,"b")]) == fromList [(3,"3:b"), (5,"a")]
-- > updateMinWithKey (\ _ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
{-@ updateMinWithKey :: (k -> a -> Maybe a) -> OMap k a -> OMap k a @-}
updateMinWithKey :: (k -> a -> Maybe a) -> Map k a -> Map k a
updateMinWithKey _ Tip = Tip
updateMinWithKey f (Bin sx kx x Tip r) = case f kx x of
Nothing -> r
Just x' -> Bin sx kx x' Tip r
updateMinWithKey f (Bin _ kx x l r) = balanceR kx x (updateMinWithKey f l) r
-- | /O(log n)/. Update the value at the maximal key.
--
-- > updateMaxWithKey (\ k a -> Just ((show k) ++ ":" ++ a)) (fromList [(5,"a"), (3,"b")]) == fromList [(3,"b"), (5,"5:a")]
-- > updateMaxWithKey (\ _ _ -> Nothing) (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
{-@ updateMaxWithKey :: (k -> a -> Maybe a) -> OMap k a -> OMap k a @-}
updateMaxWithKey :: (k -> a -> Maybe a) -> Map k a -> Map k a
updateMaxWithKey _ Tip = Tip
updateMaxWithKey f (Bin sx kx x l Tip) = case f kx x of
Nothing -> l
Just x' -> Bin sx kx x' l Tip
updateMaxWithKey f (Bin _ kx x l r) = balanceL kx x l (updateMaxWithKey f r)
-- | /O(log n)/. Retrieves the minimal (key,value) pair of the map, and
-- the map stripped of that element, or 'Nothing' if passed an empty map.
--
-- > minViewWithKey (fromList [(5,"a"), (3,"b")]) == Just (3, "b", singleton 5 "a")
-- > minViewWithKey empty == Nothing
{-@ minViewWithKey :: OMap k a -> Maybe (k, a, OMap k a) @-}
minViewWithKey :: Map k a -> Maybe (k, a, Map k a)
minViewWithKey Tip = Nothing
minViewWithKey x = Just (deleteFindMin x)
-- | /O(log n)/. Retrieves the maximal (key,value) pair of the map, and
-- the map stripped of that element, or 'Nothing' if passed an empty map.
--
-- > maxViewWithKey (fromList [(5,"a"), (3,"b")]) == Just (5, "a", singleton 3 "b")
-- > maxViewWithKey empty == Nothing
{-@ maxViewWithKey :: OMap k a -> Maybe (k, a, OMap k a) @-}
maxViewWithKey :: Map k a -> Maybe (k, a, Map k a)
maxViewWithKey Tip = Nothing
maxViewWithKey x = Just (deleteFindMax x)
-- | /O(log n)/. Retrieves the value associated with minimal key of the
-- map, and the map stripped of that element, or 'Nothing' if passed an
-- empty map.
--
-- > minView (fromList [(5,"a"), (3,"b")]) == Just ("b", singleton 5 "a")
-- > minView empty == Nothing
{-@ minView :: OMap k a -> Maybe (a, OMap k a) @-}
minView :: Map k a -> Maybe (a, Map k a)
minView Tip = Nothing
minView x = let (_, m, t) = deleteFindMin x in Just (m, t) -- (first snd $ deleteFindMin x)
-- | /O(log n)/. Retrieves the value associated with maximal key of the
-- map, and the map stripped of that element, or 'Nothing' if passed an
-- empty map.
--
-- > maxView (fromList [(5,"a"), (3,"b")]) == Just ("a", singleton 3 "b")
-- > maxView empty == Nothing
{-@ maxView :: OMap k a -> Maybe (a, OMap k a) @-}
maxView :: Map k a -> Maybe (a, Map k a)
maxView Tip = Nothing
maxView x = let (_, m, t) = deleteFindMax x in Just (m, t)
-- Update the 1st component of a tuple (special case of Control.Arrow.first)
first :: (a -> b) -> (a, c) -> (b, c)
first f (x, y) = (f x, y)
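-- An illustrative sketch (hypothetical names): in this version the @WithKey@
-- views return a flat triple of key, value and remaining map, and all views
-- return 'Nothing' on an empty map.
viewDemo :: Bool
viewDemo = checkMin (minViewWithKey m) && checkEmpty (minView (empty :: Map Int Char))
  where
    m = fromList [(3,'b'), (5,'a')] :: Map Int Char
    checkMin (Just (k, v, rest)) = (k, v) == (3,'b') && toList rest == [(5,'a')]
    checkMin Nothing             = False
    checkEmpty Nothing  = True
    checkEmpty (Just _) = False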
{--------------------------------------------------------------------
Union.
--------------------------------------------------------------------}
-- | The union of a list of maps:
-- (@'unions' == 'Prelude.foldl' 'union' 'empty'@).
--
-- > unions [(fromList [(5, "a"), (3, "b")]), (fromList [(5, "A"), (7, "C")]), (fromList [(5, "A3"), (3, "B3")])]
-- > == fromList [(3, "b"), (5, "a"), (7, "C")]
-- > unions [(fromList [(5, "A3"), (3, "B3")]), (fromList [(5, "A"), (7, "C")]), (fromList [(5, "a"), (3, "b")])]
-- > == fromList [(3, "B3"), (5, "A3"), (7, "C")]
{-@ unions :: (Ord k) => [OMap k a] -> OMap k a @-}
unions :: Ord k => [Map k a] -> Map k a
unions ts
= foldlStrict union empty ts
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE unions #-}
#endif
-- | The union of a list of maps, with a combining operation:
-- (@'unionsWith' f == 'Prelude.foldl' ('unionWith' f) 'empty'@).
--
-- > unionsWith (++) [(fromList [(5, "a"), (3, "b")]), (fromList [(5, "A"), (7, "C")]), (fromList [(5, "A3"), (3, "B3")])]
-- > == fromList [(3, "bB3"), (5, "aAA3"), (7, "C")]
{-@ unionsWith :: (Ord k) => (a->a->a) -> [OMap k a] -> OMap k a @-}
unionsWith :: Ord k => (a->a->a) -> [Map k a] -> Map k a
unionsWith f ts
= foldlStrict (unionWith f) empty ts
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE unionsWith #-}
#endif
-- | /O(n+m)/.
-- The expression (@'union' t1 t2@) takes the left-biased union of @t1@ and @t2@.
-- It prefers @t1@ when duplicate keys are encountered,
-- i.e. (@'union' == 'unionWith' 'const'@).
-- The implementation uses the efficient /hedge-union/ algorithm.
-- Hedge-union is more efficient on (bigset \``union`\` smallset).
--
-- > union (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == fromList [(3, "b"), (5, "a"), (7, "C")]
{-@ union :: (Ord k) => OMap k a -> OMap k a -> OMap k a @-}
union :: Ord k => Map k a -> Map k a -> Map k a
union Tip t2 = t2
union t1 Tip = t1
union t1 t2 = hedgeUnion NothingS NothingS t1 t2
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE union #-}
#endif
-- left-biased hedge union
{-@ hedgeUnion :: (Ord k) => lo: MaybeS k
-> hi: MaybeS {v: k | (IfDefLt lo v) }
-> OMap {v: k | (KeyBetween lo hi v) } a
-> {v: OMap k a | (RootBetween lo hi v) }
-> OMap {v: k | (KeyBetween lo hi v)} a @-}
hedgeUnion :: Ord a => MaybeS a -> MaybeS a -> Map a b -> Map a b -> Map a b
hedgeUnion _ _ t1 Tip = t1
hedgeUnion blo bhi Tip (Bin _ kx x l r) = join kx x (filterGt blo l) (filterLt bhi r)
hedgeUnion _ _ t1 (Bin _ kx x Tip Tip) = insertR kx x t1 -- According to benchmarks, this special case increases
-- performance up to 30%. It does not help in difference or intersection.
hedgeUnion blo bhi (Bin _ kx x l r) t2 = join kx x (hedgeUnion blo bmi l (trim blo bmi t2))
(hedgeUnion bmi bhi r (trim bmi bhi t2))
where bmi = JustS kx
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE hedgeUnion #-}
#endif
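-- An illustrative sketch (hypothetical name 'unionDemo'): 'union' is
-- left-biased, so on duplicate keys the value from the first map wins.
unionDemo :: Bool
unionDemo = toList (union m1 m2) == [(3,'b'), (5,'a'), (7,'C')]
         && toList (unionWith (\l _ -> l) m1 m2) == toList (union m1 m2)
  where
    m1 = fromList [(3,'b'), (5,'a')] :: Map Int Char
    m2 = fromList [(5,'A'), (7,'C')]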
{--------------------------------------------------------------------
Union with a combining function
--------------------------------------------------------------------}
-- | /O(n+m)/. Union with a combining function. The implementation uses the efficient /hedge-union/ algorithm.
--
-- > unionWith (++) (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == fromList [(3, "b"), (5, "aA"), (7, "C")]
{-@ unionWith :: (Ord k) => (a -> a -> a) -> OMap k a -> OMap k a -> OMap k a @-}
unionWith :: Ord k => (a -> a -> a) -> Map k a -> Map k a -> Map k a
unionWith f m1 m2
= unionWithKey (\_ x y -> f x y) m1 m2
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE unionWith #-}
#endif
-- | /O(n+m)/.
-- Union with a combining function. The implementation uses the efficient /hedge-union/ algorithm.
-- Hedge-union is more efficient on (bigset \``union`\` smallset).
--
-- > let f key left_value right_value = (show key) ++ ":" ++ left_value ++ "|" ++ right_value
-- > unionWithKey f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == fromList [(3, "b"), (5, "5:a|A"), (7, "C")]
{-@ unionWithKey :: (Ord k) => (k -> a -> a -> a) -> OMap k a -> OMap k a -> OMap k a @-}
unionWithKey :: Ord k => (k -> a -> a -> a) -> Map k a -> Map k a -> Map k a
unionWithKey f t1 t2 = mergeWithKey (\k x1 x2 -> Just $ f k x1 x2) (\ _ _ x -> x) (\ _ _ x -> x) t1 t2
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE unionWithKey #-}
#endif
{--------------------------------------------------------------------
Difference
--------------------------------------------------------------------}
-- | /O(n+m)/. Difference of two maps.
-- Return elements of the first map not existing in the second map.
-- The implementation uses an efficient /hedge/ algorithm comparable with /hedge-union/.
--
-- > difference (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == singleton 3 "b"
{-@ difference :: (Ord k) => OMap k a -> OMap k b -> OMap k a @-}
difference :: Ord k => Map k a -> Map k b -> Map k a
difference Tip _ = Tip
difference t1 Tip = t1
difference t1 t2 = hedgeDiff NothingS NothingS t1 t2
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE difference #-}
#endif
{-@ hedgeDiff :: (Ord k) => lo: MaybeS k
-> hi: MaybeS {v: k | (IfDefLt lo v) }
-> {v: OMap k a | (RootBetween lo hi v) }
-> OMap {v: k | (KeyBetween lo hi v) } b
-> OMap {v: k | (KeyBetween lo hi v) } a @-}
{-@ Decrease hedgeDiff 5 @-}
hedgeDiff :: Ord a => MaybeS a -> MaybeS a -> Map a b -> Map a c -> Map a b
hedgeDiff _ _ Tip _ = Tip
hedgeDiff blo bhi (Bin _ kx x l r) Tip = join kx x (filterGt blo l) (filterLt bhi r)
hedgeDiff blo bhi t (Bin _ kx _ l r) = merge kx (hedgeDiff blo bmi (trim blo bmi t) l)
(hedgeDiff bmi bhi (trim bmi bhi t) r)
where bmi = JustS kx
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE hedgeDiff #-}
#endif
-- | /O(n+m)/. Difference with a combining function.
-- When two equal keys are
-- encountered, the combining function is applied to the values of these keys.
-- If it returns 'Nothing', the element is discarded (proper set difference). If
-- it returns (@'Just' y@), the element is updated with a new value @y@.
-- The implementation uses an efficient /hedge/ algorithm comparable with /hedge-union/.
--
-- > let f al ar = if al == "b" then Just (al ++ ":" ++ ar) else Nothing
-- > differenceWith f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (3, "B"), (7, "C")])
-- > == singleton 3 "b:B"
{-@ differenceWith :: (Ord k) => (a -> b -> Maybe a) -> OMap k a -> OMap k b -> OMap k a @-}
differenceWith :: Ord k => (a -> b -> Maybe a) -> Map k a -> Map k b -> Map k a
differenceWith f m1 m2
= differenceWithKey (\_ x y -> f x y) m1 m2
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE differenceWith #-}
#endif
-- | /O(n+m)/. Difference with a combining function. When two equal keys are
-- encountered, the combining function is applied to the key and both values.
-- If it returns 'Nothing', the element is discarded (proper set difference). If
-- it returns (@'Just' y@), the element is updated with a new value @y@.
-- The implementation uses an efficient /hedge/ algorithm comparable with /hedge-union/.
--
-- > let f k al ar = if al == "b" then Just ((show k) ++ ":" ++ al ++ "|" ++ ar) else Nothing
-- > differenceWithKey f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (3, "B"), (10, "C")])
-- > == singleton 3 "3:b|B"
{-@ differenceWithKey :: (Ord k) => (k -> a -> b -> Maybe a) -> OMap k a -> OMap k b -> OMap k a @-}
differenceWithKey :: Ord k => (k -> a -> b -> Maybe a) -> Map k a -> Map k b -> Map k a
differenceWithKey f t1 t2 = mergeWithKey f (\_ _ x -> x) (\ _ _ _ -> Tip) t1 t2
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE differenceWithKey #-}
#endif
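-- An illustrative sketch (hypothetical name 'differenceDemo'): plain
-- 'difference' drops every key present in the second map, while
-- 'differenceWith' lets the combining function keep and rewrite selected
-- entries.
differenceDemo :: Bool
differenceDemo = toList (difference m1 m2) == []
              && toList (differenceWith f m1 m2) == [(3,"b:B")]
  where
    m1 = fromList [(3,"b"), (5,"a")] :: Map Int String
    m2 = fromList [(3,"B"), (5,"A"), (7,"C")]
    f al ar = if al == "b" then Just (al ++ ":" ++ ar) else Nothing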
{--------------------------------------------------------------------
Intersection
--------------------------------------------------------------------}
-- | /O(n+m)/. Intersection of two maps.
-- Return data in the first map for the keys existing in both maps.
-- (@'intersection' m1 m2 == 'intersectionWith' 'const' m1 m2@).
--
-- > intersection (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == singleton 5 "a"
{-@ intersection :: (Ord k) => OMap k a -> OMap k b -> OMap k a @-}
intersection :: Ord k => Map k a -> Map k b -> Map k a
intersection Tip _ = Tip
intersection _ Tip = Tip
intersection t1 t2 = hedgeInt NothingS NothingS t1 t2
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE intersection #-}
#endif
{-@ hedgeInt :: (Ord k) => lo: MaybeS k
-> hi: MaybeS {v: k | (IfDefLt lo v) }
-> OMap {v: k | (KeyBetween lo hi v) } a
-> {v: OMap k b | (RootBetween lo hi v) }
-> OMap {v: k | (KeyBetween lo hi v)} a @-}
hedgeInt :: Ord k => MaybeS k -> MaybeS k -> Map k a -> Map k b -> Map k a
hedgeInt _ _ _ Tip = Tip
hedgeInt _ _ Tip _ = Tip
hedgeInt blo bhi (Bin _ kx x l r) t2 = let l' = hedgeInt blo bmi l (trim blo bmi t2)
r' = hedgeInt bmi bhi r (trim bmi bhi t2)
in if kx `member` t2 then join kx x l' r' else merge kx l' r'
where bmi = JustS kx
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE hedgeInt #-}
#endif
-- | /O(n+m)/. Intersection with a combining function.
--
-- > intersectionWith (++) (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == singleton 5 "aA"
{-@ intersectionWith :: (Ord k) => (a -> b -> c) -> OMap k a -> OMap k b -> OMap k c @-}
intersectionWith :: Ord k => (a -> b -> c) -> Map k a -> Map k b -> Map k c
intersectionWith f m1 m2
= intersectionWithKey (\_ x y -> f x y) m1 m2
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE intersectionWith #-}
#endif
-- | /O(n+m)/. Intersection with a combining function.
-- Intersection is more efficient on (bigset \``intersection`\` smallset).
--
-- > let f k al ar = (show k) ++ ":" ++ al ++ "|" ++ ar
-- > intersectionWithKey f (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == singleton 5 "5:a|A"
{-@ intersectionWithKey :: (Ord k) => (k -> a -> b -> c) -> OMap k a -> OMap k b -> OMap k c @-}
intersectionWithKey :: Ord k => (k -> a -> b -> c) -> Map k a -> Map k b -> Map k c
intersectionWithKey f t1 t2 = mergeWithKey (\k x1 x2 -> Just $ f k x1 x2) (\ _ _ _ -> Tip) (\ _ _ _ -> Tip) t1 t2
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE intersectionWithKey #-}
#endif
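-- An illustrative sketch (hypothetical name 'intersectionDemo'): intersection
-- keeps data from the first map for exactly the keys the two maps share.
intersectionDemo :: Bool
intersectionDemo = toList (intersection m1 m2) == [(5,'a')]
                && toList (intersectionWith (\x _ -> x) m1 m2) == [(5,'a')]
  where
    m1 = fromList [(3,'b'), (5,'a')] :: Map Int Char
    m2 = fromList [(5,'A'), (7,'C')]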
{--------------------------------------------------------------------
MergeWithKey
--------------------------------------------------------------------}
-- | /O(n+m)/. A high-performance universal combining function. This function
-- is used to define 'unionWith', 'unionWithKey', 'differenceWith',
-- 'differenceWithKey', 'intersectionWith', 'intersectionWithKey' and can be
-- used to define other custom combine functions.
--
-- Please make sure you know what is going on when using 'mergeWithKey',
-- otherwise you can be surprised by unexpected code growth or even
-- corruption of the data structure.
--
-- When 'mergeWithKey' is given three arguments, it is inlined to the call
-- site. You should therefore use 'mergeWithKey' only to define your custom
-- combining functions. For example, you could define 'unionWithKey',
-- 'differenceWithKey' and 'intersectionWithKey' as follows (note that in this
-- version @only1@ and @only2@ also receive the enclosing key bounds, which
-- they are free to ignore):
--
-- > myUnionWithKey f m1 m2 = mergeWithKey (\k x1 x2 -> Just (f k x1 x2)) (\_ _ t -> t) (\_ _ t -> t) m1 m2
-- > myDifferenceWithKey f m1 m2 = mergeWithKey f (\_ _ t -> t) (\_ _ _ -> empty) m1 m2
-- > myIntersectionWithKey f m1 m2 = mergeWithKey (\k x1 x2 -> Just (f k x1 x2)) (\_ _ _ -> empty) (\_ _ _ -> empty) m1 m2
--
-- When calling @'mergeWithKey' combine only1 only2@, a function combining two
-- 'Map's is created, such that
--
-- * if a key is present in both maps, it is passed with both corresponding
-- values to the @combine@ function. Depending on the result, the key is either
-- present in the result with specified value, or is left out;
--
-- * a nonempty subtree present only in the first map is passed to @only1@ and
-- the output is added to the result;
--
-- * a nonempty subtree present only in the second map is passed to @only2@ and
-- the output is added to the result.
--
-- The @only1@ and @only2@ methods /must return a map with a subset (possibly empty) of the keys of the given map/.
-- The values can be modified arbitrarily. Most common variants of @only1@ and
-- @only2@ are 'id' and @'const' 'empty'@, but for example @'map' f@ or
-- @'filterWithKey' f@ could be used for any @f@.
{-@ mergeWithKey :: (Ord k) => (k -> a -> b -> Maybe c)
-> (lo:MaybeS k -> hi: MaybeS k -> OMap {v: k | (KeyBetween lo hi v) } a -> OMap {v: k | (KeyBetween lo hi v) } c)
-> (lo:MaybeS k -> hi: MaybeS k -> OMap {v: k | (KeyBetween lo hi v) } b -> OMap {v: k | (KeyBetween lo hi v) } c)
-> OMap k a -> OMap k b -> OMap k c @-}
mergeWithKey :: Ord k => (k -> a -> b -> Maybe c) -> (MaybeS k -> MaybeS k -> Map k a -> Map k c) -> (MaybeS k -> MaybeS k -> Map k b -> Map k c)
-> Map k a -> Map k b -> Map k c
mergeWithKey f g1 g2 = go
where
go Tip t2 = g2 NothingS NothingS t2
go t1 Tip = g1 NothingS NothingS t1
go t1 t2 = hedgeMerge f g1 g2 NothingS NothingS t1 t2
{-@ hedgeMerge :: (Ord k) => (k -> a -> b -> Maybe c)
-> (lo:MaybeS k -> hi: MaybeS k -> OMap {v: k | (KeyBetween lo hi v) } a -> OMap {v: k | (KeyBetween lo hi v) } c)
-> (lo:MaybeS k -> hi: MaybeS k -> OMap {v: k | (KeyBetween lo hi v) } b -> OMap {v: k | (KeyBetween lo hi v) } c)
-> lo: MaybeS k
-> hi: MaybeS {v: k | (IfDefLt lo v) }
-> OMap {v: k | (KeyBetween lo hi v) } a
-> {v: OMap k b | (RootBetween lo hi v) }
-> OMap {v: k | (KeyBetween lo hi v)} c @-}
hedgeMerge :: Ord k => (k -> a -> b -> Maybe c)
-> (MaybeS k -> MaybeS k -> Map k a -> Map k c)
-> (MaybeS k -> MaybeS k -> Map k b -> Map k c)
-> MaybeS k -> MaybeS k
-> Map k a -> Map k b -> Map k c
hedgeMerge f g1 g2 blo bhi t1 Tip
= g1 blo bhi t1
hedgeMerge f g1 g2 blo bhi Tip (Bin _ kx x l r)
= g2 blo bhi $ join kx x (filterGt blo l) (filterLt bhi r)
hedgeMerge f g1 g2 blo bhi (Bin _ kx x l r) t2
= let bmi = JustS kx
l' = hedgeMerge f g1 g2 blo bmi l (trim blo bmi t2)
(found, trim_t2) = trimLookupLo kx bhi t2
r' = hedgeMerge f g1 g2 bmi bhi r trim_t2
in case found of
Nothing -> case g1 blo bhi (singleton kx x) of
Tip -> merge kx l' r'
(Bin _ _ x' Tip Tip) -> join kx x' l' r'
_ -> error "mergeWithKey: Given function only1 does not fulfil required conditions (see documentation)"
Just x2 -> case f kx x x2 of
Nothing -> merge kx l' r'
Just x' -> join kx x' l' r'
{-# INLINE mergeWithKey #-}
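-- A sketch (hypothetical name 'unionViaMerge') of a left-biased union defined
-- through 'mergeWithKey', written against the bound-taking signature used in
-- this version; the bounds passed to the @only@ functions are simply ignored.
unionViaMerge :: Ord k => Map k a -> Map k a -> Map k a
unionViaMerge = mergeWithKey (\_ x _ -> Just x) (\_ _ t -> t) (\_ _ t -> t)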
{--------------------------------------------------------------------
Submap
--------------------------------------------------------------------}
-- | /O(n+m)/.
-- This function is defined as (@'isSubmapOf' = 'isSubmapOfBy' (==)@).
--
{-@ isSubmapOf :: (Ord k, Eq a) => OMap k a -> OMap k a -> Bool @-}
isSubmapOf :: (Ord k,Eq a) => Map k a -> Map k a -> Bool
isSubmapOf m1 m2 = isSubmapOfBy (==) m1 m2
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE isSubmapOf #-}
#endif
{- | /O(n+m)/.
The expression (@'isSubmapOfBy' f t1 t2@) returns 'True' if
all keys in @t1@ are in tree @t2@, and @f@ returns 'True' when
applied to their respective values. For example, the following
expressions are all 'True':
> isSubmapOfBy (==) (fromList [('a',1)]) (fromList [('a',1),('b',2)])
> isSubmapOfBy (<=) (fromList [('a',1)]) (fromList [('a',1),('b',2)])
> isSubmapOfBy (==) (fromList [('a',1),('b',2)]) (fromList [('a',1),('b',2)])
But the following are all 'False':
> isSubmapOfBy (==) (fromList [('a',2)]) (fromList [('a',1),('b',2)])
> isSubmapOfBy (<) (fromList [('a',1)]) (fromList [('a',1),('b',2)])
> isSubmapOfBy (==) (fromList [('a',1),('b',2)]) (fromList [('a',1)])
-}
{-@ isSubmapOfBy :: (Ord k) => (a->b->Bool) -> OMap k a -> OMap k b -> Bool @-}
isSubmapOfBy :: Ord k => (a->b->Bool) -> Map k a -> Map k b -> Bool
isSubmapOfBy f t1 t2
= (size t1 <= size t2) && (submap' f t1 t2)
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE isSubmapOfBy #-}
#endif
submap' :: Ord a => (b -> c -> Bool) -> Map a b -> Map a c -> Bool
submap' _ Tip _ = True
submap' _ _ Tip = False
submap' f (Bin _ kx x l r) t
= case found of
Nothing -> False
Just y -> f x y && submap' f l lt && submap' f r gt
where
(lt,found,gt) = splitLookup kx t
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE submap' #-}
#endif
-- | /O(n+m)/. Is this a proper submap? (ie. a submap but not equal).
-- Defined as (@'isProperSubmapOf' = 'isProperSubmapOfBy' (==)@).
{-@ isProperSubmapOf :: (Ord k,Eq a) => OMap k a -> OMap k a -> Bool @-}
isProperSubmapOf :: (Ord k,Eq a) => Map k a -> Map k a -> Bool
isProperSubmapOf m1 m2
= isProperSubmapOfBy (==) m1 m2
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE isProperSubmapOf #-}
#endif
{- | /O(n+m)/. Is this a proper submap? (ie. a submap but not equal).
The expression (@'isProperSubmapOfBy' f m1 m2@) returns 'True' when
@m1@ and @m2@ are not equal,
all keys in @m1@ are in @m2@, and @f@ returns 'True' when
applied to their respective values. For example, the following
expressions are all 'True':
> isProperSubmapOfBy (==) (fromList [(1,1)]) (fromList [(1,1),(2,2)])
> isProperSubmapOfBy (<=) (fromList [(1,1)]) (fromList [(1,1),(2,2)])
But the following are all 'False':
> isProperSubmapOfBy (==) (fromList [(1,1),(2,2)]) (fromList [(1,1),(2,2)])
> isProperSubmapOfBy (==) (fromList [(1,1),(2,2)]) (fromList [(1,1)])
> isProperSubmapOfBy (<) (fromList [(1,1)]) (fromList [(1,1),(2,2)])
-}
{-@ isProperSubmapOfBy :: Ord k => (a -> b -> Bool) -> OMap k a -> OMap k b -> Bool @-}
isProperSubmapOfBy :: Ord k => (a -> b -> Bool) -> Map k a -> Map k b -> Bool
isProperSubmapOfBy f t1 t2
= (size t1 < size t2) && (submap' f t1 t2)
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE isProperSubmapOfBy #-}
#endif
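-- An illustrative sketch (hypothetical name 'submapDemo'): a proper submap
-- must be strictly smaller, so an equal map is a submap but not a proper one.
submapDemo :: Bool
submapDemo = isSubmapOf small big
          && isSubmapOf big big
          && isProperSubmapOf small big
          && not (isProperSubmapOf big big)
  where
    small = fromList [(1,1)]        :: Map Int Int
    big   = fromList [(1,1), (2,2)]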
{--------------------------------------------------------------------
Filter and partition
--------------------------------------------------------------------}
-- | /O(n)/. Filter all values that satisfy the predicate.
--
-- > filter (> "a") (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
-- > filter (> "x") (fromList [(5,"a"), (3,"b")]) == empty
-- > filter (< "a") (fromList [(5,"a"), (3,"b")]) == empty
{-@ filter :: (a -> Bool) -> OMap k a -> OMap k a @-}
filter :: (a -> Bool) -> Map k a -> Map k a
filter p m
= filterWithKey (\_ x -> p x) m
-- | /O(n)/. Filter all keys\/values that satisfy the predicate.
--
-- > filterWithKey (\k _ -> k > 4) (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
{-@ filterWithKey :: (k -> a -> Bool) -> OMap k a -> OMap k a @-}
filterWithKey :: (k -> a -> Bool) -> Map k a -> Map k a
filterWithKey _ Tip = Tip
filterWithKey p (Bin _ kx x l r)
| p kx x = join kx x (filterWithKey p l) (filterWithKey p r)
| otherwise = merge kx (filterWithKey p l) (filterWithKey p r)
-- | /O(n)/. Partition the map according to a predicate. The first
-- map contains all elements that satisfy the predicate, the second all
-- elements that fail the predicate. See also 'split'.
--
-- > partition (> "a") (fromList [(5,"a"), (3,"b")]) == (singleton 3 "b", singleton 5 "a")
-- > partition (< "x") (fromList [(5,"a"), (3,"b")]) == (fromList [(3, "b"), (5, "a")], empty)
-- > partition (> "x") (fromList [(5,"a"), (3,"b")]) == (empty, fromList [(3, "b"), (5, "a")])
{-@ partition :: (a -> Bool) -> OMap k a -> (OMap k a, OMap k a) @-}
partition :: (a -> Bool) -> Map k a -> (Map k a,Map k a)
partition p m
= partitionWithKey (\_ x -> p x) m
-- | /O(n)/. Partition the map according to a predicate. The first
-- map contains all elements that satisfy the predicate, the second all
-- elements that fail the predicate. See also 'split'.
--
-- > partitionWithKey (\ k _ -> k > 3) (fromList [(5,"a"), (3,"b")]) == (singleton 5 "a", singleton 3 "b")
-- > partitionWithKey (\ k _ -> k < 7) (fromList [(5,"a"), (3,"b")]) == (fromList [(3, "b"), (5, "a")], empty)
-- > partitionWithKey (\ k _ -> k > 7) (fromList [(5,"a"), (3,"b")]) == (empty, fromList [(3, "b"), (5, "a")])
{-@ partitionWithKey :: (k -> a -> Bool) -> OMap k a -> (OMap k a, OMap k a) @-}
partitionWithKey :: (k -> a -> Bool) -> Map k a -> (Map k a, Map k a)
partitionWithKey _ Tip = (Tip,Tip)
partitionWithKey p (Bin _ kx x l r)
| p kx x = (join kx x l1 r1,merge kx l2 r2)
| otherwise = (merge kx l1 r1,join kx x l2 r2)
where
(l1,l2) = partitionWithKey p l
(r1,r2) = partitionWithKey p r
-- | /O(n)/. Map values and collect the 'Just' results.
--
-- > let f x = if x == "a" then Just "new a" else Nothing
-- > mapMaybe f (fromList [(5,"a"), (3,"b")]) == singleton 5 "new a"
{-@ mapMaybe :: (a -> Maybe b) -> OMap k a -> OMap k b @-}
mapMaybe :: (a -> Maybe b) -> Map k a -> Map k b
mapMaybe f = mapMaybeWithKey (\_ x -> f x)
-- | /O(n)/. Map keys\/values and collect the 'Just' results.
--
-- > let f k _ = if k < 5 then Just ("key : " ++ (show k)) else Nothing
-- > mapMaybeWithKey f (fromList [(5,"a"), (3,"b")]) == singleton 3 "key : 3"
{-@ mapMaybeWithKey :: (k -> a -> Maybe b) -> OMap k a -> OMap k b @-}
mapMaybeWithKey :: (k -> a -> Maybe b) -> Map k a -> Map k b
mapMaybeWithKey _ Tip = Tip
mapMaybeWithKey f (Bin _ kx x l r) = case f kx x of
Just y -> join kx y (mapMaybeWithKey f l) (mapMaybeWithKey f r)
Nothing -> merge kx (mapMaybeWithKey f l) (mapMaybeWithKey f r)
-- | /O(n)/. Map values and separate the 'Left' and 'Right' results.
--
-- > let f a = if a < "c" then Left a else Right a
-- > mapEither f (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
-- > == (fromList [(3,"b"), (5,"a")], fromList [(1,"x"), (7,"z")])
-- >
-- > mapEither (\ a -> Right a) (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
-- > == (empty, fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
{-@ mapEither :: (a -> Either b c) -> OMap k a -> (OMap k b, OMap k c) @-}
mapEither :: (a -> Either b c) -> Map k a -> (Map k b, Map k c)
mapEither f m
= mapEitherWithKey (\_ x -> f x) m
-- | /O(n)/. Map keys\/values and separate the 'Left' and 'Right' results.
--
-- > let f k a = if k < 5 then Left (k * 2) else Right (a ++ a)
-- > mapEitherWithKey f (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
-- > == (fromList [(1,2), (3,6)], fromList [(5,"aa"), (7,"zz")])
-- >
-- > mapEitherWithKey (\_ a -> Right a) (fromList [(5,"a"), (3,"b"), (1,"x"), (7,"z")])
-- > == (empty, fromList [(1,"x"), (3,"b"), (5,"a"), (7,"z")])
{-@ mapEitherWithKey :: (k -> a -> Either b c) -> OMap k a -> (OMap k b, OMap k c) @-}
mapEitherWithKey :: (k -> a -> Either b c) -> Map k a -> (Map k b, Map k c)
mapEitherWithKey _ Tip = (Tip, Tip)
mapEitherWithKey f (Bin _ kx x l r) = case f kx x of
Left y -> (join kx y l1 r1, merge kx l2 r2)
Right z -> (merge kx l1 r1, join kx z l2 r2)
where
(l1,l2) = mapEitherWithKey f l
(r1,r2) = mapEitherWithKey f r
{--------------------------------------------------------------------
Mapping
--------------------------------------------------------------------}
-- | /O(n)/. Map a function over all values in the map.
--
-- > map (++ "x") (fromList [(5,"a"), (3,"b")]) == fromList [(3, "bx"), (5, "ax")]
{-@ map :: (a -> b) -> OMap k a -> OMap k b @-}
map :: (a -> b) -> Map k a -> Map k b
map _ Tip = Tip
map f (Bin sx kx x l r) = Bin sx kx (f x) (map f l) (map f r)
-- | /O(n)/. Map a function over all values in the map; the function also receives each value's key.
--
-- > let f key x = (show key) ++ ":" ++ x
-- > mapWithKey f (fromList [(5,"a"), (3,"b")]) == fromList [(3, "3:b"), (5, "5:a")]
{-@ mapWithKey :: (k -> a -> b) -> OMap k a -> OMap k b @-}
mapWithKey :: (k -> a -> b) -> Map k a -> Map k b
mapWithKey _ Tip = Tip
mapWithKey f (Bin sx kx x l r) = Bin sx kx (f kx x) (mapWithKey f l) (mapWithKey f r)
-- | /O(n)/.
-- @'traverseWithKey' f m == 'fromList' <$> 'traverse' (\(k, v) -> (,) k <$> f k v) ('toList' m)@
-- That is, it behaves exactly like a regular 'traverse' except that the traversing
-- function also has access to the key associated with a value.
--
-- > traverseWithKey (\k v -> if odd k then Just (succ v) else Nothing) (fromList [(1, 'a'), (5, 'e')]) == Just (fromList [(1, 'b'), (5, 'f')])
-- > traverseWithKey (\k v -> if odd k then Just (succ v) else Nothing) (fromList [(2, 'c')]) == Nothing
--{-# INLINE traverseWithKey #-}
--traverseWithKey :: Applicative t => (k -> a -> t b) -> Map k a -> t (Map k b)
--traverseWithKey f = go
-- where
-- go Tip = pure Tip
-- go (Bin s k v l r)
-- = flip (Bin s k) <$> go l <*> f k v <*> go r
-- | /O(n)/. The function 'mapAccum' threads an accumulating
-- argument through the map in ascending order of keys.
--
-- > let f a b = (a ++ b, b ++ "X")
-- > mapAccum f "Everything: " (fromList [(5,"a"), (3,"b")]) == ("Everything: ba", fromList [(3, "bX"), (5, "aX")])
{-@ mapAccum :: (a -> b -> (a,c)) -> a -> OMap k b -> (a, OMap k c) @-}
mapAccum :: (a -> b -> (a,c)) -> a -> Map k b -> (a, Map k c)
mapAccum f a m
= mapAccumWithKey (\a' _ x' -> f a' x') a m
-- | /O(n)/. The function 'mapAccumWithKey' threads an accumulating
-- argument through the map in ascending order of keys.
--
-- > let f a k b = (a ++ " " ++ (show k) ++ "-" ++ b, b ++ "X")
-- > mapAccumWithKey f "Everything:" (fromList [(5,"a"), (3,"b")]) == ("Everything: 3-b 5-a", fromList [(3, "bX"), (5, "aX")])
{-@ mapAccumWithKey :: (a -> k -> b -> (a,c)) -> a -> OMap k b -> (a, OMap k c) @-}
mapAccumWithKey :: (a -> k -> b -> (a,c)) -> a -> Map k b -> (a,Map k c)
mapAccumWithKey f a t
= mapAccumL f a t
-- | /O(n)/. The function 'mapAccumL' threads an accumulating
-- argument through the map in ascending order of keys.
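--
-- Illustrative example (added; not part of the original documentation -- it mirrors
-- the 'mapAccumWithKey' example above, since 'mapAccumWithKey' simply calls 'mapAccumL'):
--
-- > let f a k b = (a ++ " " ++ (show k) ++ "-" ++ b, b ++ "X")
-- > mapAccumL f "Everything:" (fromList [(5,"a"), (3,"b")]) == ("Everything: 3-b 5-a", fromList [(3, "bX"), (5, "aX")])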
mapAccumL :: (a -> k -> b -> (a,c)) -> a -> Map k b -> (a,Map k c)
mapAccumL _ a Tip = (a,Tip)
mapAccumL f a (Bin sx kx x l r) =
let (a1,l') = mapAccumL f a l
(a2,x') = f a1 kx x
(a3,r') = mapAccumL f a2 r
in (a3,Bin sx kx x' l' r')
-- | /O(n)/. The function 'mapAccumRWithKey' threads an accumulating
-- argument through the map in descending order of keys.
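--
-- Illustrative example (added; not part of the original documentation -- same @f@
-- as in the 'mapAccumWithKey' example, but keys are visited in descending order):
--
-- > let f a k b = (a ++ " " ++ (show k) ++ "-" ++ b, b ++ "X")
-- > mapAccumRWithKey f "Everything:" (fromList [(5,"a"), (3,"b")]) == ("Everything: 5-a 3-b", fromList [(3, "bX"), (5, "aX")])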
{-@ mapAccumRWithKey :: (a -> k -> b -> (a,c)) -> a -> OMap k b -> (a, OMap k c) @-}
mapAccumRWithKey :: (a -> k -> b -> (a,c)) -> a -> Map k b -> (a,Map k c)
mapAccumRWithKey _ a Tip = (a,Tip)
mapAccumRWithKey f a (Bin sx kx x l r) =
let (a1,r') = mapAccumRWithKey f a r
(a2,x') = f a1 kx x
(a3,l') = mapAccumRWithKey f a2 l
in (a3,Bin sx kx x' l' r')
-- | /O(n*log n)/.
-- @'mapKeys' f s@ is the map obtained by applying @f@ to each key of @s@.
--
-- The size of the result may be smaller if @f@ maps two or more distinct
-- keys to the same new key. In this case the value at the greatest of the
-- original keys is retained.
--
-- > mapKeys (+ 1) (fromList [(5,"a"), (3,"b")]) == fromList [(4, "b"), (6, "a")]
-- > mapKeys (\ _ -> 1) (fromList [(1,"b"), (2,"a"), (3,"d"), (4,"c")]) == singleton 1 "c"
-- > mapKeys (\ _ -> 3) (fromList [(1,"b"), (2,"a"), (3,"d"), (4,"c")]) == singleton 3 "c"
{-@ mapKeys :: (Ord k2) => (k1 -> k2) -> OMap k1 a -> OMap k2 a @-}
mapKeys :: Ord k2 => (k1->k2) -> Map k1 a -> Map k2 a
mapKeys f = fromList . foldrWithKey (\k x xs -> (f k, x) : xs) []
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE mapKeys #-}
#endif
-- | /O(n*log n)/.
-- @'mapKeysWith' c f s@ is the map obtained by applying @f@ to each key of @s@.
--
-- The size of the result may be smaller if @f@ maps two or more distinct
-- keys to the same new key. In this case the associated values will be
-- combined using @c@.
--
-- > mapKeysWith (++) (\ _ -> 1) (fromList [(1,"b"), (2,"a"), (3,"d"), (4,"c")]) == singleton 1 "cdab"
-- > mapKeysWith (++) (\ _ -> 3) (fromList [(1,"b"), (2,"a"), (3,"d"), (4,"c")]) == singleton 3 "cdab"
{-@ mapKeysWith :: (Ord k2) => (a -> a -> a) -> (k1->k2) -> OMap k1 a -> OMap k2 a @-}
mapKeysWith :: Ord k2 => (a -> a -> a) -> (k1->k2) -> Map k1 a -> Map k2 a
mapKeysWith c f = fromListWith c . foldrWithKey (\k x xs -> (f k, x) : xs) []
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE mapKeysWith #-}
#endif
-- | /O(n)/.
-- @'mapKeysMonotonic' f s == 'mapKeys' f s@, but works only when @f@
-- is strictly monotonic.
-- That is, for any values @x@ and @y@, if @x@ < @y@ then @f x@ < @f y@.
-- /The precondition is not checked./
-- Semi-formally, we have:
--
-- > and [x < y ==> f x < f y | x <- ls, y <- ls]
-- > ==> mapKeysMonotonic f s == mapKeys f s
-- > where ls = keys s
--
-- This means that @f@ maps distinct original keys to distinct resulting keys.
-- This function has better performance than 'mapKeys'.
--
-- > mapKeysMonotonic (\ k -> k * 2) (fromList [(5,"a"), (3,"b")]) == fromList [(6, "b"), (10, "a")]
-- > valid (mapKeysMonotonic (\ k -> k * 2) (fromList [(5,"a"), (3,"b")])) == True
-- > valid (mapKeysMonotonic (\ _ -> 1) (fromList [(5,"a"), (3,"b")])) == False
-- LIQUIDFAIL
mapKeysMonotonic :: (k1->k2) -> Map k1 a -> Map k2 a
mapKeysMonotonic _ Tip = Tip
mapKeysMonotonic f (Bin sz k x l r) =
Bin sz (f k) x (mapKeysMonotonic f l) (mapKeysMonotonic f r)
{--------------------------------------------------------------------
Folds
--------------------------------------------------------------------}
-- | /O(n)/. Fold the values in the map using the given right-associative
-- binary operator, such that @'foldr' f z == 'Prelude.foldr' f z . 'elems'@.
--
-- For example,
--
-- > elems map = foldr (:) [] map
--
-- > let f a len = len + (length a)
-- > foldr f 0 (fromList [(5,"a"), (3,"bbb")]) == 4
foldr :: (a -> b -> b) -> b -> Map k a -> b
foldr f z = go z
where
go z' Tip = z'
go z' (Bin _ _ x l r) = go (f x (go z' r)) l
{-# INLINE foldr #-}
-- | /O(n)/. A strict version of 'foldr'. Each application of the operator is
-- evaluated before using the result in the next application. This
-- function is strict in the starting value.
foldr' :: (a -> b -> b) -> b -> Map k a -> b
foldr' f z = go z
where
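    -- STRICT_1_OF_2 is a CPP macro (presumably #define'd earlier in this file, as
    -- in the original containers source) that forces go's first argument, making
    -- the fold strict in the accumulator.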
STRICT_1_OF_2(go)
go z' Tip = z'
go z' (Bin _ _ x l r) = go (f x (go z' r)) l
{-# INLINE foldr' #-}
-- | /O(n)/. Fold the values in the map using the given left-associative
-- binary operator, such that @'foldl' f z == 'Prelude.foldl' f z . 'elems'@.
--
-- For example,
--
-- > elems = reverse . foldl (flip (:)) []
--
-- > let f len a = len + (length a)
-- > foldl f 0 (fromList [(5,"a"), (3,"bbb")]) == 4
foldl :: (a -> b -> a) -> a -> Map k b -> a
foldl f z = go z
where
go z' Tip = z'
go z' (Bin _ _ x l r) = go (f (go z' l) x) r
{-# INLINE foldl #-}
-- | /O(n)/. A strict version of 'foldl'. Each application of the operator is
-- evaluated before using the result in the next application. This
-- function is strict in the starting value.
foldl' :: (a -> b -> a) -> a -> Map k b -> a
foldl' f z = go z
where
STRICT_1_OF_2(go)
go z' Tip = z'
go z' (Bin _ _ x l r) = go (f (go z' l) x) r
{-# INLINE foldl' #-}
-- | /O(n)/. Fold the keys and values in the map using the given right-associative
-- binary operator, such that
-- @'foldrWithKey' f z == 'Prelude.foldr' ('uncurry' f) z . 'toAscList'@.
--
-- For example,
--
-- > keys map = foldrWithKey (\k x ks -> k:ks) [] map
--
-- > let f k a result = result ++ "(" ++ (show k) ++ ":" ++ a ++ ")"
-- > foldrWithKey f "Map: " (fromList [(5,"a"), (3,"b")]) == "Map: (5:a)(3:b)"
foldrWithKey :: (k -> a -> b -> b) -> b -> Map k a -> b
foldrWithKey f z = go z
where
go z' Tip = z'
go z' (Bin _ kx x l r) = go (f kx x (go z' r)) l
{-# INLINE foldrWithKey #-}
-- | /O(n)/. A strict version of 'foldrWithKey'. Each application of the operator is
-- evaluated before using the result in the next application. This
-- function is strict in the starting value.
foldrWithKey' :: (k -> a -> b -> b) -> b -> Map k a -> b
foldrWithKey' f z = go z
where
STRICT_1_OF_2(go)
go z' Tip = z'
go z' (Bin _ kx x l r) = go (f kx x (go z' r)) l
{-# INLINE foldrWithKey' #-}
-- | /O(n)/. Fold the keys and values in the map using the given left-associative
-- binary operator, such that
-- @'foldlWithKey' f z == 'Prelude.foldl' (\\z' (kx, x) -> f z' kx x) z . 'toAscList'@.
--
-- For example,
--
-- > keys = reverse . foldlWithKey (\ks k x -> k:ks) []
--
-- > let f result k a = result ++ "(" ++ (show k) ++ ":" ++ a ++ ")"
-- > foldlWithKey f "Map: " (fromList [(5,"a"), (3,"b")]) == "Map: (3:b)(5:a)"
foldlWithKey :: (a -> k -> b -> a) -> a -> Map k b -> a
foldlWithKey f z = go z
where
go z' Tip = z'
go z' (Bin _ kx x l r) = go (f (go z' l) kx x) r
{-# INLINE foldlWithKey #-}
-- | /O(n)/. A strict version of 'foldlWithKey'. Each application of the operator is
-- evaluated before using the result in the next application. This
-- function is strict in the starting value.
foldlWithKey' :: (a -> k -> b -> a) -> a -> Map k b -> a
foldlWithKey' f z = go z
where
STRICT_1_OF_2(go)
go z' Tip = z'
go z' (Bin _ kx x l r) = go (f (go z' l) kx x) r
{-# INLINE foldlWithKey' #-}
{--------------------------------------------------------------------
List variations
--------------------------------------------------------------------}
-- | /O(n)/.
-- Return all elements of the map in the ascending order of their keys.
-- Subject to list fusion.
--
-- > elems (fromList [(5,"a"), (3,"b")]) == ["b","a"]
-- > elems empty == []
{-@ elems :: m:Map k a -> [a] / [mlen m] @-}
elems :: Map k a -> [a]
elems = foldr (:) []
-- | /O(n)/. Return all keys of the map in ascending order. Subject to list
-- fusion.
--
-- > keys (fromList [(5,"a"), (3,"b")]) == [3,5]
-- > keys empty == []
{- LIQUID: SUMMARY-VALUES: keys :: OMap k a -> [k]<{v: k | v >= fld}> @-}
{-@ keys :: m:Map k a -> [k] / [mlen m] @-}
keys :: Map k a -> [k]
keys = foldrWithKey (\k _ ks -> k : ks) []
-- | /O(n)/. An alias for 'toAscList'. Return all key\/value pairs in the map
-- in ascending key order. Subject to list fusion.
--
-- > assocs (fromList [(5,"a"), (3,"b")]) == [(3,"b"), (5,"a")]
-- > assocs empty == []
{- LIQUID: SUMMARY-VALUES: assocs :: OMap k a -> [(k, a)]<{v: (k, a) | fst(v) >= fst(fld) }> @-}
assocs :: Map k a -> [(k,a)]
assocs m
= toAscList m
-- | /O(n)/. The set of all keys of the map.
--
-- > keysSet (fromList [(5,"a"), (3,"b")]) == Data.Set.fromList [3,5]
-- > keysSet empty == Data.Set.empty
-- LIQUID keysSet :: Map k a -> Set.Set k
-- LIQUID keysSet Tip = Set.Tip
-- LIQUID keysSet (Bin sz kx _ l r) = Set.Bin sz kx (keysSet l) (keysSet r)
-- | /O(n)/. Build a map from a set of keys and a function which for each key
-- computes its value.
--
-- > fromSet (\k -> replicate k 'a') (Data.Set.fromList [3, 5]) == fromList [(5,"aaaaa"), (3,"aaa")]
-- > fromSet undefined Data.Set.empty == empty
-- LIQUID fromSet :: (k -> a) -> Set.Set k -> Map k a
-- LIQUID fromSet _ Set.Tip = Tip
-- LIQUID fromSet f (Set.Bin sz x l r) = Bin sz x (f x) (fromSet f l) (fromSet f r)
{--------------------------------------------------------------------
Lists
use [foldlStrict] to reduce demand on the control-stack
--------------------------------------------------------------------}
-- | /O(n*log n)/. Build a map from a list of key\/value pairs. See also 'fromAscList'.
-- If the list contains more than one value for the same key, the last value
-- for the key is retained.
--
-- > fromList [] == empty
-- > fromList [(5,"a"), (3,"b"), (5, "c")] == fromList [(5,"c"), (3,"b")]
-- > fromList [(5,"c"), (3,"b"), (5, "a")] == fromList [(5,"a"), (3,"b")]
{-@ fromList :: (Ord k) => [(k,a)] -> OMap k a @-}
fromList :: Ord k => [(k,a)] -> Map k a
fromList xs
= foldlStrict ins empty xs
where
ins t (k,x) = insert k x t
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE fromList #-}
#endif
-- | /O(n*log n)/. Build a map from a list of key\/value pairs with a combining function. See also 'fromAscListWith'.
--
-- > fromListWith (++) [(5,"a"), (5,"b"), (3,"b"), (3,"a"), (5,"a")] == fromList [(3, "ab"), (5, "aba")]
-- > fromListWith (++) [] == empty
{-@ fromListWith :: (Ord k) => (a -> a -> a) -> [(k,a)] -> OMap k a @-}
fromListWith :: Ord k => (a -> a -> a) -> [(k,a)] -> Map k a
fromListWith f xs
= fromListWithKey (\_ x y -> f x y) xs
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE fromListWith #-}
#endif
-- | /O(n*log n)/. Build a map from a list of key\/value pairs with a combining function. See also 'fromAscListWithKey'.
--
-- > let f k a1 a2 = (show k) ++ a1 ++ a2
-- > fromListWithKey f [(5,"a"), (5,"b"), (3,"b"), (3,"a"), (5,"a")] == fromList [(3, "3ab"), (5, "5a5ba")]
-- > fromListWithKey f [] == empty
{-@ fromListWithKey :: (Ord k) => (k -> a -> a -> a) -> [(k,a)] -> OMap k a @-}
fromListWithKey :: Ord k => (k -> a -> a -> a) -> [(k,a)] -> Map k a
fromListWithKey f xs
= foldlStrict ins empty xs
where
ins t (k,x) = insertWithKey f k x t
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE fromListWithKey #-}
#endif
-- | /O(n)/. Convert the map to a list of key\/value pairs. Subject to list fusion.
--
-- > toList (fromList [(5,"a"), (3,"b")]) == [(3,"b"), (5,"a")]
-- > toList empty == []
{- LIQUIDTODO: toList:: OMap k a -> [(k, a)]<{v: (k, a) | fst(v) > fst(fld) }> @-}
toList :: Map k a -> [(k,a)]
toList = toAscList
-- | /O(n)/. Convert the map to a list of key\/value pairs where the keys are
-- in ascending order. Subject to list fusion.
--
-- > toAscList (fromList [(5,"a"), (3,"b")]) == [(3,"b"), (5,"a")]
{- LIQUIDTODO: toAscList :: OMap k a -> [(k, a)]<{v: (k, a) | fst(v) > fst(fld) }> @-}
{-@ toAscList :: m:Map k a -> [(k,a)] / [mlen m] @-}
toAscList :: Map k a -> [(k,a)]
toAscList = foldrWithKey (\k x xs -> (k,x):xs) []
-- | /O(n)/. Convert the map to a list of key\/value pairs where the keys
-- are in descending order. Subject to list fusion.
--
-- > toDescList (fromList [(5,"a"), (3,"b")]) == [(5,"a"), (3,"b")]
{- LIQUIDTODO: toAscList :: OMap k a -> [(k, a)]<{v: (k, a) | fst(v) < fst(fld) }> @-}
{-@ toDescList :: m:Map k a -> [(k,a)] / [mlen m] @-}
toDescList :: Map k a -> [(k,a)]
toDescList = foldlWithKey (\xs k x -> (k,x):xs) []
-- List fusion for the list generating functions.
#if __GLASGOW_HASKELL__
-- The foldrFB and foldlFB are fold{r,l}WithKey equivalents, used for list fusion.
-- They are important for converting unfused methods back; see mapFB in the Prelude.
{-@ foldrFB :: (k -> a -> b -> b) -> b -> m:Map k a -> b / [mlen m] @-}
foldrFB :: (k -> a -> b -> b) -> b -> Map k a -> b
foldrFB = foldrWithKey
{-# INLINE[0] foldrFB #-}
{-@ foldlFB :: (a -> k -> b -> a) -> a -> m:Map k b -> a / [mlen m] @-}
foldlFB :: (a -> k -> b -> a) -> a -> Map k b -> a
foldlFB = foldlWithKey
{-# INLINE[0] foldlFB #-}
-- Inline assocs and toList, so that we need to fuse only toAscList.
{-# INLINE assocs #-}
{-# INLINE toList #-}
-- The fusion is enabled up to and including phase 2. If it does not succeed,
-- phase 1 converts the expanded elems,keys,to{Asc,Desc}List calls back to
-- plain elems,keys,to{Asc,Desc}List. In phase 0 we inline fold{lr}FB (which were
-- used in a list fusion and would otherwise go away in phase 1), and let the
-- compiler do whatever it wants with elems,keys,to{Asc,Desc}List -- they were
-- forbidden to inline before phase 0, otherwise the fusion rules would not fire at all.
{-# NOINLINE[0] elems #-}
{-# NOINLINE[0] keys #-}
{-# NOINLINE[0] toAscList #-}
{-# NOINLINE[0] toDescList #-}
{-# RULES "Map.elems" [~1] forall m . elems m = build (\c n -> foldrFB (\_ x xs -> c x xs) n m) #-}
{-# RULES "Map.elemsBack" [1] foldrFB (\_ x xs -> x : xs) [] = elems #-}
{-# RULES "Map.keys" [~1] forall m . keys m = build (\c n -> foldrFB (\k _ xs -> c k xs) n m) #-}
{-# RULES "Map.keysBack" [1] foldrFB (\k _ xs -> k : xs) [] = keys #-}
{-# RULES "Map.toAscList" [~1] forall m . toAscList m = build (\c n -> foldrFB (\k x xs -> c (k,x) xs) n m) #-}
{-# RULES "Map.toAscListBack" [1] foldrFB (\k x xs -> (k, x) : xs) [] = toAscList #-}
{-# RULES "Map.toDescList" [~1] forall m . toDescList m = build (\c n -> foldlFB (\xs k x -> c (k,x) xs) n m) #-}
{-# RULES "Map.toDescListBack" [1] foldlFB (\xs k x -> (k, x) : xs) [] = toDescList #-}
#endif
{--------------------------------------------------------------------
Building trees from ascending/descending lists can be done in linear time.
Note that if [xs] is ascending then:
fromAscList xs == fromList xs
fromAscListWith f xs == fromListWith f xs
--------------------------------------------------------------------}
-- | /O(n)/. Build a map from an ascending list in linear time.
-- /The precondition (input list is ascending) is not checked./
--
-- > fromAscList [(3,"b"), (5,"a")] == fromList [(3, "b"), (5, "a")]
-- > fromAscList [(3,"b"), (5,"a"), (5,"b")] == fromList [(3, "b"), (5, "b")]
-- > valid (fromAscList [(3,"b"), (5,"a"), (5,"b")]) == True
-- > valid (fromAscList [(5,"a"), (3,"b"), (5,"b")]) == False
{- LIQUIDTODO fromAscList :: (Eq k) => [(k,a)]<{v: (k, a) | fst(v) > fst(fld)}> -> OMap k a -}
{-@ fromAscList :: (Eq k) => {v: [(k,a)] | false} -> OMap k a @-}
fromAscList :: Eq k => [(k,a)] -> Map k a
fromAscList xs
= fromAscListWithKey (\_ x _ -> x) xs
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE fromAscList #-}
#endif
-- | /O(n)/. Build a map from an ascending list in linear time with a combining function for equal keys.
-- /The precondition (input list is ascending) is not checked./
--
-- > fromAscListWith (++) [(3,"b"), (5,"a"), (5,"b")] == fromList [(3, "b"), (5, "ba")]
-- > valid (fromAscListWith (++) [(3,"b"), (5,"a"), (5,"b")]) == True
-- > valid (fromAscListWith (++) [(5,"a"), (3,"b"), (5,"b")]) == False
{- LIQUIDTODO fromAscListWith :: (Eq k) => (a -> a -> a) -> [(k,a)]<{v: (k, a) | fst(v) > fst(fld)}> -> OMap k a -}
{-@ fromAscListWith :: Eq k => (a -> a -> a) -> {v:[(k,a)] | false} -> OMap k a @-}
fromAscListWith :: Eq k => (a -> a -> a) -> [(k,a)] -> Map k a
fromAscListWith f xs
= fromAscListWithKey (\_ x y -> f x y) xs
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE fromAscListWith #-}
#endif
-- | /O(n)/. Build a map from an ascending list in linear time with a
-- combining function for equal keys.
-- /The precondition (input list is ascending) is not checked./
--
-- > let f k a1 a2 = (show k) ++ ":" ++ a1 ++ a2
-- > fromAscListWithKey f [(3,"b"), (5,"a"), (5,"b"), (5,"b")] == fromList [(3, "b"), (5, "5:b5:ba")]
-- > valid (fromAscListWithKey f [(3,"b"), (5,"a"), (5,"b"), (5,"b")]) == True
-- > valid (fromAscListWithKey f [(5,"a"), (3,"b"), (5,"b"), (5,"b")]) == False
{- LIQUIDTODO fromAscListWithKey :: (Eq k) => (k -> a -> a -> a) -> [(k,a)]<{v: (k, a) | fst(v) > fst(fld)}> -> OMap k a -}
{-@ fromAscListWithKey :: (Eq k) => (k -> a -> a -> a) -> {v: [(k,a)] | false} -> OMap k a @-}
fromAscListWithKey :: Eq k => (k -> a -> a -> a) -> [(k,a)] -> Map k a
fromAscListWithKey f xs
= fromDistinctAscList (combineEq f xs)
where
-- [combineEq f xs] combines equal elements with function [f] in an ordered list [xs]
combineEq _ xs'
= case xs' of
[] -> []
[x] -> [x]
(x:xx) -> combineEq' x xx
combineEq' z [] = [z]
combineEq' z@(kz,zz) (x@(kx,xx):xs')
| kx==kz = let yy = f kx xx zz in combineEq' (kx,yy) xs'
| otherwise = z:combineEq' x xs'
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE fromAscListWithKey #-}
#endif
-- | /O(n)/. Build a map from an ascending list of distinct elements in linear time.
-- /The precondition is not checked./
--
-- > fromDistinctAscList [(3,"b"), (5,"a")] == fromList [(3, "b"), (5, "a")]
-- > valid (fromDistinctAscList [(3,"b"), (5,"a")]) == True
-- > valid (fromDistinctAscList [(3,"b"), (5,"a"), (5,"b")]) == False
{- LIQUIDTODO fromDistinctAscList :: [(k,a)]<{v: (k, a) | fst(v) > fst(fld)}> -> OMap k a -}
{-@ Lazy fromDistinctAscList @-}
{-@ fromDistinctAscList :: {v: [(k, a)] | false} -> OMap k a @-}
fromDistinctAscList :: [(k,a)] -> Map k a
fromDistinctAscList xs
= create const (length xs) xs
where
-- 1) use continuations so that we use heap space instead of stack space.
-- 2) special case for n==5 to create bushier trees.
create c 0 xs' = c Tip xs'
create c 5 xs' = case xs' of
((k1,x1):(k2,x2):(k3,x3):(k4,x4):(k5,x5):xx)
-> c (bin k4 x4 (bin k2 x2 (singleton k1 x1) (singleton k3 x3)) (singleton k5 x5)) xx
_ -> error "fromDistinctAscList create"
create c n xs' = seq nr $ create (createR nr c) nl xs'
where nl = n `div` 2
nr = n - nl - 1
createR n c l ((k,x):ys) = create (createB l k x c) n ys
createR _ _ _ [] = error "fromDistinctAscList createR []"
createB l k x c r zs = c (bin k x l r) zs
{--------------------------------------------------------------------
Utility functions that return sub-ranges of the original
tree. Some functions take a `Maybe value` as an argument to
allow comparisons against infinite values. These are called `blow`
(where Nothing is -\infty) and `bhigh` (where Nothing is +\infty).
We use MaybeS value, which is a Maybe strict in the Just case.
[trim blow bhigh t] A tree that is either empty or where [x > blow]
and [x < bhigh] for the value [x] of the root.
[filterGt blow t] A tree where for all values [k]. [k > blow]
[filterLt bhigh t] A tree where for all values [k]. [k < bhigh]
[split k t] Returns two trees [l] and [r] where all keys
in [l] are <[k] and all keys in [r] are >[k].
[splitLookup k t] Just like [split] but also returns whether [k]
was found in the tree.
--------------------------------------------------------------------}
data MaybeS a = NothingS | JustS a -- LIQUID: !-annot-fix
{--------------------------------------------------------------------
[trim blo bhi t] trims away all subtrees that surely contain no
values in the range [blo] to [bhi]. The returned tree is either
empty or the key of the root is between @blo@ and @bhi@.
--------------------------------------------------------------------}
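-- Note (added for clarity; not in the original comments): trim only prunes at the
-- root. If the root key already lies strictly between blo and bhi, the tree is
-- returned unchanged, even when keys deeper in the tree fall outside the range;
-- cutting those off is the job of filterGt/filterLt below.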
-- LIQUID: EXPANDED CASE-EXPRS for lesser, greater, middle to avoid DEFAULT hassle
{-@ trim :: (Ord k) => lo:MaybeS k
-> hi:MaybeS k
-> OMap k a
-> {v: OMap k a | (RootBetween lo hi v) }
@-}
trim :: Ord k => MaybeS k -> MaybeS k -> Map k a -> Map k a
trim NothingS NothingS t = t
trim (JustS lk) NothingS t = greater lk t
where greater lo t@(Bin _ k _ _ r) | k <= lo = greater lo r
| otherwise = t
greater _ t'@Tip = t'
trim NothingS (JustS hk) t = lesser hk t
where lesser hi t'@(Bin _ k _ l _) | k >= hi = lesser hi l
| otherwise = t'
lesser _ t'@Tip = t'
trim (JustS lk) (JustS hk) t = middle lk hk t
where middle lo hi t'@(Bin _ k _ l r) | k <= lo = middle lo hi r
| k >= hi = middle lo hi l
| otherwise = t'
middle _ _ t'@Tip = t'
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE trim #-}
#endif
-- LIQUID QUALIFIER DEBUG SILLINESS
{-@ zoo1 :: (Ord k) => lo:k -> OMap k a -> {v: OMap k a | ((isBin(v)) => (lo < key(v)))} @-}
zoo1 :: Ord k => k -> Map k a -> Map k a
zoo1 = error "TODO"
{-@ zoo2 :: (Ord k) => lo:k -> OMap k a -> {v: OMap k a | ((isBin(v)) => (lo > key(v)))} @-}
zoo2 :: Ord k => k -> Map k a -> Map k a
zoo2 = error "TODO"
-- Helper function for 'mergeWithKey'. The @'trimLookupLo' lk hk t@ performs both
-- @'trim' (JustS lk) hk t@ and @'lookup' lk t@.
-- See Note: Type of local 'go' function
-- LIQUID trimLookupLo :: Ord k => k -> MaybeS k -> Map k a -> (Maybe a, Map k a)
-- LIQUID trimLookupLo lk NothingS t = greater lk t
-- LIQUID where greater :: Ord k => k -> Map k a -> (Maybe a, Map k a)
-- LIQUID greater lo t'@(Bin _ kx x l r) = case compare lo kx of LT -> (lookup lo l, {-`strictPair`-} t')
-- LIQUID EQ -> (Just x, r)
-- LIQUID GT -> greater lo r
-- LIQUID greater _ Tip = (Nothing, Tip)
-- LIQUID trimLookupLo lk (JustS hk) t = middle lk hk t
-- LIQUID where middle :: Ord k => k -> k -> Map k a -> (Maybe a, Map k a)
-- LIQUID middle lo hi t'@(Bin _ kx x l r) = case compare lo kx of LT | kx < hi -> (lookup lo l, {- `strictPair` -} t')
-- LIQUID | otherwise -> middle lo hi l
-- LIQUID EQ -> (Just x, {-`strictPair`-} lesser hi r)
-- LIQUID GT -> middle lo hi r
-- LIQUID middle _ _ Tip = (Nothing, Tip)
-- LIQUID
-- LIQUID lesser :: Ord k => k -> Map k a -> Map k a
-- LIQUID lesser hi (Bin _ k _ l _) | k >= hi = lesser hi l
-- LIQUID lesser _ t' = t'
{-@ trimLookupLo :: (Ord k)
=> lo:k
-> bhi:{v: MaybeS k | (isJustS(v) => (lo < fromJustS(v)))}
-> OMap k a
-> (Maybe a, {v: OMap k a | ((isBin(v) => (lo < key(v))) && ((isBin(v) && isJustS(bhi)) => (fromJustS(bhi) > key(v)))) }) @-}
trimLookupLo :: Ord k => k -> MaybeS k -> Map k a -> (Maybe a, Map k a)
trimLookupLo lk NothingS t = greater lk t
where greater :: Ord k => k -> Map k a -> (Maybe a, Map k a)
greater lo t'@(Bin _ kx x l r) = case compare lo kx of LT -> (lookup lo l, {-`strictPair`-} t')
EQ -> (Just x, (case r of {r'@(Bin _ _ _ _ _) -> r' ; r'@Tip -> r'}))
GT -> greater lo r
greater _ Tip = (Nothing, Tip)
trimLookupLo lk (JustS hk) t = middle lk hk t
where middle :: Ord k => k -> k -> Map k a -> (Maybe a, Map k a)
middle lo hi t'@(Bin _ kx x l r) = case compare lo kx of LT | kx < hi -> (lookup lo l, {- `strictPair` -} t')
| otherwise -> middle lo hi l
EQ -> (Just x, {-`strictPair`-} lesser lo hi (case r of {r'@(Bin _ _ _ _ _) -> r' ; r'@Tip -> r'}))
GT -> middle lo hi r
middle _ _ Tip = (Nothing, Tip)
lesser :: Ord k => k -> k -> Map k a -> Map k a
lesser lo hi t'@(Bin _ k _ l _) | k >= hi = lesser lo hi l
| otherwise = t'
lesser _ _ t'@Tip = t'
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE trimLookupLo #-}
#endif
{--------------------------------------------------------------------
[filterGt b t] filter all keys >[b] from tree [t]
[filterLt b t] filter all keys <[b] from tree [t]
--------------------------------------------------------------------}
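-- Illustrative examples (added; not from the original documentation -- they use the
-- same small map as the 'split' examples below):
--
-- > filterGt (JustS 3) (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
-- > filterLt (JustS 5) (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
-- > filterGt NothingS (fromList [(5,"a"), (3,"b")]) == fromList [(3,"b"), (5,"a")]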
{-@ filterGt :: (Ord k) => x:MaybeS k -> OMap k v -> OMap {v:k | ((isJustS(x)) => (v > fromJustS(x))) } v @-}
filterGt :: Ord k => MaybeS k -> Map k v -> Map k v
filterGt NothingS t = t
filterGt (JustS b) t = filterGt' b t
-- LIQUID TXREC-TOPLEVEL-ISSUE
filterGt' _ Tip = Tip
filterGt' b' (Bin _ kx x l r) =
case compare b' kx of LT -> join kx x (filterGt' b' l) r
EQ -> r
GT -> filterGt' b' r
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE filterGt #-}
#endif
{-@ filterLt :: (Ord k) => x:MaybeS k -> OMap k v -> OMap {v:k | ((isJustS(x)) => (v < fromJustS(x))) } v @-}
filterLt :: Ord k => MaybeS k -> Map k v -> Map k v
filterLt NothingS t = t
filterLt (JustS b) t = filterLt' b t
-- LIQUID TXREC-TOPLEVEL-ISSUE
filterLt' _ Tip = Tip
filterLt' b' (Bin _ kx x l r) =
case compare kx b' of LT -> join kx x l (filterLt' b' r)
EQ -> l
GT -> filterLt' b' l
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE filterLt #-}
#endif
{--------------------------------------------------------------------
Split
--------------------------------------------------------------------}
-- | /O(log n)/. The expression (@'split' k map@) is a pair @(map1,map2)@ where
-- the keys in @map1@ are smaller than @k@ and the keys in @map2@ larger than @k@.
-- Any key equal to @k@ is found in neither @map1@ nor @map2@.
--
-- > split 2 (fromList [(5,"a"), (3,"b")]) == (empty, fromList [(3,"b"), (5,"a")])
-- > split 3 (fromList [(5,"a"), (3,"b")]) == (empty, singleton 5 "a")
-- > split 4 (fromList [(5,"a"), (3,"b")]) == (singleton 3 "b", singleton 5 "a")
-- > split 5 (fromList [(5,"a"), (3,"b")]) == (singleton 3 "b", empty)
-- > split 6 (fromList [(5,"a"), (3,"b")]) == (fromList [(3,"b"), (5,"a")], empty)
{-@ split :: (Ord k) => x:k -> OMap k a -> (OMap {v: k | v < x} a, OMap {v:k | v > x} a) @-}
split :: Ord k => k -> Map k a -> (Map k a, Map k a)
split k t = k `seq`
case t of
Tip -> (Tip, Tip)
Bin _ kx x l r -> case compare k kx of
LT -> let (lt,gt) = split k l in (lt,join kx x gt r)
GT -> let (lt,gt) = split k r in (join kx x l lt,gt)
EQ -> (l,r)
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE split #-}
#endif
-- | /O(log n)/. The expression (@'splitLookup' k map@) splits a map just
-- like 'split' but also returns @'lookup' k map@.
--
-- > splitLookup 2 (fromList [(5,"a"), (3,"b")]) == (empty, Nothing, fromList [(3,"b"), (5,"a")])
-- > splitLookup 3 (fromList [(5,"a"), (3,"b")]) == (empty, Just "b", singleton 5 "a")
-- > splitLookup 4 (fromList [(5,"a"), (3,"b")]) == (singleton 3 "b", Nothing, singleton 5 "a")
-- > splitLookup 5 (fromList [(5,"a"), (3,"b")]) == (singleton 3 "b", Just "a", empty)
-- > splitLookup 6 (fromList [(5,"a"), (3,"b")]) == (fromList [(3,"b"), (5,"a")], Nothing, empty)
{-@ splitLookup :: (Ord k) => x:k -> OMap k a -> (OMap {v: k | v < x} a, Maybe a, OMap {v:k | v > x} a) @-}
splitLookup :: Ord k => k -> Map k a -> (Map k a,Maybe a,Map k a)
splitLookup k t = k `seq`
case t of
Tip -> (Tip,Nothing,Tip)
Bin _ kx x l r -> case compare k kx of
LT -> let (lt,z,gt) = splitLookup k l in (lt,z,join kx x gt r)
GT -> let (lt,z,gt) = splitLookup k r in (join kx x l lt,z,gt)
EQ -> (l,Just x,r)
#if __GLASGOW_HASKELL__ >= 700
{-# INLINABLE splitLookup #-}
#endif
{--------------------------------------------------------------------
Utility functions that maintain the balance properties of the tree.
All constructors assume that all values in [l] < [k] and all values
in [r] > [k], and that [l] and [r] are valid trees.
In order of sophistication:
[Bin sz k x l r] The type constructor.
[bin k x l r] Maintains the correct size, assumes that both [l]
and [r] are balanced with respect to each other.
[balance k x l r] Restores the balance and size.
Assumes that the original tree was balanced and
that [l] or [r] has changed by at most one element.
[join k x l r] Restores balance and size.
Furthermore, we can construct a new tree from two trees. Both operations
assume that all values in [l] < all values in [r] and that [l] and [r]
are valid:
[glue l r] Glues [l] and [r] together. Assumes that [l] and
[r] are already balanced with respect to each other.
[merge l r] Merges two trees and restores balance.
Note: in contrast to Adams' paper, we use (<=) comparisons instead
of (<) comparisons in [join], [merge] and [balance].
Quickcheck (on [difference]) showed that this was necessary in order
to maintain the invariants. It is quite unsatisfactory that I haven't
been able to find out why this is actually the case! Fortunately, it
doesn't hurt to be a bit more conservative.
--------------------------------------------------------------------}
{--------------------------------------------------------------------
Join
--------------------------------------------------------------------}
{-@ join :: k:k -> a -> OMap {v:k | v < k} a -> OMap {v:k| v > k} a -> OMap k a @-}
join :: k -> a -> Map k a -> Map k a -> Map k a
join k x m1 m2 = joinT k x m1 m2 (mlen m1 + mlen m2)
--LIQUID join kx x Tip r = insertMin kx x r
--LIQUID join kx x l Tip = insertMax kx x l
--LIQUID join kx x l@(Bin sizeL ky y ly ry) r@(Bin sizeR kz z lz rz)
--LIQUID | delta*sizeL < sizeR = balanceL kz z (join kx x l lz) rz
--LIQUID | delta*sizeR < sizeL = balanceR ky y ly (join kx x ry r)
--LIQUID | otherwise = bin kx x l r
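-- joinT is the original join with an extra Int argument: the combined size of both
-- maps, used only as a termination witness for the LIQUID 'Decrease' annotation
-- below (it shrinks on every recursive call).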
{-@ joinT :: k:k -> a -> a:OMap {v:k | v < k} a -> b:OMap {v:k| v > k} a -> SumMLen a b -> OMap k a @-}
{-@ Decrease joinT 5 @-}
{- LIQUID WITNESS -}
joinT :: k -> a -> Map k a -> Map k a -> Int -> Map k a
joinT kx x Tip r _ = insertMin kx x r
joinT kx x l Tip _ = insertMax kx x l
joinT kx x l@(Bin sizeL ky y ly ry) r@(Bin sizeR kz z lz rz) d
| delta*sizeL < sizeR = balanceL kz z (joinT kx x l lz (d-(mlen rz)-1)) rz
| delta*sizeR < sizeL = balanceR ky y ly (joinT kx x ry r (d-(mlen ly)-1))
| otherwise = bin kx x l r
-- insertMin and insertMax don't perform potentially expensive comparisons.
insertMax, insertMin :: k -> a -> Map k a -> Map k a
insertMax kx x t
= case t of
Tip -> singleton kx x
Bin _ ky y l r
-> balanceR ky y l (insertMax kx x r)
insertMin kx x t
= case t of
Tip -> singleton kx x
Bin _ ky y l r
-> balanceL ky y (insertMin kx x l) r
{--------------------------------------------------------------------
[merge l r]: merges two trees.
--------------------------------------------------------------------}
{-@ merge :: kcut:k -> OMap {v:k | v < kcut} a -> OMap {v:k| v > kcut} a -> OMap k a @-}
merge :: k -> Map k a -> Map k a -> Map k a
merge k m1 m2 = mergeT k m1 m2 (mlen m1 + mlen m2)
--LIQUID merge _ Tip r = r
--LIQUID merge _ l Tip = l
--LIQUID merge kcut l@(Bin sizeL kx x lx rx) r@(Bin sizeR ky y ly ry)
--LIQUID | delta*sizeL < sizeR = balanceL ky y (merge kcut l ly) ry
--LIQUID | delta*sizeR < sizeL = balanceR kx x lx (merge kcut rx r)
--LIQUID | otherwise = glue kcut l r
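-- As with joinT, the extra Int argument of mergeT is the combined size of both
-- maps, serving as the termination witness for the 'Decrease' annotation below.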
{-@ mergeT :: kcut:k -> a:OMap {v:k | v < kcut} a -> b:OMap {v:k| v > kcut} a -> SumMLen a b -> OMap k a @-}
{-@ Decrease mergeT 4 @-}
{- LIQUID WITNESS -}
mergeT :: k -> Map k a -> Map k a -> Int -> Map k a
mergeT _ Tip r _ = r
mergeT _ l Tip _ = l
mergeT kcut l@(Bin sizeL kx x lx rx) r@(Bin sizeR ky y ly ry) d
| delta*sizeL < sizeR = balanceL ky y (mergeT kcut l ly (d-(mlen ry)-1)) ry
| delta*sizeR < sizeL = balanceR kx x lx (mergeT kcut rx r (d-(mlen lx)-1))
| otherwise = glue kcut l r
{--------------------------------------------------------------------
[glue l r]: glues two trees together.
Assumes that [l] and [r] are already balanced with respect to each other.
--------------------------------------------------------------------}
{-@ glue :: kcut:k -> OMap {v:k | v < kcut} a -> OMap {v:k| v > kcut} a -> OMap k a @-}
glue :: k -> Map k a -> Map k a -> Map k a
glue _ Tip r = r
glue _ l Tip = l
glue kcut l r
| size l > size r = let (km, m, l') = deleteFindMax l in balanceR km m l' r
| otherwise = let (km, m, r') = deleteFindMin r in balanceL km m l r'
-- | /O(log n)/. Delete and find the minimal element.
--
-- > deleteFindMin (fromList [(5,"a"), (3,"b"), (10,"c")]) == ((3,"b"), fromList[(5,"a"), (10,"c")])
-- > deleteFindMin empty Error: can not return the minimal element of an empty map
{-@ deleteFindMin :: OMap k a -> (k, a, OMap k a)<{\k a -> true}, \a k -> {v0:Map ({v:k | v > k}) a | true}> @-}
deleteFindMin :: Map k a -> (k, a, Map k a)
deleteFindMin t
= case t of
Bin _ k x Tip r -> (k, x, r)
Bin _ k x l r -> let (km, m, l') = deleteFindMin l in (km, m, balanceR k x l' r)
Tip -> error "Map.deleteFindMin: can not return the minimal element of an empty map"
-- | /O(log n)/. Delete and find the maximal element.
--
-- > deleteFindMax (fromList [(5,"a"), (3,"b"), (10,"c")]) == ((10,"c"), fromList [(3,"b"), (5,"a")])
-- > deleteFindMax empty Error: can not return the maximal element of an empty map
{-@ deleteFindMax :: OMap k a -> (k, a, OMap k a)<{\k a -> true}, \a k -> {v0:Map ({v:k | v < k}) a | true}> @-}
deleteFindMax :: Map k a -> (k, a, Map k a)
deleteFindMax t
= case t of
Bin _ k x l Tip -> (k, x, l)
Bin _ k x l r -> let (km, m, r') = deleteFindMax r in (km, m, balanceL k x l r')
Tip -> error "Map.deleteFindMax: can not return the maximal element of an empty map"
{--------------------------------------------------------------------
[balance l x r] balances two trees with value x.
The sizes of the trees should balance after decreasing the
size of one of them. (a rotation).
[delta] is the maximal relative difference between the sizes of
two trees, it corresponds with the [w] in Adams' paper.
[ratio] is the ratio between an outer and inner sibling of the
heavier subtree in an unbalanced setting. It determines
whether a double or single rotation should be performed
to restore balance. It corresponds with the inverse
of $\alpha$ in Adams' article.
Note that according to Adams' paper:
- [delta] should be larger than 4.646 with a [ratio] of 2.
- [delta] should be larger than 3.745 with a [ratio] of 1.534.
But Adams' paper is erroneous:
- It can be proved that for delta=2 and delta>=5 there does
not exist any ratio that would work.
- Delta=4.5 and ratio=2 does not work.
That leaves two reasonable variants, delta=3 and delta=4,
both with ratio=2.
- A lower [delta] leads to a more 'perfectly' balanced tree.
- A higher [delta] performs less rebalancing.
In the benchmarks, delta=3 is faster on insert operations,
and delta=4 has slightly better deletes. As the insert speedup
is larger, we currently use delta=3.
--------------------------------------------------------------------}
delta,ratio :: Int
delta = 3
ratio = 2
-- The balance function is equivalent to the following:
--
-- balance :: k -> a -> Map k a -> Map k a -> Map k a
-- balance k x l r
-- | sizeL + sizeR <= 1 = Bin sizeX k x l r
-- | sizeR > delta*sizeL = rotateL k x l r
-- | sizeL > delta*sizeR = rotateR k x l r
-- | otherwise = Bin sizeX k x l r
-- where
-- sizeL = size l
-- sizeR = size r
-- sizeX = sizeL + sizeR + 1
--
-- rotateL :: a -> b -> Map a b -> Map a b -> Map a b
-- rotateL k x l r@(Bin _ _ _ ly ry) | size ly < ratio*size ry = singleL k x l r
-- | otherwise = doubleL k x l r
--
-- rotateR :: a -> b -> Map a b -> Map a b -> Map a b
-- rotateR k x l@(Bin _ _ _ ly ry) r | size ry < ratio*size ly = singleR k x l r
-- | otherwise = doubleR k x l r
--
-- singleL, singleR :: a -> b -> Map a b -> Map a b -> Map a b
-- singleL k1 x1 t1 (Bin _ k2 x2 t2 t3) = bin k2 x2 (bin k1 x1 t1 t2) t3
-- singleR k1 x1 (Bin _ k2 x2 t1 t2) t3 = bin k2 x2 t1 (bin k1 x1 t2 t3)
--
-- doubleL, doubleR :: a -> b -> Map a b -> Map a b -> Map a b
-- doubleL k1 x1 t1 (Bin _ k2 x2 (Bin _ k3 x3 t2 t3) t4) = bin k3 x3 (bin k1 x1 t1 t2) (bin k2 x2 t3 t4)
-- doubleR k1 x1 (Bin _ k2 x2 t1 (Bin _ k3 x3 t2 t3)) t4 = bin k3 x3 (bin k2 x2 t1 t2) (bin k1 x1 t3 t4)
--
-- It is only written in such a way that every node is pattern-matched only once.
{-@ balance :: k:k -> a -> OMap {v:k|v<k} a -> OMap {v:k|v>k} a -> OMap k a @-}
balance :: k -> a -> Map k a -> Map k a -> Map k a
balance k x l r = case l of
Tip -> case r of
Tip -> Bin 1 k x Tip Tip
(Bin _ _ _ Tip Tip) -> Bin 2 k x Tip r
(Bin _ rk rx Tip rr@(Bin _ _ _ _ _)) -> Bin 3 rk rx (Bin 1 k x Tip Tip) rr
(Bin _ rk rx (Bin _ rlk rlx _ _) Tip) -> Bin 3 rlk rlx (Bin 1 k x Tip Tip) (Bin 1 rk rx Tip Tip)
(Bin rs rk rx rl@(Bin rls rlk rlx rll rlr) rr@(Bin rrs _ _ _ _))
| rls < ratio*rrs -> Bin (1+rs) rk rx (Bin (1+rls) k x Tip rl) rr
| otherwise -> Bin (1+rs) rlk rlx (Bin (1+size rll) k x Tip rll) (Bin (1+rrs+size rlr) rk rx rlr rr)
(Bin ls lk lx ll lr) -> case r of
Tip -> case (ll, lr) of
(Tip, Tip) -> Bin 2 k x l Tip
(Tip, (Bin _ lrk lrx _ _)) -> Bin 3 lrk lrx (Bin 1 lk lx Tip Tip) (Bin 1 k x Tip Tip)
((Bin _ _ _ _ _), Tip) -> Bin 3 lk lx ll (Bin 1 k x Tip Tip)
((Bin lls _ _ _ _), (Bin lrs lrk lrx lrl lrr))
| lrs < ratio*lls -> Bin (1+ls) lk lx ll (Bin (1+lrs) k x lr Tip)
| otherwise -> Bin (1+ls) lrk lrx (Bin (1+lls+size lrl) lk lx ll lrl) (Bin (1+size lrr) k x lrr Tip)
(Bin rs rk rx rl rr)
| rs > delta*ls -> case (rl, rr) of
(Bin rls rlk rlx rll rlr, Bin rrs _ _ _ _)
| rls < ratio*rrs -> Bin (1+ls+rs) rk rx (Bin (1+ls+rls) k x l rl) rr
| otherwise -> Bin (1+ls+rs) rlk rlx (Bin (1+ls+size rll) k x l rll) (Bin (1+rrs+size rlr) rk rx rlr rr)
(_, _) -> error "Failure in Data.Map.balance"
| ls > delta*rs -> case (ll, lr) of
(Bin lls _ _ _ _, Bin lrs lrk lrx lrl lrr)
| lrs < ratio*lls -> Bin (1+ls+rs) lk lx ll (Bin (1+rs+lrs) k x lr r)
| otherwise -> Bin (1+ls+rs) lrk lrx (Bin (1+lls+size lrl) lk lx ll lrl) (Bin (1+rs+size lrr) k x lrr r)
(_, _) -> error "Failure in Data.Map.balance"
| otherwise -> Bin (1+ls+rs) k x l r
{-# NOINLINE balance #-}
-- Functions balanceL and balanceR are specialised versions of balance.
-- balanceL only checks whether the left subtree is too big,
-- balanceR only checks whether the right subtree is too big.
-- balanceL is called when left subtree might have been inserted to or when
-- right subtree might have been deleted from.
{-@ balanceL :: kcut:k -> a -> OMap {v:k | v < kcut} a -> OMap {v:k| v > kcut} a -> OMap k a @-}
balanceL :: k -> a -> Map k a -> Map k a -> Map k a
balanceL k x l r = case r of
Tip -> case l of
Tip -> Bin 1 k x Tip Tip
(Bin _ _ _ Tip Tip) -> Bin 2 k x l Tip
(Bin _ lk lx Tip (Bin _ lrk lrx _ _)) -> Bin 3 lrk lrx (Bin 1 lk lx Tip Tip) (Bin 1 k x Tip Tip)
(Bin _ lk lx ll@(Bin _ _ _ _ _) Tip) -> Bin 3 lk lx ll (Bin 1 k x Tip Tip)
(Bin ls lk lx ll@(Bin lls _ _ _ _) lr@(Bin lrs lrk lrx lrl lrr))
| lrs < ratio*lls -> Bin (1+ls) lk lx ll (Bin (1+lrs) k x lr Tip)
| otherwise -> Bin (1+ls) lrk lrx (Bin (1+lls+size lrl) lk lx ll lrl) (Bin (1+size lrr) k x lrr Tip)
(Bin rs _ _ _ _) -> case l of
Tip -> Bin (1+rs) k x Tip r
(Bin ls lk lx ll lr)
| ls > delta*rs -> case (ll, lr) of
(Bin lls _ _ _ _, Bin lrs lrk lrx lrl lrr)
| lrs < ratio*lls -> Bin (1+ls+rs) lk lx ll (Bin (1+rs+lrs) k x lr r)
| otherwise -> Bin (1+ls+rs) lrk lrx (Bin (1+lls+size lrl) lk lx ll lrl) (Bin (1+rs+size lrr) k x lrr r)
(_, _) -> error "Failure in Data.Map.balanceL"
| otherwise -> Bin (1+ls+rs) k x l r
{-# NOINLINE balanceL #-}
-- balanceR is called when right subtree might have been inserted to or when
-- left subtree might have been deleted from.
{-@ balanceR :: kcut:k -> a -> OMap {v:k | v < kcut} a -> OMap {v:k| v > kcut} a -> OMap k a @-}
balanceR :: k -> a -> Map k a -> Map k a -> Map k a
balanceR k x l r = case l of
Tip -> case r of
Tip -> Bin 1 k x Tip Tip
(Bin _ _ _ Tip Tip) -> Bin 2 k x Tip r
(Bin _ rk rx Tip rr@(Bin _ _ _ _ _)) -> Bin 3 rk rx (Bin 1 k x Tip Tip) rr
(Bin _ rk rx (Bin _ rlk rlx _ _) Tip) -> Bin 3 rlk rlx (Bin 1 k x Tip Tip) (Bin 1 rk rx Tip Tip)
(Bin rs rk rx rl@(Bin rls rlk rlx rll rlr) rr@(Bin rrs _ _ _ _))
| rls < ratio*rrs -> Bin (1+rs) rk rx (Bin (1+rls) k x Tip rl) rr
| otherwise -> Bin (1+rs) rlk rlx (Bin (1+size rll) k x Tip rll) (Bin (1+rrs+size rlr) rk rx rlr rr)
(Bin ls _ _ _ _) -> case r of
Tip -> Bin (1+ls) k x l Tip
(Bin rs rk rx rl rr)
| rs > delta*ls -> case (rl, rr) of
(Bin rls rlk rlx rll rlr, Bin rrs _ _ _ _)
| rls < ratio*rrs -> Bin (1+ls+rs) rk rx (Bin (1+ls+rls) k x l rl) rr
| otherwise -> Bin (1+ls+rs) rlk rlx (Bin (1+ls+size rll) k x l rll) (Bin (1+rrs+size rlr) rk rx rlr rr)
(_, _) -> error "Failure in Data.Map.balanceR"
| otherwise -> Bin (1+ls+rs) k x l r
{-# NOINLINE balanceR #-}
{--------------------------------------------------------------------
The bin constructor maintains the size of the tree
--------------------------------------------------------------------}
{-@ bin :: k:k -> a -> OMap {v:k | v < k} a -> OMap {v:k| v > k} a -> OMap k a @-}
bin :: k -> a -> Map k a -> Map k a -> Map k a
bin k x l r
= Bin (size l + size r + 1) k x l r
{-# INLINE bin #-}
{--------------------------------------------------------------------
Eq converts the tree to a list. In a lazy setting, this
actually seems to be one of the faster methods to compare two trees
and it is certainly the simplest :-)
--------------------------------------------------------------------}
instance (Eq k,Eq a) => Eq (Map k a) where
t1 == t2 = (size t1 == size t2) && (toAscList t1 == toAscList t2)
{--------------------------------------------------------------------
Ord
--------------------------------------------------------------------}
instance (Ord k, Ord v) => Ord (Map k v) where
compare m1 m2 = compare (toAscList m1) (toAscList m2)
{--------------------------------------------------------------------
Functor
--------------------------------------------------------------------}
-- LIQUID instance Functor (Map k) where
-- LIQUID fmap f m = map f m
-- LIQUID
-- LIQUID instance Traversable (Map k) where
-- LIQUID traverse f = traverseWithKey (\_ -> f)
-- LIQUID
-- LIQUID instance Foldable.Foldable (Map k) where
-- LIQUID fold Tip = mempty
-- LIQUID fold (Bin _ _ v l r) = Foldable.fold l `mappend` v `mappend` Foldable.fold r
-- LIQUID foldr = foldr
-- LIQUID foldl = foldl
-- LIQUID foldMap _ Tip = mempty
-- LIQUID foldMap f (Bin _ _ v l r) = Foldable.foldMap f l `mappend` f v `mappend` Foldable.foldMap f r
-- LIQUID
-- LIQUID instance (NFData k, NFData a) => NFData (Map k a) where
-- LIQUID rnf Tip = ()
-- LIQUID rnf (Bin _ kx x l r) = rnf kx `seq` rnf x `seq` rnf l `seq` rnf r
{--------------------------------------------------------------------
Read
--------------------------------------------------------------------}
-- LIQUID instance (Ord k, Read k, Read e) => Read (Map k e) where
-- LIQUID #ifdef __GLASGOW_HASKELL__
-- LIQUID readPrec = parens $ prec 10 $ do
-- LIQUID Ident "fromList" <- lexP
-- LIQUID xs <- readPrec
-- LIQUID return (fromList xs)
-- LIQUID
-- LIQUID readListPrec = readListPrecDefault
-- LIQUID #else
-- LIQUID readsPrec p = readParen (p > 10) $ \ r -> do
-- LIQUID ("fromList",s) <- lex r
-- LIQUID (xs,t) <- reads s
-- LIQUID return (fromList xs,t)
-- LIQUID #endif
{--------------------------------------------------------------------
Show
--------------------------------------------------------------------}
-- LIQUID instance (Show k, Show a) => Show (Map k a) where
-- LIQUID showsPrec d m = showParen (d > 10) $
-- LIQUID showString "fromList " . shows (toList m)
-- | /O(n)/. Show the tree that implements the map. The tree is shown
-- in a compressed, hanging format. See 'showTreeWith'.
showTree :: (Show k,Show a) => Map k a -> String
showTree m
= showTreeWith showElem True False m
where
showElem k x = show k ++ ":=" ++ show x
{- | /O(n)/. The expression (@'showTreeWith' showelem hang wide map@) shows
the tree that implements the map. Elements are shown using the @showelem@ function. If @hang@ is
'True', a /hanging/ tree is shown; otherwise a rotated tree is shown. If
@wide@ is 'True', an extra wide version is shown.
> Map> let t = fromDistinctAscList [(x,()) | x <- [1..5]]
> Map> putStrLn $ showTreeWith (\k x -> show (k,x)) True False t
> (4,())
> +--(2,())
> | +--(1,())
> | +--(3,())
> +--(5,())
>
> Map> putStrLn $ showTreeWith (\k x -> show (k,x)) True True t
> (4,())
> |
> +--(2,())
> | |
> | +--(1,())
> | |
> | +--(3,())
> |
> +--(5,())
>
> Map> putStrLn $ showTreeWith (\k x -> show (k,x)) False True t
> +--(5,())
> |
> (4,())
> |
> | +--(3,())
> | |
> +--(2,())
> |
> +--(1,())
-}
showTreeWith :: (k -> a -> String) -> Bool -> Bool -> Map k a -> String
showTreeWith showelem hang wide t
| hang = (showsTreeHang showelem wide [] t) ""
| otherwise = (showsTree showelem wide [] [] t) ""
{-@ Decrease showsTree 5 @-}
showsTree :: (k -> a -> String) -> Bool -> [String] -> [String] -> Map k a -> ShowS
showsTree showelem wide lbars rbars t
= case t of
Tip -> showsBars lbars . showString "|\n"
Bin _ kx x Tip Tip
-> showsBars lbars . showString (showelem kx x) . showString "\n"
Bin _ kx x l r
-> showsTree showelem wide (withBar rbars) (withEmpty rbars) r .
showWide wide rbars .
showsBars lbars . showString (showelem kx x) . showString "\n" .
showWide wide lbars .
showsTree showelem wide (withEmpty lbars) (withBar lbars) l
{-@ Decrease showsTreeHang 4 @-}
showsTreeHang :: (k -> a -> String) -> Bool -> [String] -> Map k a -> ShowS
showsTreeHang showelem wide bars t
= case t of
Tip -> showsBars bars . showString "|\n"
Bin _ kx x Tip Tip
-> showsBars bars . showString (showelem kx x) . showString "\n"
Bin _ kx x l r
-> showsBars bars . showString (showelem kx x) . showString "\n" .
showWide wide bars .
showsTreeHang showelem wide (withBar bars) l .
showWide wide bars .
showsTreeHang showelem wide (withEmpty bars) r
showWide :: Bool -> [String] -> String -> String
showWide wide bars
| wide = showString (concat (reverse bars)) . showString "|\n"
| otherwise = id
showsBars :: [String] -> ShowS
showsBars bars
= case bars of
[] -> id
_ -> showString (concat (reverse (tail bars))) . showString node
node :: String
node = "+--"
withBar, withEmpty :: [String] -> [String]
withBar bars = "| ":bars
withEmpty bars = " ":bars
{--------------------------------------------------------------------
Typeable
--------------------------------------------------------------------}
-- LIQUID #include "Typeable.h"
-- LIQUID INSTANCE_TYPEABLE2(Map,mapTc,"Map")
{--------------------------------------------------------------------
Assertions
--------------------------------------------------------------------}
-- | /O(n)/. Test if the internal map structure is valid.
--
-- > valid (fromAscList [(3,"b"), (5,"a")]) == True
-- > valid (fromAscList [(5,"a"), (3,"b")]) == False
valid :: Ord k => Map k a -> Bool
valid t
= balanced t && ordered t && validsize t
ordered :: Ord a => Map a b -> Bool
ordered t
= bounded (const True) (const True) t
where
bounded lo hi t'
= case t' of
Tip -> True
Bin _ kx _ l r -> (lo kx) && (hi kx) && bounded lo (<kx) l && bounded (>kx) hi r
-- | Exported only for "Debug.QuickCheck"
balanced :: Map k a -> Bool
balanced t
= case t of
Tip -> True
Bin _ _ _ l r -> (size l + size r <= 1 || (size l <= delta*size r && size r <= delta*size l)) &&
balanced l && balanced r
validsize :: Map a b -> Bool
validsize t
= (realsize t == Just (size t))
where
realsize t'
= case t' of
Tip -> Just 0
Bin sz _ _ l r -> case (realsize l,realsize r) of
(Just n,Just m) | n+m+1 == sz -> Just sz
_ -> Nothing
{--------------------------------------------------------------------
Utilities
--------------------------------------------------------------------}
foldlStrict :: (a -> b -> a) -> a -> [b] -> a
foldlStrict f = go
where
go z [] = z
go z (x:xs) = let z' = f z x in z' `seq` go z' xs
{-# INLINE foldlStrict #-}
| ssaavedra/liquidhaskell | benchmarks/esop2013-submission/Base.hs | bsd-3-clause | 129,187 | 0 | 21 | 34,739 | 22,181 | 11,801 | 10,380 | -1 | -1 |
module Qualifier where
{- Renaming 'sumSquares' to 'sum' will fail. The user
   needs to qualify the use of 'sum' first. Another
   implementation option is to let the refactorer qualify
   the use of 'sum' automatically, but the user might
   overlook this change.
-}
sumSquares (x:xs) = sq x + sumSquares xs
where sq x = x ^pow
pow = 2
sumSquares [] = 0
main = sumSquares [1..4] + sum [1..4]
| kmate/HaRe | test/testdata/Renaming/Qualifier_TokOut.hs | bsd-3-clause | 411 | 0 | 7 | 97 | 84 | 44 | 40 | 6 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Lib where
import TH
val = $(splice)
| tolysz/prepare-ghcjs | spec-lts8/cabal/Cabal/tests/PackageTests/TemplateHaskell/vanilla/Lib.hs | bsd-3-clause | 78 | 0 | 6 | 14 | 17 | 11 | 6 | 4 | 1 |
-- Load in ghci with: stack ghci --package QuickCheck
module Reverse.Test where
import Reverse
import Test.QuickCheck
rvrs' :: String -> String
rvrs' s = unwords (reverse $ words cleanS)
where cleanS = map replaceWithSpace s
prop_bookReverse s = (rvrs' s) == (rvrs s)
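-- Usage note (added; assumes 'rvrs' and 'replaceWithSpace' are exported by the
-- 'Reverse' module): evaluate 'test' in ghci to QuickCheck that the local 'rvrs''
-- agrees with the book's 'rvrs'.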
test = quickCheck prop_bookReverse
| JeremyLWright/haskellbook | src-test/Reverse.Test.hs | isc | 310 | 0 | 8 | 54 | 89 | 47 | 42 | 8 | 1 |
{-# LANGUAGE BangPatterns, DataKinds, DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Hadoop.Protos.ClientDatanodeProtocolProtos.StartReconfigurationRequestProto (StartReconfigurationRequestProto(..)) where
import Prelude ((+), (/))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
data StartReconfigurationRequestProto = StartReconfigurationRequestProto{}
deriving (Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data)
instance P'.Mergeable StartReconfigurationRequestProto where
mergeAppend StartReconfigurationRequestProto StartReconfigurationRequestProto = StartReconfigurationRequestProto
instance P'.Default StartReconfigurationRequestProto where
defaultValue = StartReconfigurationRequestProto
instance P'.Wire StartReconfigurationRequestProto where
wireSize ft' self'@(StartReconfigurationRequestProto)
= case ft' of
10 -> calc'Size
11 -> P'.prependMessageSize calc'Size
_ -> P'.wireSizeErr ft' self'
where
calc'Size = 0
wirePut ft' self'@(StartReconfigurationRequestProto)
= case ft' of
10 -> put'Fields
11 -> do
P'.putSize (P'.wireSize 10 self')
put'Fields
_ -> P'.wirePutErr ft' self'
where
put'Fields
= do
Prelude'.return ()
wireGet ft'
= case ft' of
10 -> P'.getBareMessageWith update'Self
11 -> P'.getMessageWith update'Self
_ -> P'.wireGetErr ft'
where
update'Self wire'Tag old'Self
= case wire'Tag of
_ -> let (field'Number, wire'Type) = P'.splitWireTag wire'Tag in P'.unknown field'Number wire'Type old'Self
instance P'.MessageAPI msg' (msg' -> StartReconfigurationRequestProto) StartReconfigurationRequestProto where
getVal m' f' = f' m'
instance P'.GPB StartReconfigurationRequestProto
instance P'.ReflectDescriptor StartReconfigurationRequestProto where
getMessageInfo _ = P'.GetMessageInfo (P'.fromDistinctAscList []) (P'.fromDistinctAscList [])
reflectDescriptorInfo _
= Prelude'.read
"DescriptorInfo {descName = ProtoName {protobufName = FIName \".hadoop.hdfs.StartReconfigurationRequestProto\", haskellPrefix = [MName \"Hadoop\",MName \"Protos\"], parentModule = [MName \"ClientDatanodeProtocolProtos\"], baseName = MName \"StartReconfigurationRequestProto\"}, descFilePath = [\"Hadoop\",\"Protos\",\"ClientDatanodeProtocolProtos\",\"StartReconfigurationRequestProto.hs\"], isGroup = False, fields = fromList [], descOneofs = fromList [], keys = fromList [], extRanges = [], knownKeys = fromList [], storeUnknown = False, lazyFields = False, makeLenses = False}"
instance P'.TextType StartReconfigurationRequestProto where
tellT = P'.tellSubMessage
getT = P'.getSubMessage
instance P'.TextMsg StartReconfigurationRequestProto where
textPut msg = Prelude'.return ()
textGet = Prelude'.return P'.defaultValue | alexbiehl/hoop | hadoop-protos/src/Hadoop/Protos/ClientDatanodeProtocolProtos/StartReconfigurationRequestProto.hs | mit | 3,114 | 1 | 16 | 542 | 554 | 291 | 263 | 53 | 0 |
{-# LANGUAGE PackageImports #-}
import "Chupacabra" Application (getApplicationDev)
import Network.Wai.Handler.Warp
(runSettings, defaultSettings, setPort)
import Control.Concurrent (forkIO)
import System.Directory (doesFileExist, removeFile)
import System.Exit (exitSuccess)
import Control.Concurrent (threadDelay)
main :: IO ()
main = do
putStrLn "Starting devel application"
(port, app) <- getApplicationDev
forkIO $ runSettings (setPort port defaultSettings) app
loop
loop :: IO ()
loop = do
threadDelay 100000
e <- doesFileExist "yesod-devel/devel-terminate"
if e then terminateDevel else loop
terminateDevel :: IO ()
terminateDevel = exitSuccess
| athanclark/Chupacabra | devel.hs | mit | 681 | 0 | 10 | 104 | 186 | 100 | 86 | 21 | 2 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE CPP #-}
module Shapes.Linear.Template where
import Test.QuickCheck.Arbitrary
import Control.Monad
import Language.Haskell.TH
-- TODO: Use a wrapper type to hold multiple sizes of vector?
data ValueInfo = ValueInfo { _valueN :: Name
, _valueWrap :: Name
, _valueBoxed :: Name
, _valueAdd :: Name
, _valueSub :: Name
, _valueMul :: Name
, _valueDiv :: Name
, _valueNeg :: Name
, _valueEq :: Name
, _valueNeq :: Name
, _valueLeq :: Name
, _valueGeq :: Name
, _valueGt :: Name
, _valueLt :: Name
}
makeInlineD :: Name -> DecQ
makeInlineD n = pragInlD n Inline FunLike AllPhases
makeVectorN :: Int -> Name
makeVectorN dim = mkName $ "V" ++ show dim
makeVectorType :: ValueInfo -> Int -> DecsQ
makeVectorType vi@ValueInfo{..} dim = do
#if MIN_VERSION_template_haskell(2,11,0)
notStrict_ <- bang noSourceUnpackedness noSourceStrictness
#else
notStrict_ <- notStrict
#endif
let vectorN = makeVectorN dim
constrArg = (notStrict_, ConT _valueN)
definers = [ defineLift
, defineLift2
, defineDot
, defineFromList
, defineToList
, deriveShow
, deriveArbitrary
]
impls <- concat <$> mapM (\f -> f vectorN vi dim) definers
#if MIN_VERSION_template_haskell(2,11,0)
let decs = DataD [] vectorN [] Nothing [NormalC vectorN (replicate dim constrArg)] [] : impls
#else
let decs = DataD [] vectorN [] [NormalC vectorN (replicate dim constrArg)] [] : impls
#endif
return decs
deriveShow :: Name -> ValueInfo -> Int -> DecsQ
deriveShow vectorN ValueInfo{..} dim = do
(pat, vars) <- conPE vectorN "a" dim
let f [] = [| "" |]
f (v:vs) = [| " " ++ show $(appE (conE _valueWrap) v) ++ $(f vs) |]
constructorShown = nameBase vectorN
showClause = clause [pat] (normalB [| constructorShown ++ $(f vars) |]) []
return <$> instanceD (cxt []) (appT (conT ''Show) (conT vectorN)) [funD 'show [showClause]]
dimE :: Int -> ExpQ
dimE = litE . integerL . fromIntegral
deriveArbitrary :: Name -> ValueInfo -> Int -> DecsQ
deriveArbitrary vectorN ValueInfo{..} dim = do
let arbClause = clause [] (normalB $ infixApp (fromListE vectorN) (varE '(<$>)) arbList) []
arbList = [| replicateM $(dimE dim) arbitrary |]
return <$> instanceD (cxt []) (appT (conT ''Arbitrary) (conT vectorN)) [funD 'arbitrary [arbClause]]
defineLift :: Name -> ValueInfo -> Int -> DecsQ
defineLift vectorN ValueInfo{..} dim = do
(funcP, funcV) <- newPE "f"
(vecP, elemVars) <- conPE vectorN "a" dim
let liftClause = clause [funcP, vecP] liftBody []
f = appE funcV
liftBody = normalB $ appsE (conE vectorN : fmap f elemVars)
liftName = mkName $ "lift" ++ nameBase vectorN
valueT = conT _valueN
vectorT = conT vectorN
liftType = arrowsT [arrowsT [valueT, valueT], vectorT, vectorT]
inlSigDef liftName liftType [liftClause]
defineLift2 :: Name -> ValueInfo -> Int -> DecsQ
defineLift2 vectorN ValueInfo{..} dim = do
(funcP, funcV) <- newPE "f"
(vecP, elemVars) <- conPE vectorN "a" dim
(vecP', elemVars') <- conPE vectorN "b" dim
let pairVars = zip elemVars elemVars'
liftClause = clause [funcP, vecP, vecP'] liftBody []
f (x, y) = appsE [funcV, x, y]
liftBody = normalB $ appsE (conE vectorN : fmap f pairVars)
liftName = mkName $ "lift2" ++ nameBase vectorN
valueT = conT _valueN
vectorT = conT vectorN
liftType = arrowsT [arrowsT [valueT, valueT, valueT], vectorT, vectorT, vectorT]
inlSigDef liftName liftType [liftClause]
dotE :: ValueInfo -> [ExpQ] -> [ExpQ] -> ExpQ
dotE ValueInfo{..} row col = foldl1 (infixApp' $ varE _valueAdd) products
where products = uncurry (infixApp' $ varE _valueMul) <$> zip row col
defineDot :: Name -> ValueInfo -> Int -> DecsQ
defineDot vectorN vi@ValueInfo{..} dim = do
(vecP, elemVars) <- conPE vectorN "a" dim
(vecP', elemVars') <- conPE vectorN "b" dim
let dotClause = clause [vecP, vecP'] (normalB $ dotE vi elemVars elemVars') []
dotName = mkName $ "dot" ++ nameBase vectorN
valueT = conT _valueN
vectorT = conT vectorN
dotType = arrowsT [vectorT, vectorT, valueT]
inlSigDef dotName dotType [dotClause]
defineJoinSplit :: ValueInfo -> (Int, Int) -> DecsQ
defineJoinSplit ValueInfo{..} (left, right) = do
let vecN = makeVectorN left
vecN' = makeVectorN right
vecN'' = makeVectorN (left + right)
(vecP, elemVs) <- conPE vecN "a" left
(vecP', elemVs') <- conPE vecN' "b" right
(vecP'', elemVs'') <- conPE vecN'' "c" (left + right)
let joinE = appsE (conE vecN'' : elemVs ++ elemVs')
joinC = simpleClause [vecP, vecP'] joinE
joinN = mkName $ "join" ++ show left ++ "v" ++ show right
joinT = arrowsT [vecT, vecT', vecT'']
(leftVs, rightVs) = splitAt left elemVs''
splitE = tupE [ appsE $ conE vecN : leftVs
, appsE $ conE vecN' : rightVs
]
splitC = simpleClause [vecP''] splitE
splitN = mkName $ "split" ++ show left ++ "v" ++ show right
splitT = arrowsT [vecT'', tupT [vecT, vecT']]
vecT = conT vecN
vecT' = conT vecN'
vecT'' = conT vecN''
joinI <- inlSigDef joinN joinT [joinC]
splitI <- inlSigDef splitN splitT [splitC]
return $ joinI ++ splitI
fromListN :: Name -> Name
fromListN = mkName . ("fromList" ++) . nameBase
fromListE :: Name -> ExpQ
fromListE = varE . fromListN
defineFromList :: Name -> ValueInfo -> Int -> DecsQ
defineFromList vectorN ValueInfo{..} dim = do
(pats, vars) <- genPEWith "x" dim (conP _valueWrap . return . varP) varE
let listPat = listP pats
vecE = appsE (conE vectorN : vars)
fromListClause0 = clause [listPat] (normalB vecE) []
fromListClause1 = clause [wildP] (normalB [| error "wrong number of elements" |]) []
vectorT = conT vectorN
argT = appT listT (conT _valueBoxed)
fromListType = arrowsT [argT, vectorT]
inlSigDef (fromListN vectorN) fromListType [fromListClause0, fromListClause1]
defineToList :: Name -> ValueInfo -> Int -> DecsQ
defineToList vectorN ValueInfo{..} dim = do
(vecP, elemVars) <- conPE vectorN "a" dim
let boxedElemVars = fmap (appE $ conE _valueWrap) elemVars
toListClause = clause [vecP] (normalB $ listE boxedElemVars) []
toListName = mkName $ "toList" ++ nameBase vectorN
vectorT = conT vectorN
resultT = appT listT (conT _valueBoxed)
toListType = arrowsT [vectorT, resultT]
inlSigDef toListName toListType [toListClause]
infixApp' :: ExpQ -> ExpQ -> ExpQ -> ExpQ
infixApp' = flip infixApp
inlSigDef :: Name -> TypeQ -> [ClauseQ] -> DecsQ
inlSigDef funN funT funCs = do
sigdef <- funSigDef funN funT funCs
inl <- makeInlineD funN
return $ sigdef ++ [inl]
funSigDef :: Name -> TypeQ -> [ClauseQ] -> DecsQ
funSigDef funN funT funCs = do
funSig <- sigD funN funT
funDef <- funD funN funCs
return [funSig, funDef]
tupT :: [TypeQ] -> TypeQ
tupT ts = foldl appT (tupleT $ length ts) ts
arrowsT :: [TypeQ] -> TypeQ
arrowsT [] = error "can't have no type"
arrowsT [t] = t
arrowsT (t:ts) = appT (appT arrowT t) $ arrowsT ts
newPE :: String -> Q (PatQ, ExpQ)
newPE x = do
x' <- newName x
return (varP x', varE x')
conPE :: Name -> String -> Int -> Q (PatQ, [ExpQ])
conPE conN x dim = do
(pats, vars) <- genPE x dim
return (conP conN pats, vars)
genPEWith :: String -> Int -> (Name -> PatQ) -> (Name -> ExpQ) -> Q ([PatQ], [ExpQ])
genPEWith x n mkP mkE = do
ids <- replicateM n (newName x)
return (fmap mkP ids, fmap mkE ids)
genPE :: String -> Int -> Q ([PatQ], [ExpQ])
genPE x n = genPEWith x n varP varE
simpleClause :: [PatQ] -> ExpQ -> ClauseQ
simpleClause ps e = clause ps (normalB e) []
| ublubu/shapes | shapes-math/src/Shapes/Linear/Template.hs | mit | 8,172 | 0 | 15 | 2,157 | 2,867 | 1,490 | 1,377 | 178 | 2 |
{-# LANGUAGE BangPatterns, DataKinds, DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Hadoop.Protos.ClientDatanodeProtocolProtos.ShutdownDatanodeResponseProto (ShutdownDatanodeResponseProto(..)) where
import Prelude ((+), (/))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
data ShutdownDatanodeResponseProto = ShutdownDatanodeResponseProto{}
deriving (Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data)
instance P'.Mergeable ShutdownDatanodeResponseProto where
mergeAppend ShutdownDatanodeResponseProto ShutdownDatanodeResponseProto = ShutdownDatanodeResponseProto
instance P'.Default ShutdownDatanodeResponseProto where
defaultValue = ShutdownDatanodeResponseProto
instance P'.Wire ShutdownDatanodeResponseProto where
wireSize ft' self'@(ShutdownDatanodeResponseProto)
= case ft' of
10 -> calc'Size
11 -> P'.prependMessageSize calc'Size
_ -> P'.wireSizeErr ft' self'
where
calc'Size = 0
wirePut ft' self'@(ShutdownDatanodeResponseProto)
= case ft' of
10 -> put'Fields
11 -> do
P'.putSize (P'.wireSize 10 self')
put'Fields
_ -> P'.wirePutErr ft' self'
where
put'Fields
= do
Prelude'.return ()
wireGet ft'
= case ft' of
10 -> P'.getBareMessageWith update'Self
11 -> P'.getMessageWith update'Self
_ -> P'.wireGetErr ft'
where
update'Self wire'Tag old'Self
= case wire'Tag of
_ -> let (field'Number, wire'Type) = P'.splitWireTag wire'Tag in P'.unknown field'Number wire'Type old'Self
instance P'.MessageAPI msg' (msg' -> ShutdownDatanodeResponseProto) ShutdownDatanodeResponseProto where
getVal m' f' = f' m'
instance P'.GPB ShutdownDatanodeResponseProto
instance P'.ReflectDescriptor ShutdownDatanodeResponseProto where
getMessageInfo _ = P'.GetMessageInfo (P'.fromDistinctAscList []) (P'.fromDistinctAscList [])
reflectDescriptorInfo _
= Prelude'.read
"DescriptorInfo {descName = ProtoName {protobufName = FIName \".hadoop.hdfs.ShutdownDatanodeResponseProto\", haskellPrefix = [MName \"Hadoop\",MName \"Protos\"], parentModule = [MName \"ClientDatanodeProtocolProtos\"], baseName = MName \"ShutdownDatanodeResponseProto\"}, descFilePath = [\"Hadoop\",\"Protos\",\"ClientDatanodeProtocolProtos\",\"ShutdownDatanodeResponseProto.hs\"], isGroup = False, fields = fromList [], descOneofs = fromList [], keys = fromList [], extRanges = [], knownKeys = fromList [], storeUnknown = False, lazyFields = False, makeLenses = False}"
instance P'.TextType ShutdownDatanodeResponseProto where
tellT = P'.tellSubMessage
getT = P'.getSubMessage
instance P'.TextMsg ShutdownDatanodeResponseProto where
textPut msg = Prelude'.return ()
textGet = Prelude'.return P'.defaultValue | alexbiehl/hoop | hadoop-protos/src/Hadoop/Protos/ClientDatanodeProtocolProtos/ShutdownDatanodeResponseProto.hs | mit | 3,045 | 1 | 16 | 539 | 554 | 291 | 263 | 53 | 0 |
import Data.Char (digitToInt)
solve :: Int
solve = sum $ map digitToInt $ show $ (2 :: Integer)^1000
main = putStrLn $ show solve
| pshendry/project-euler-solutions | 0016/solution.hs | mit | 132 | 0 | 9 | 26 | 58 | 31 | 27 | 4 | 1 |
{-# LANGUAGE RecordWildCards, ScopedTypeVariables #-}
module Main where
import Data.List
import Data.Monoid
import Text.Printf
import Options.Applicative
import Data.List.Split (splitOn)
import Control.Applicative
import Data.Char (isDigit, isSpace)
import Control.Monad (when)
import System.Process (readProcess)
import qualified Text.CSV as CSV
data Options = Options {
splitter :: String -> [String] -- delimiter splitting function
, csvInput :: Bool
, suppressRowDividers :: Bool
, printHeaderDivider :: Bool
, maxWidth :: Int
}
parseOptions :: Parser Options
parseOptions = Options
<$> (setDelimiter <|> whiteSpaceDelimiter <|> pure (splitOn "\t"))
<*> switch (short 'c' <> help "Parse input as CSV")
<*> switch (short 'R' <> help "Don't print row dividers")
<*> switch (short 'H' <> help "Print header row divider")
<*> parseMaxWidth
setDelimiter =
splitOn <$> strOption (metavar "DELIM" <> short 'd' <> help "Input field delimiter. Default is TAB (\\t).")
whiteSpaceDelimiter = flag'
words
(short 's' <> help "Use any run of whitespace as input field delimiter")
parseMaxWidth = read <$> strOption (
value "0" <> short 'w' <> metavar "WIDTH"
<> help "Max table width. Defaults to value of `tput cols` command.")
opts = info (helper <*> parseOptions)
(fullDesc
<> progDesc "Pretty format TSV input into table with aligned and wrapped cells"
<> header "table"
<> footer "https://github.com/danchoi/table")
main = do
Options {..} <- execParser opts
s <- getContents
let rawRows = if csvInput
then case CSV.parseCSV "" s of
Left err -> error $ "Error: " ++ show err
Right xs' -> stripBlankRows xs'
else map splitter . lines $ s
let initialWidths = getCellWidths rawRows
  -- Adjust the max width: subtract padding for the gutters on each side and the ' | ' between cells
maxWidth' <- do
if maxWidth == 0
then (read <$> readProcess "tput" ["cols"] [])
else (return maxWidth)
let adjMaxWidth = maxWidth' - 2 - ((length initialWidths - 1) * 3)
let adjustedWidths = adjustWidths adjMaxWidth initialWidths
let rows = mkCells adjustedWidths rawRows
mapM_ (\(n, row) -> do
when (not suppressRowDividers || (printHeaderDivider && n == 1)) $
putStrLn $ printDivider 1 $ map width row
putStrLn . printRow 1 $ row
) $ zip [0..] rows
when (not suppressRowDividers) $
putStrLn $ printDivider 1 $ map width (head rows)
stripBlankRows :: [[String]] -> [[String]]
stripBlankRows xs = [x | x <- xs, all (> 0) $ map length x ]
adjustWidths :: Int -> [Int] -> [Int]
adjustWidths maxWidth xs | sum xs <= maxWidth = xs
| otherwise = adjustWidths maxWidth $ reduceWidest xs
reduceWidest :: [Int] -> [Int]
reduceWidest xs = let m = maximum xs
in [ if x == m then x - 1 else x | x <- xs ]
data Cell = Cell {
content :: [String]
, width :: Int
, height :: Int
, isNumeric :: Bool
} deriving (Show)
-- | get initial column widths
getCellWidths :: [[String]] -> [Int]
getCellWidths rows = map (maximum . map length) . transpose $ rows
{- Each row is represented as a list of Cells, which carry dimension information. The
first value is the text content; the second is the normalized column
width for that cell. -}
mkCells :: [Int] -> [[String]] -> [[Cell]]
mkCells columnWidths rows =
let cols = transpose rows
colCells = map (\(width, cell) -> addCellDimensions width cell) $ zip columnWidths cols
in transpose colCells
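-- Small usage sketch (added for illustration; 'cellWidthSketch' and the
-- sample rows below are not part of the original program): for the rows
-- [["ab","c"],["d","efg"]] the initial column widths are [2,3], and mkCells
-- stamps that normalized width onto every cell of the corresponding column.
cellWidthSketch :: [[Int]]
cellWidthSketch = map (map width) (mkCells widths rows)   -- == [[2,3],[2,3]]
  where rows   = [["ab","c"],["d","efg"]]
        widths = getCellWidths rows                       -- == [2,3]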
{- Input is a column of strings. Wraps each entry in a Cell, which adds width and
height to each cell in the column, plus a flag indicating whether the column looks
numeric, which determines the alignment. Also wraps strings to the max cell width. -}
addCellDimensions :: Int -> [String] -> [Cell]
addCellDimensions maxWidth xs =
let w = min (maximum . map length $ xs) maxWidth
xs' = map (wrapString w) xs -- wrapped string content
numeric = all (all isDigit) (if length xs > 1 then (tail xs) else xs)
in map (\lines -> Cell lines w (length lines) numeric) xs'
wrapString :: Int -> String -> [String]
wrapString maxWidth x = map trim . wrapLine maxWidth $ x
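-- Illustration (added; 'wrapStringSketch' is not in the original source):
-- wrapping splits on the last space before the width limit and trims the
-- resulting fragments, e.g.
wrapStringSketch :: [String]
wrapStringSketch = wrapString 7 "foo bar baz"   -- == ["foo","bar baz"]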
-- | prints a row of cells with dividers
-- gutter is the width of the blank space at the left and right of the table
printRow :: Int -> [Cell] -> String
printRow gutter xs =
let rowHeight = maximum $ map height xs
subcells :: [[String]]
subcells = map content xs
lines = map (\n ->
let ss :: [String]
ss = map (cellLine n) xs
in formatRow ss)
[0..(rowHeight - 1)]
in mconcat $ intersperse "\n" $ lines
where formatRow :: [String] -> String
formatRow ss =
mconcat [margin gutter ' ' , (intercalate " | " ss) , margin gutter ' ']
-- prints the nth line of a cell
cellLine :: Int -> Cell -> String
cellLine n Cell {..} =
if n < length content
then printf fmt (content !! n)
else printf fmt ""
where fmt | isNumeric = "%" ++ show width ++ "s"
| otherwise = "%-" ++ show width ++ "s"
margin :: Int -> Char -> String
margin w c = take w $ repeat c
printDivider :: Int -> [Int] -> String
printDivider gutter widths =
mconcat [margin gutter '-'
, (intercalate "-+-"
$ map (\w -> take w $ repeat '-') widths)
, margin gutter '-']
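-- Illustration (added; 'dividerSketch' is not in the original source): with a
-- gutter of 1 and column widths [3,2] the divider is ten characters wide, e.g.
dividerSketch :: String
dividerSketch = printDivider 1 [3,2]   -- == "-----+----"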
------------------------------------------------------------------------
-- Word wrapping
-- taken from http://moreindirection.blogspot.com/2010/08/blog-post.html
trim :: String -> String
trim = trimAndReverse . trimAndReverse
where trimAndReverse = reverse . dropWhile isSpace
reverseBreak :: (a -> Bool) -> [a] -> ([a], [a])
reverseBreak f xs = (reverse before, reverse after)
where (after, before) = break f $ reverse xs
wrapLine :: Int -> String -> [String]
wrapLine maxLen line
| length line <= maxLen = [line]
| any isSpace beforeMax = beforeSpace : (wrapLine maxLen $ afterSpace ++ afterMax)
| otherwise = beforeMax : wrapLine maxLen afterMax
where (beforeMax, afterMax) = splitAt maxLen line
(beforeSpace, afterSpace) = reverseBreak isSpace beforeMax
| danchoi/table | Main.hs | mit | 6,346 | 0 | 21 | 1,632 | 1,929 | 992 | 937 | 132 | 4 |
{-# LANGUAGE NamedFieldPuns #-}
module Tracks.Train where
import Tracks.Network
import Tracks.Service
import Tracks.Signals
import Control.Monad.STM
import Data.Maybe (fromMaybe)
data Train = Train { trainName :: String
, block :: Block
, service :: [Station]
, signals :: Signals
}
instance Show Train where
show Train { trainName, block, service } =
trainName ++ "@" ++ show block ++ " -> " ++ show (head service)
placeTrain :: String -> [Station] -> Line -> Signals -> Network -> STM (Maybe Train)
placeTrain name stations@(start:prev:_) line signals network = do
let platform = Platform prev start line
platformClear <- isClear platform signals
if (not platformClear) || (not $ isServiceValid stations line network)
then return Nothing
else do
setOccupied platform signals
return $ Just Train { trainName = name
, block = platform
, service = drop 1 $ cycle stations
, signals = signals
}
nextBlock :: Train -> Block
nextBlock Train { block = Between prev next line } =
Platform prev next line
nextBlock Train { block = Platform prev current line, service } =
let next = head service
in Between current next line
proceed :: Train -> Block -> STM Bool
proceed train@Train { block, service, signals } next = do
nextClear <- isClear next signals
if not nextClear
then return False
else do
setOccupied next signals
setClear block signals
return True
stepTrain :: Train -> STM Train
stepTrain train@Train { block = Between _ _ _, service } = do
let next = nextBlock train
moved <- proceed train next
if not moved
then return train
else return train { block = next
, service = drop 1 service
}
stepTrain train@Train { block = Platform _ _ _ } = do
let next = nextBlock train
moved <- proceed train next
if not moved
then return train
else return train { block = next }
| derkyjadex/tracks | Tracks/Train.hs | mit | 2,367 | 0 | 15 | 909 | 681 | 345 | 336 | 55 | 3 |
module GHCJS.DOM.TouchList (
) where
| manyoo/ghcjs-dom | ghcjs-dom-webkit/src/GHCJS/DOM/TouchList.hs | mit | 39 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
main :: IO ()
main = do
a <- fmap read getLine
s <- getLine
putStrLn $ f s a
main
f :: String -> Int -> String
f [] x | x > 5 = "no string, big int"
f [] x | x < -5 = "no string, small int"
f [c] _ = "single char"
f _ _ = "other"
| nushio3/formura | attic/test-patternmatch.hs | mit | 239 | 0 | 9 | 72 | 132 | 62 | 70 | 11 | 1 |
module Expression.Tokens where
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Language
import qualified Text.ParserCombinators.Parsec.Token as P
me = P.makeTokenParser meDef
lexeme = P.lexeme me
parens = P.parens me
braces = P.braces me
brackets = P.brackets me
squares = P.squares me
semiSep = P.semiSep me
semiSep1 = P.semiSep1 me
commaSep = P.commaSep me
commaSep1 = P.commaSep1 me
whiteSpace = P.whiteSpace me
symbol = P.symbol me
identifier = P.identifier me
reserved = P.reserved me
reservedOp = P.reservedOp me
natural = P.natural me
integer = P.integer me
stringLiteral = P.stringLiteral me
meDef
= haskellStyle
{ opStart = opLetter meDef
, opLetter = oneOf ","
      -- must also understand field references like "@3"
, identStart = letter <|> oneOf "_'@"
, identLetter = alphaNum <|> oneOf "_'@"
}
| marcellussiegburg/autotool | collection/src/Expression/Tokens.hs | gpl-2.0 | 1,085 | 0 | 8 | 382 | 261 | 140 | 121 | 28 | 1 |
module GRPTests
( test
) where
import GRPSeed
import GRPFitness
import System.Random
import GRPCommon
import PartitioningProblem
import GRPMath
-- There should really be some unit tests here.
-- Currently, all this file does is monitor the means and variances of the fitness values of various act functions.
-- This is so that the parameters for the fitness checks of actual genomes can be validated to be reasonably accurate.
test :: IO ()
test = do
let (cnt, lng) = (500,20)
(m1,v1) <- fitnessStability act [] cnt lng
(m3,v3) <- fitnessStability goodNAct [] cnt lng
(m2,v2) <- fitnessStability badButBetter [] cnt lng
putStrLn ("means: " ++ (show (m1/m2)) ++ " variance: " ++ (show(v1/v2)))
fitnessStability :: ([StdGen] -> State -> Input -> (Output, State)) -> State -> Int -> Int -> IO (Float, Float)
fitnessStability act state testCount testLength = do
inputs <- sequence $ take testCount $ repeat (generateInput testLength) :: IO [Input]
rng <- newStdGen
let outputs = map fst $ map (act [rng] state) inputs
fitValues <- mapM (\(i,o) -> fitness i o) (zip inputs outputs)
--putStrLn $ show fitValues
putStrLn ("mean: " ++ show (mean fitValues) ++ " stdDev: " ++ show (sqrt $ variance fitValues) ++ " relative Dev: " ++ (show ((sqrt $ variance fitValues) / (mean fitValues))))
return (mean fitValues, variance fitValues)
goodNAct :: [StdGen] -> State -> Input -> (Output, State)
goodNAct rngs state inp = (foldr (\num (a, b) -> if sum a < sum b then ( (num:a) , b) else (a, (num:b) ) ) ([],[]) inp , state)
badButBetter :: [StdGen] -> State -> Input -> (Output, State)
badButBetter rngs state (a:b:xs) =
let (x,y) = ( take ( div ( length xs ) 2 ) xs, drop ( div ( length xs ) 2 ) xs )
in
if (sum x > sum y)
then
if sum x > sum (a:y)
then ((x , a:b:y), state)
else ((b:x , a:y ), state)
else
if sum (a:x) > sum y
then ((a:x , b:y ), state)
else ((a:b:x, y ), state)
| vektordev/GP | src/GRPTests.hs | gpl-2.0 | 1,959 | 0 | 16 | 430 | 852 | 454 | 398 | 35 | 4 |
{-# LANGUAGE FlexibleInstances, UndecidableInstances, OverloadedStrings, CPP #-}
module Gateway.Help (
Help (..),
Source (..),
drift
) where
import Autolib.Output
#if ( __GLASGOW_HASKELL__ >= 710 )
-- must be created with: autotool-package-translator > ./src/Package_Translator.hs
import Package_Translator
import qualified Data.Map as M
#endif
import Data.Typeable
import Data.List ( intersperse, isPrefixOf )
import Data.Char (isDigit)
class ( Typeable a ) => Help a where
help :: a -> Output
help x = doc_link $ typeOf x
instance ( Typeable a ) => Help a
tuple :: [Output] -> Output
tuple os = besides $ [ Text "(" ] ++ intersperse (Text ", ") os ++ [ Text ")" ]
doc_link :: TypeRep -> Output
doc_link t =
let ( tyc, tapps ) = splitTyConApp t
twodee top entries = Above top $ Itemize entries
onedee top entries = besides
$ top : map paren entries
paren entry = besides [ Text "(", entry, Text ")" ]
tapps' = map doc_link tapps
in case show tyc of
"[]" -> besides $ [ Text "[" ] ++ tapps' ++ [ Text "]" ]
"(,)" -> tuple tapps'
"(,,)" -> tuple tapps'
"(,,,)" -> tuple tapps'
_ -> onedee ( tycon_link tyc ) tapps'
besides = foldr1 Beside
tycon_link tyc =
let local = "http://autotool.imn.htwk-leipzig.de/docs/"
hackage = "http://hackage.haskell.org/package/"
ty = tyConName tyc
mod = redot $ undot $ tyConModule tyc
pack =
#if (__GLASGOW_HASKELL__ >= 710 )
case M.lookup (tyConPackage tyc) package_translator of
Nothing -> tyConPackage tyc ; Just s -> s
#else
unversion $ tyConPackage tyc
#endif
unversion = reverse
. dropWhile ( \ c -> isDigit c || c == '.' || c == '-' )
. reverse
in Named_Link ( show tyc )
$ if isPrefixOf "autotool" pack || isPrefixOf "autolib" pack
then local ++ pack ++ "/" ++ mod ++ ".html#t:" ++ ty
else hackage ++ pack ++ "/docs/" ++ mod ++ ".html#t:" ++ ty
builtin t = undot $ case t of
"Int" -> "GHC.Types"
"Char" -> "GHC.Types"
"Float" -> "GHC.Types"
"Double" -> "GHC.Types"
"Integer" -> "GHC.Integer"
"Bool" -> "GHC.Bool"
"Maybe" -> "Data.Maybe"
"Either" -> "Data.Either"
"()" -> "GHC.Unit"
_ -> "Unknown"
undot xs =
let ( pre, post ) = span (/= '.') xs
in pre : if null post then [] else undot $ tail post
redot xs = concat $ intersperse "-" xs -- haddock-0.7 needs a dash in module file names
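-- Small usage sketch (added for illustration; 'moduleDashSketch' is not part
-- of the original module): undot splits a module name on dots and redot
-- re-joins the pieces with dashes, matching haddock's file naming, e.g.
moduleDashSketch :: String
moduleDashSketch = redot (undot "Data.Maybe")   -- == "Data-Maybe"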
----------------------------------------------------------------------
class Source a where
source :: a -> Maybe FilePath
instance Source a where
source x = Nothing
drift :: FilePath -> Maybe FilePath
drift f = Just $ f ++ ".drift"
source_link :: Source a
=> a
-> Output
source_link x = case source x of
Nothing -> Text "no source location found"
Just s ->
let archive = "http://141.57.11.163/cgi-bin/cvsweb/tool/src/"
in Beside ( Text "source location" ) ( Link $ archive ++ s )
| marcellussiegburg/autotool | interface/src/Gateway/Help.hs | gpl-2.0 | 3,053 | 8 | 18 | 834 | 908 | 463 | 445 | 73 | 10 |
module AST(AST(..), printCode) where
import RegFile
data AST =
Symbol Char
| Definition
| Index
| Lookup
| Duplicate
| Execute
| Self
| Parents
| Quine
| Conditional
| Swap
| Register RegisterIndex
| Code [AST]
| LString String
| LNumber Integer
| NewFrame AST
deriving (Show, Eq)
printCode :: AST -> String
printCode = show
| kavigupta/N-programming-language | src/AST.hs | gpl-3.0 | 402 | 0 | 7 | 134 | 111 | 69 | 42 | 22 | 1 |
{-# LANGUAGE ImplicitParams #-}
------------------------------------------------------------------------------------------------
-- |
-- Module : Pearl.GaDtTLHT.Internal.Hom
-- Description : Homs
-- Copyright : (c) Drew Day 2012
-- (c) Shin-Cheng Mu 2011
-- (c) Akimasa Morihata 2011
-- License : BSD-style (see LICENSE)
-- Maintainer : Drew Day <[email protected]>
-- Stability : stable
-- Portability : portable (ViewPatterns, PatternGuards)
--
------------------------------------------------------------------------------------------------
module Pearl.GaDtTLHT.Internal.Hom where
import Data.Maybe
{-
hom :: [a] -> b
e :: b
k :: a -> b
f :: (b,b) -> b
-}
-- |
-- This is the a "correct" definition of hom, except in (at least) two senses:
--
-- 1. f is the leftmost biased choice (it splits (h [head], h [tail])
--
-- 2. in pointfree style, f, k, should be implicit.
hom :: ((b,b) -> b) -> (a -> b) -> b -> [a] -> b
hom f k e [] = e
hom f k e [x] = k x
hom f k e (x:xs) = f (h [x], h xs)
where h = hom f k e
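-- Illustrative sketch (added; 'homSumSketch' is not part of the original
-- module): taking f = uncurry (+), k = id and e = 0, the definition above
-- unfolds as f (h [1], h [2,3,4]) = 1 + (2 + (3 + 4)) = 10, i.e. it agrees
-- with sumH defined further below.
homSumSketch :: Int
homSumSketch = hom (\(l, r) -> l + r) id 0 [1, 2, 3, 4]   -- == 10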
-- The "trivial examples" (along with their definitions) in Gibbons' paper are:
--
-- * id
--
-- @
-- id x = x
-- @
idH = hom f k e
where
f (l,r) = id l ++ id r
k x = x
e = []
--idHr = foldr (id) []
--idHl = foldl (id) []
--
-- * map f
--
-- @
-- map _ [ ] = []
-- map f (x:xs) = f x : map f xs
-- @
--
--
lengthH = length' . map (\_ -> 1)
where
length' = sumH
lenH = hom f k e
where
f (l,r) = l
k x = 1
e = 0
-- lenHr = foldr (length) 0
-- lenHl = foldl (length) 0
-- * head
--
-- @
-- head (x:_) = x
-- head [] = undefined
-- @
--
headH = hom f k e
where
f (Just l, _) = Just l
k x = Just x
e = Nothing
headHr = foldr (head) Nothing
-- headHl = foldl (fst) []
-- * max
-- @ max x y = if x <= y then y else x @
maxH = hom f k e
where
f (l,r) = l `max` r
k x = x
e = minBound :: Int
maxHr = foldr (max) (minBound :: Int)
maxHl = foldl (max) (minBound :: Int)
-- * min
-- @ min x y = if x <= y then x else y @
minH = hom f k e
where
f (l,r) = l `min` r
k x = x
e = maxBound :: Int
minHr = foldr (min) (maxBound :: Int)
minHl = foldl (min) (maxBound :: Int)
-- * any
-- @ any p = or . map p @
--
anyH = hom f k e
where
f (l,r) = l || r
k x = x
e = False
anyHr = foldr (||) False
anyHl = foldl (||) False
-- * all
-- @ all p = and . map p @
--
allH = hom f k e
where
f (l,r) = (&&) l r
k x = x
e = True
allHr = foldr (&&) True
allHl = foldl (&&) True
-- * concat
--
-- @
-- concat = foldr (++) []
-- @
--
-- * (++)
--
-- @
-- [ ] ++ ys = ys
-- (x:xs) ++ ys = x : (xs ++ ys)
--
-- -- OR --
--
-- xs ++ ys = foldr (:) ys xs
-- @
--
catH = hom f k e
where
f (l,r) = l ++ r
k x = x
e = []
catHr = foldr (++) []
catHl = foldl (++) []
-- * sum
-- @ sum = foldl (+) 0 @
--
sumH = hom f k e
where
f (l,r) = l + r
k x = x
e = 0
sumHr = foldr (+) 0 {- xs -}
sumHl = foldl (+) 0 {- xs -}
sumHLR xs = (sumH, sumHr, sumHl)
| technogeeky/centered | src/Pearl/GaDtTLHT/Internal/Hom.hs | gpl-3.0 | 3,825 | 0 | 9 | 1,695 | 889 | 523 | 366 | 60 | 1 |
module HEP.Automation.MadGraph.Dataset.Set20110712set5 where
import HEP.Storage.WebDAV.Type
import HEP.Automation.MadGraph.Model
import HEP.Automation.MadGraph.Machine
import HEP.Automation.MadGraph.UserCut
import HEP.Automation.MadGraph.SetupType
import HEP.Automation.MadGraph.Model.C1V
import HEP.Automation.MadGraph.Dataset.Processes
import HEP.Automation.JobQueue.JobType
processSetup :: ProcessSetup C1V
processSetup = PS {
model = C1V
, process = preDefProcess TTBar0or1J
, processBrief = "TTBar0or1J"
, workname = "711_C1V_TTBar0or1J_TEV"
}
paramSet :: [ModelParam C1V]
paramSet = [ C1VParam { mnp = m, gnpR = g, gnpL = 0 }
| (m,g) <- (map (\x->(200,x)) [0.65,0.70..0.95] )
++ (map (\x->(300,x)) [1.05,1.1..1.30] )
++ (map (\x->(400,x)) [1.15,1.20..1.40] )
++ (map (\x->(600,x)) [1.65,1.70..1.90] )
++ (map (\x->(800,x)) [1.55,1.60..2.2] ) ]
-- | (m,g) <- (map (\x->(200,x)) [0.4,0.45,0.50,0.55,0.60] )
-- ++ (map (\x->(300,x)) [0.4,0.45..1.0] )
-- ++ (map (\x->(400,x)) [0.6,0.65..1.10] )
-- ++ (map (\x->(600,x)) [1.0,1.05..1.60] )
-- ++ (map (\x->(800,x)) [1.30,1.35..1.50] ) ]
-- [ (200,0.5), (200,1.0)
-- , (400,0.5), (400,1.0), (400,1.5), (400,2.0)
-- , (600,0.5), (600,1.0), (600,1.5), (600,2.0), (600,2.5)
-- , (800,0.5), (800,1.0), (800,1.5), (800,2.0), (800,2.5), (800,3.0), (800,3.5)
-- , (1000,0.5), (1000,1.0), (1000,1.5), (1000,2.0), (1000,2.5), (1000,3.0), (1000,3.5), (1000,4.0) ] ]
sets :: [Int]
sets = [1]
ucut :: UserCut
ucut = UserCut {
uc_metcut = 15.0
, uc_etacutlep = 2.7
, uc_etcutlep = 18.0
, uc_etacutjet = 2.7
, uc_etcutjet = 15.0
}
eventsets :: [EventSet]
eventsets =
[ EventSet processSetup
(RS { param = p
, numevent = 100000
, machine = TeVatron
, rgrun = Fixed
, rgscale = 200.0
, match = MLM
, cut = DefCut
, pythia = RunPYTHIA
, usercut = UserCutDef ucut -- NoUserCutDef --
, pgs = RunPGS
, jetalgo = Cone 0.4
, uploadhep = NoUploadHEP
, setnum = num
})
| p <- paramSet , num <- sets ]
webdavdir :: WebDAVRemoteDir
webdavdir = WebDAVRemoteDir "paper3/ttbar_TEV_c1v_pgsscan"
| wavewave/madgraph-auto-dataset | src/HEP/Automation/MadGraph/Dataset/Set20110716set2.hs | gpl-3.0 | 2,640 | 0 | 16 | 902 | 536 | 336 | 200 | 50 | 1 |
-----------------------------------------------------------------------------
--
-- Module : Genimplicit_types
-- Copyright : (c) hokum
-- License : GPL3
--
-- Maintainer :
-- Stability : experimental
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
{-# LANGUAGE DeriveGeneric #-}
module Genimplicit_types (
BlenderObject (..)
,BlenderData (..)
,Generation_settings (..)
) where
import Data.Aeson
import qualified Data.Text as T
import GHC.Generics
-- | Type of each JSON entry in record syntax.
data BlenderObject =
BlenderObject { name :: !T.Text
,type_ :: !T.Text
,group :: ![T.Text]
,rounding :: Double
,x :: Double
,y :: Double
,z :: Double
,dim_x :: Double
,dim_y :: Double
,dim_z :: Double
,scale_x :: Double
,scale_y :: Double
,scale_z :: Double
,rot_x :: Double
,rot_y :: Double
,rot_z :: Double
,rot_w :: Double
} deriving (Show,Generic)
data BlenderData =
BlenderData { objects :: [BlenderObject]
,groups :: ![T.Text]
} deriving (Show,Generic)
-- Instances to convert our type to/from JSON.
instance FromJSON BlenderObject
instance ToJSON BlenderObject
instance FromJSON BlenderData
instance ToJSON BlenderData
data Generation_settings = Generation_settings
{
overall_union_rounding :: Double
}
| Collocalini/GenImplicit | genimplicit/src/Genimplicit_types.hs | gpl-3.0 | 1,690 | 0 | 11 | 594 | 279 | 176 | 103 | 45 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Chat.Spaces.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns a space.
--
-- /See:/ <https://developers.google.com/hangouts/chat Google Chat API Reference> for @chat.spaces.get@.
module Network.Google.Resource.Chat.Spaces.Get
(
-- * REST Resource
SpacesGetResource
-- * Creating a Request
, spacesGet
, SpacesGet
-- * Request Lenses
, sgXgafv
, sgUploadProtocol
, sgAccessToken
, sgUploadType
, sgName
, sgCallback
) where
import Network.Google.Chat.Types
import Network.Google.Prelude
-- | A resource alias for @chat.spaces.get@ method which the
-- 'SpacesGet' request conforms to.
type SpacesGetResource =
"v1" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] Space
-- | Returns a space.
--
-- /See:/ 'spacesGet' smart constructor.
data SpacesGet =
SpacesGet'
{ _sgXgafv :: !(Maybe Xgafv)
, _sgUploadProtocol :: !(Maybe Text)
, _sgAccessToken :: !(Maybe Text)
, _sgUploadType :: !(Maybe Text)
, _sgName :: !Text
, _sgCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'SpacesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sgXgafv'
--
-- * 'sgUploadProtocol'
--
-- * 'sgAccessToken'
--
-- * 'sgUploadType'
--
-- * 'sgName'
--
-- * 'sgCallback'
spacesGet
:: Text -- ^ 'sgName'
-> SpacesGet
spacesGet pSgName_ =
SpacesGet'
{ _sgXgafv = Nothing
, _sgUploadProtocol = Nothing
, _sgAccessToken = Nothing
, _sgUploadType = Nothing
, _sgName = pSgName_
, _sgCallback = Nothing
}
-- | V1 error format.
sgXgafv :: Lens' SpacesGet (Maybe Xgafv)
sgXgafv = lens _sgXgafv (\ s a -> s{_sgXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
sgUploadProtocol :: Lens' SpacesGet (Maybe Text)
sgUploadProtocol
= lens _sgUploadProtocol
(\ s a -> s{_sgUploadProtocol = a})
-- | OAuth access token.
sgAccessToken :: Lens' SpacesGet (Maybe Text)
sgAccessToken
= lens _sgAccessToken
(\ s a -> s{_sgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
sgUploadType :: Lens' SpacesGet (Maybe Text)
sgUploadType
= lens _sgUploadType (\ s a -> s{_sgUploadType = a})
-- | Required. Resource name of the space, in the form \"spaces\/*\".
-- Example: spaces\/AAAAMpdlehY
sgName :: Lens' SpacesGet Text
sgName = lens _sgName (\ s a -> s{_sgName = a})
-- | JSONP
sgCallback :: Lens' SpacesGet (Maybe Text)
sgCallback
= lens _sgCallback (\ s a -> s{_sgCallback = a})
instance GoogleRequest SpacesGet where
type Rs SpacesGet = Space
type Scopes SpacesGet = '[]
requestClient SpacesGet'{..}
= go _sgName _sgXgafv _sgUploadProtocol
_sgAccessToken
_sgUploadType
_sgCallback
(Just AltJSON)
chatService
where go
= buildClient (Proxy :: Proxy SpacesGetResource)
mempty
| brendanhay/gogol | gogol-chat/gen/Network/Google/Resource/Chat/Spaces/Get.hs | mpl-2.0 | 4,029 | 0 | 15 | 984 | 694 | 405 | 289 | 97 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Drive.Types.Product
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.Drive.Types.Product where
import Network.Google.Drive.Types.Sum
import Network.Google.Prelude
-- | A list of files.
--
-- /See:/ 'fileList' smart constructor.
data FileList = FileList'
{ _flNextPageToken :: !(Maybe Text)
, _flKind :: !Text
, _flFiles :: !(Maybe [File])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'FileList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'flNextPageToken'
--
-- * 'flKind'
--
-- * 'flFiles'
fileList
:: FileList
fileList =
FileList'
{ _flNextPageToken = Nothing
, _flKind = "drive#fileList"
, _flFiles = Nothing
}
-- | The page token for the next page of files. This will be absent if the
-- end of the files list has been reached. If the token is rejected for any
-- reason, it should be discarded, and pagination should be restarted from
-- the first page of results.
flNextPageToken :: Lens' FileList (Maybe Text)
flNextPageToken
= lens _flNextPageToken
(\ s a -> s{_flNextPageToken = a})
-- | Identifies what kind of resource this is. Value: the fixed string
-- \"drive#fileList\".
flKind :: Lens' FileList Text
flKind = lens _flKind (\ s a -> s{_flKind = a})
-- | The list of files. If nextPageToken is populated, then this list may be
-- incomplete and an additional page of results should be fetched.
flFiles :: Lens' FileList [File]
flFiles
= lens _flFiles (\ s a -> s{_flFiles = a}) . _Default
. _Coerce
instance FromJSON FileList where
parseJSON
= withObject "FileList"
(\ o ->
FileList' <$>
(o .:? "nextPageToken") <*>
(o .:? "kind" .!= "drive#fileList")
<*> (o .:? "files" .!= mempty))
instance ToJSON FileList where
toJSON FileList'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _flNextPageToken,
Just ("kind" .= _flKind), ("files" .=) <$> _flFiles])
-- | The file content to which the comment refers, typically within the
-- anchor region. For a text file, for example, this would be the text at
-- the location of the comment.
--
-- /See:/ 'commentQuotedFileContent' smart constructor.
data CommentQuotedFileContent = CommentQuotedFileContent'
{ _cqfcValue :: !(Maybe Text)
, _cqfcMimeType :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CommentQuotedFileContent' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cqfcValue'
--
-- * 'cqfcMimeType'
commentQuotedFileContent
:: CommentQuotedFileContent
commentQuotedFileContent =
CommentQuotedFileContent'
{ _cqfcValue = Nothing
, _cqfcMimeType = Nothing
}
-- | The quoted content itself. This is interpreted as plain text if set
-- through the API.
cqfcValue :: Lens' CommentQuotedFileContent (Maybe Text)
cqfcValue
= lens _cqfcValue (\ s a -> s{_cqfcValue = a})
-- | The MIME type of the quoted content.
cqfcMimeType :: Lens' CommentQuotedFileContent (Maybe Text)
cqfcMimeType
= lens _cqfcMimeType (\ s a -> s{_cqfcMimeType = a})
instance FromJSON CommentQuotedFileContent where
parseJSON
= withObject "CommentQuotedFileContent"
(\ o ->
CommentQuotedFileContent' <$>
(o .:? "value") <*> (o .:? "mimeType"))
instance ToJSON CommentQuotedFileContent where
toJSON CommentQuotedFileContent'{..}
= object
(catMaybes
[("value" .=) <$> _cqfcValue,
("mimeType" .=) <$> _cqfcMimeType])
-- | The user\'s storage quota limits and usage. All fields are measured in
-- bytes.
--
-- /See:/ 'aboutStorageQuota' smart constructor.
data AboutStorageQuota = AboutStorageQuota'
{ _asqUsageInDriveTrash :: !(Maybe (Textual Int64))
, _asqLimit :: !(Maybe (Textual Int64))
, _asqUsage :: !(Maybe (Textual Int64))
, _asqUsageInDrive :: !(Maybe (Textual Int64))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AboutStorageQuota' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'asqUsageInDriveTrash'
--
-- * 'asqLimit'
--
-- * 'asqUsage'
--
-- * 'asqUsageInDrive'
aboutStorageQuota
:: AboutStorageQuota
aboutStorageQuota =
AboutStorageQuota'
{ _asqUsageInDriveTrash = Nothing
, _asqLimit = Nothing
, _asqUsage = Nothing
, _asqUsageInDrive = Nothing
}
-- | The usage by trashed files in Google Drive.
asqUsageInDriveTrash :: Lens' AboutStorageQuota (Maybe Int64)
asqUsageInDriveTrash
= lens _asqUsageInDriveTrash
(\ s a -> s{_asqUsageInDriveTrash = a})
. mapping _Coerce
-- | The usage limit, if applicable. This will not be present if the user has
-- unlimited storage.
asqLimit :: Lens' AboutStorageQuota (Maybe Int64)
asqLimit
= lens _asqLimit (\ s a -> s{_asqLimit = a}) .
mapping _Coerce
-- | The total usage across all services.
asqUsage :: Lens' AboutStorageQuota (Maybe Int64)
asqUsage
= lens _asqUsage (\ s a -> s{_asqUsage = a}) .
mapping _Coerce
-- | The usage by all files in Google Drive.
asqUsageInDrive :: Lens' AboutStorageQuota (Maybe Int64)
asqUsageInDrive
= lens _asqUsageInDrive
(\ s a -> s{_asqUsageInDrive = a})
. mapping _Coerce
instance FromJSON AboutStorageQuota where
parseJSON
= withObject "AboutStorageQuota"
(\ o ->
AboutStorageQuota' <$>
(o .:? "usageInDriveTrash") <*> (o .:? "limit") <*>
(o .:? "usage")
<*> (o .:? "usageInDrive"))
instance ToJSON AboutStorageQuota where
toJSON AboutStorageQuota'{..}
= object
(catMaybes
[("usageInDriveTrash" .=) <$> _asqUsageInDriveTrash,
("limit" .=) <$> _asqLimit,
("usage" .=) <$> _asqUsage,
("usageInDrive" .=) <$> _asqUsageInDrive])
-- | A reply to a comment on a file.
--
-- /See:/ 'reply' smart constructor.
data Reply = Reply'
{ _rHTMLContent :: !(Maybe Text)
, _rModifiedTime :: !(Maybe DateTime')
, _rCreatedTime :: !(Maybe DateTime')
, _rKind :: !Text
, _rAction :: !(Maybe Text)
, _rContent :: !(Maybe Text)
, _rAuthor :: !(Maybe User)
, _rId :: !(Maybe Text)
, _rDeleted :: !(Maybe Bool)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Reply' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rHTMLContent'
--
-- * 'rModifiedTime'
--
-- * 'rCreatedTime'
--
-- * 'rKind'
--
-- * 'rAction'
--
-- * 'rContent'
--
-- * 'rAuthor'
--
-- * 'rId'
--
-- * 'rDeleted'
reply
:: Reply
reply =
Reply'
{ _rHTMLContent = Nothing
, _rModifiedTime = Nothing
, _rCreatedTime = Nothing
, _rKind = "drive#reply"
, _rAction = Nothing
, _rContent = Nothing
, _rAuthor = Nothing
, _rId = Nothing
, _rDeleted = Nothing
}
-- | The content of the reply with HTML formatting.
rHTMLContent :: Lens' Reply (Maybe Text)
rHTMLContent
= lens _rHTMLContent (\ s a -> s{_rHTMLContent = a})
-- | The last time the reply was modified (RFC 3339 date-time).
rModifiedTime :: Lens' Reply (Maybe UTCTime)
rModifiedTime
= lens _rModifiedTime
(\ s a -> s{_rModifiedTime = a})
. mapping _DateTime
-- | The time at which the reply was created (RFC 3339 date-time).
rCreatedTime :: Lens' Reply (Maybe UTCTime)
rCreatedTime
= lens _rCreatedTime (\ s a -> s{_rCreatedTime = a})
. mapping _DateTime
-- | Identifies what kind of resource this is. Value: the fixed string
-- \"drive#reply\".
rKind :: Lens' Reply Text
rKind = lens _rKind (\ s a -> s{_rKind = a})
-- | The action the reply performed to the parent comment. Valid values are:
-- - resolve - reopen
rAction :: Lens' Reply (Maybe Text)
rAction = lens _rAction (\ s a -> s{_rAction = a})
-- | The plain text content of the reply. This field is used for setting the
-- content, while htmlContent should be displayed. This is required on
-- creates if no action is specified.
rContent :: Lens' Reply (Maybe Text)
rContent = lens _rContent (\ s a -> s{_rContent = a})
-- | The user who created the reply.
rAuthor :: Lens' Reply (Maybe User)
rAuthor = lens _rAuthor (\ s a -> s{_rAuthor = a})
-- | The ID of the reply.
rId :: Lens' Reply (Maybe Text)
rId = lens _rId (\ s a -> s{_rId = a})
-- | Whether the reply has been deleted. A deleted reply has no content.
rDeleted :: Lens' Reply (Maybe Bool)
rDeleted = lens _rDeleted (\ s a -> s{_rDeleted = a})
instance FromJSON Reply where
parseJSON
= withObject "Reply"
(\ o ->
Reply' <$>
(o .:? "htmlContent") <*> (o .:? "modifiedTime") <*>
(o .:? "createdTime")
<*> (o .:? "kind" .!= "drive#reply")
<*> (o .:? "action")
<*> (o .:? "content")
<*> (o .:? "author")
<*> (o .:? "id")
<*> (o .:? "deleted"))
instance ToJSON Reply where
toJSON Reply'{..}
= object
(catMaybes
[("htmlContent" .=) <$> _rHTMLContent,
("modifiedTime" .=) <$> _rModifiedTime,
("createdTime" .=) <$> _rCreatedTime,
Just ("kind" .= _rKind), ("action" .=) <$> _rAction,
("content" .=) <$> _rContent,
("author" .=) <$> _rAuthor, ("id" .=) <$> _rId,
("deleted" .=) <$> _rDeleted])
-- | A map of source MIME type to possible targets for all supported imports.
--
-- /See:/ 'aboutImportFormats' smart constructor.
newtype AboutImportFormats = AboutImportFormats'
{ _aifAddtional :: HashMap Text [Text]
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AboutImportFormats' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aifAddtional'
aboutImportFormats
:: HashMap Text [Text] -- ^ 'aifAddtional'
-> AboutImportFormats
aboutImportFormats pAifAddtional_ =
AboutImportFormats'
{ _aifAddtional = _Coerce # pAifAddtional_
}
aifAddtional :: Lens' AboutImportFormats (HashMap Text [Text])
aifAddtional
= lens _aifAddtional (\ s a -> s{_aifAddtional = a})
. _Coerce
instance FromJSON AboutImportFormats where
parseJSON
= withObject "AboutImportFormats"
(\ o -> AboutImportFormats' <$> (parseJSONObject o))
instance ToJSON AboutImportFormats where
toJSON = toJSON . _aifAddtional
-- | Capabilities the current user has on the file.
--
-- /See:/ 'fileCapabilities' smart constructor.
data FileCapabilities = FileCapabilities'
{ _fcCanComment :: !(Maybe Bool)
, _fcCanEdit :: !(Maybe Bool)
, _fcCanReadRevisions :: !(Maybe Bool)
, _fcCanCopy :: !(Maybe Bool)
, _fcCanShare :: !(Maybe Bool)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'FileCapabilities' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fcCanComment'
--
-- * 'fcCanEdit'
--
-- * 'fcCanReadRevisions'
--
-- * 'fcCanCopy'
--
-- * 'fcCanShare'
fileCapabilities
:: FileCapabilities
fileCapabilities =
FileCapabilities'
{ _fcCanComment = Nothing
, _fcCanEdit = Nothing
, _fcCanReadRevisions = Nothing
, _fcCanCopy = Nothing
, _fcCanShare = Nothing
}
-- | Whether the user can comment on the file.
fcCanComment :: Lens' FileCapabilities (Maybe Bool)
fcCanComment
= lens _fcCanComment (\ s a -> s{_fcCanComment = a})
-- | Whether the user can edit the file\'s content.
fcCanEdit :: Lens' FileCapabilities (Maybe Bool)
fcCanEdit
= lens _fcCanEdit (\ s a -> s{_fcCanEdit = a})
-- | Whether the current user has read access to the Revisions resource of
-- the file.
fcCanReadRevisions :: Lens' FileCapabilities (Maybe Bool)
fcCanReadRevisions
= lens _fcCanReadRevisions
(\ s a -> s{_fcCanReadRevisions = a})
-- | Whether the user can copy the file.
fcCanCopy :: Lens' FileCapabilities (Maybe Bool)
fcCanCopy
= lens _fcCanCopy (\ s a -> s{_fcCanCopy = a})
-- | Whether the user can modify the file\'s permissions and sharing
-- settings.
fcCanShare :: Lens' FileCapabilities (Maybe Bool)
fcCanShare
= lens _fcCanShare (\ s a -> s{_fcCanShare = a})
instance FromJSON FileCapabilities where
parseJSON
= withObject "FileCapabilities"
(\ o ->
FileCapabilities' <$>
(o .:? "canComment") <*> (o .:? "canEdit") <*>
(o .:? "canReadRevisions")
<*> (o .:? "canCopy")
<*> (o .:? "canShare"))
instance ToJSON FileCapabilities where
toJSON FileCapabilities'{..}
= object
(catMaybes
[("canComment" .=) <$> _fcCanComment,
("canEdit" .=) <$> _fcCanEdit,
("canReadRevisions" .=) <$> _fcCanReadRevisions,
("canCopy" .=) <$> _fcCanCopy,
("canShare" .=) <$> _fcCanShare])
-- | A list of replies to a comment on a file.
--
-- /See:/ 'replyList' smart constructor.
data ReplyList = ReplyList'
{ _rlNextPageToken :: !(Maybe Text)
, _rlKind :: !Text
, _rlReplies :: !(Maybe [Reply])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ReplyList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rlNextPageToken'
--
-- * 'rlKind'
--
-- * 'rlReplies'
replyList
:: ReplyList
replyList =
ReplyList'
{ _rlNextPageToken = Nothing
, _rlKind = "drive#replyList"
, _rlReplies = Nothing
}
-- | The page token for the next page of replies. This will be absent if the
-- end of the replies list has been reached. If the token is rejected for
-- any reason, it should be discarded, and pagination should be restarted
-- from the first page of results.
rlNextPageToken :: Lens' ReplyList (Maybe Text)
rlNextPageToken
= lens _rlNextPageToken
(\ s a -> s{_rlNextPageToken = a})
-- | Identifies what kind of resource this is. Value: the fixed string
-- \"drive#replyList\".
rlKind :: Lens' ReplyList Text
rlKind = lens _rlKind (\ s a -> s{_rlKind = a})
-- | The list of replies. If nextPageToken is populated, then this list may
-- be incomplete and an additional page of results should be fetched.
rlReplies :: Lens' ReplyList [Reply]
rlReplies
= lens _rlReplies (\ s a -> s{_rlReplies = a}) .
_Default
. _Coerce
instance FromJSON ReplyList where
parseJSON
= withObject "ReplyList"
(\ o ->
ReplyList' <$>
(o .:? "nextPageToken") <*>
(o .:? "kind" .!= "drive#replyList")
<*> (o .:? "replies" .!= mempty))
instance ToJSON ReplyList where
toJSON ReplyList'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _rlNextPageToken,
Just ("kind" .= _rlKind),
("replies" .=) <$> _rlReplies])
-- | A thumbnail for the file. This will only be used if Drive cannot
-- generate a standard thumbnail.
--
-- /See:/ 'fileContentHintsThumbnail' smart constructor.
data FileContentHintsThumbnail = FileContentHintsThumbnail'
{ _fchtImage :: !(Maybe Bytes)
, _fchtMimeType :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'FileContentHintsThumbnail' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fchtImage'
--
-- * 'fchtMimeType'
fileContentHintsThumbnail
:: FileContentHintsThumbnail
fileContentHintsThumbnail =
FileContentHintsThumbnail'
{ _fchtImage = Nothing
, _fchtMimeType = Nothing
}
-- | The thumbnail data encoded with URL-safe Base64 (RFC 4648 section 5).
fchtImage :: Lens' FileContentHintsThumbnail (Maybe ByteString)
fchtImage
= lens _fchtImage (\ s a -> s{_fchtImage = a}) .
mapping _Bytes
-- | The MIME type of the thumbnail.
fchtMimeType :: Lens' FileContentHintsThumbnail (Maybe Text)
fchtMimeType
= lens _fchtMimeType (\ s a -> s{_fchtMimeType = a})
instance FromJSON FileContentHintsThumbnail where
parseJSON
= withObject "FileContentHintsThumbnail"
(\ o ->
FileContentHintsThumbnail' <$>
(o .:? "image") <*> (o .:? "mimeType"))
instance ToJSON FileContentHintsThumbnail where
toJSON FileContentHintsThumbnail'{..}
= object
(catMaybes
[("image" .=) <$> _fchtImage,
("mimeType" .=) <$> _fchtMimeType])
-- | An notification channel used to watch for resource changes.
--
-- /See:/ 'channel' smart constructor.
data Channel = Channel'
{ _cResourceURI :: !(Maybe Text)
, _cResourceId :: !(Maybe Text)
, _cKind :: !Text
, _cExpiration :: !(Maybe (Textual Int64))
, _cToken :: !(Maybe Text)
, _cAddress :: !(Maybe Text)
, _cPayload :: !(Maybe Bool)
, _cParams :: !(Maybe ChannelParams)
, _cId :: !(Maybe Text)
, _cType :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Channel' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cResourceURI'
--
-- * 'cResourceId'
--
-- * 'cKind'
--
-- * 'cExpiration'
--
-- * 'cToken'
--
-- * 'cAddress'
--
-- * 'cPayload'
--
-- * 'cParams'
--
-- * 'cId'
--
-- * 'cType'
channel
:: Channel
channel =
Channel'
{ _cResourceURI = Nothing
, _cResourceId = Nothing
, _cKind = "api#channel"
, _cExpiration = Nothing
, _cToken = Nothing
, _cAddress = Nothing
, _cPayload = Nothing
, _cParams = Nothing
, _cId = Nothing
, _cType = Nothing
}
-- | A version-specific identifier for the watched resource.
cResourceURI :: Lens' Channel (Maybe Text)
cResourceURI
= lens _cResourceURI (\ s a -> s{_cResourceURI = a})
-- | An opaque ID that identifies the resource being watched on this channel.
-- Stable across different API versions.
cResourceId :: Lens' Channel (Maybe Text)
cResourceId
= lens _cResourceId (\ s a -> s{_cResourceId = a})
-- | Identifies this as a notification channel used to watch for changes to a
-- resource. Value: the fixed string \"api#channel\".
cKind :: Lens' Channel Text
cKind = lens _cKind (\ s a -> s{_cKind = a})
-- | Date and time of notification channel expiration, expressed as a Unix
-- timestamp, in milliseconds. Optional.
cExpiration :: Lens' Channel (Maybe Int64)
cExpiration
= lens _cExpiration (\ s a -> s{_cExpiration = a}) .
mapping _Coerce
-- | An arbitrary string delivered to the target address with each
-- notification delivered over this channel. Optional.
cToken :: Lens' Channel (Maybe Text)
cToken = lens _cToken (\ s a -> s{_cToken = a})
-- | The address where notifications are delivered for this channel.
cAddress :: Lens' Channel (Maybe Text)
cAddress = lens _cAddress (\ s a -> s{_cAddress = a})
-- | A Boolean value to indicate whether payload is wanted. Optional.
cPayload :: Lens' Channel (Maybe Bool)
cPayload = lens _cPayload (\ s a -> s{_cPayload = a})
-- | Additional parameters controlling delivery channel behavior. Optional.
cParams :: Lens' Channel (Maybe ChannelParams)
cParams = lens _cParams (\ s a -> s{_cParams = a})
-- | A UUID or similar unique string that identifies this channel.
cId :: Lens' Channel (Maybe Text)
cId = lens _cId (\ s a -> s{_cId = a})
-- | The type of delivery mechanism used for this channel.
cType :: Lens' Channel (Maybe Text)
cType = lens _cType (\ s a -> s{_cType = a})
instance FromJSON Channel where
parseJSON
= withObject "Channel"
(\ o ->
Channel' <$>
(o .:? "resourceUri") <*> (o .:? "resourceId") <*>
(o .:? "kind" .!= "api#channel")
<*> (o .:? "expiration")
<*> (o .:? "token")
<*> (o .:? "address")
<*> (o .:? "payload")
<*> (o .:? "params")
<*> (o .:? "id")
<*> (o .:? "type"))
instance ToJSON Channel where
toJSON Channel'{..}
= object
(catMaybes
[("resourceUri" .=) <$> _cResourceURI,
("resourceId" .=) <$> _cResourceId,
Just ("kind" .= _cKind),
("expiration" .=) <$> _cExpiration,
("token" .=) <$> _cToken,
("address" .=) <$> _cAddress,
("payload" .=) <$> _cPayload,
("params" .=) <$> _cParams, ("id" .=) <$> _cId,
("type" .=) <$> _cType])
-- | Additional metadata about video media. This may not be available
-- immediately upon upload.
--
-- /See:/ 'fileVideoMediaMetadata' smart constructor.
data FileVideoMediaMetadata = FileVideoMediaMetadata'
{ _fvmmHeight :: !(Maybe (Textual Int32))
, _fvmmWidth :: !(Maybe (Textual Int32))
, _fvmmDurationMillis :: !(Maybe (Textual Int64))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'FileVideoMediaMetadata' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fvmmHeight'
--
-- * 'fvmmWidth'
--
-- * 'fvmmDurationMillis'
fileVideoMediaMetadata
:: FileVideoMediaMetadata
fileVideoMediaMetadata =
FileVideoMediaMetadata'
{ _fvmmHeight = Nothing
, _fvmmWidth = Nothing
, _fvmmDurationMillis = Nothing
}
-- | The height of the video in pixels.
fvmmHeight :: Lens' FileVideoMediaMetadata (Maybe Int32)
fvmmHeight
= lens _fvmmHeight (\ s a -> s{_fvmmHeight = a}) .
mapping _Coerce
-- | The width of the video in pixels.
fvmmWidth :: Lens' FileVideoMediaMetadata (Maybe Int32)
fvmmWidth
= lens _fvmmWidth (\ s a -> s{_fvmmWidth = a}) .
mapping _Coerce
-- | The duration of the video in milliseconds.
fvmmDurationMillis :: Lens' FileVideoMediaMetadata (Maybe Int64)
fvmmDurationMillis
= lens _fvmmDurationMillis
(\ s a -> s{_fvmmDurationMillis = a})
. mapping _Coerce
instance FromJSON FileVideoMediaMetadata where
parseJSON
= withObject "FileVideoMediaMetadata"
(\ o ->
FileVideoMediaMetadata' <$>
(o .:? "height") <*> (o .:? "width") <*>
(o .:? "durationMillis"))
instance ToJSON FileVideoMediaMetadata where
toJSON FileVideoMediaMetadata'{..}
= object
(catMaybes
[("height" .=) <$> _fvmmHeight,
("width" .=) <$> _fvmmWidth,
("durationMillis" .=) <$> _fvmmDurationMillis])
-- | A collection of arbitrary key-value pairs which are private to the
-- requesting app. Entries with null values are cleared in update and copy
-- requests.
--
-- /See:/ 'fileAppProperties' smart constructor.
newtype FileAppProperties = FileAppProperties'
{ _fapAddtional :: HashMap Text Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'FileAppProperties' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fapAddtional'
fileAppProperties
:: HashMap Text Text -- ^ 'fapAddtional'
-> FileAppProperties
fileAppProperties pFapAddtional_ =
FileAppProperties'
{ _fapAddtional = _Coerce # pFapAddtional_
}
-- | The additional properties private to the requesting app, keyed by name.
fapAddtional :: Lens' FileAppProperties (HashMap Text Text)
fapAddtional
= lens _fapAddtional (\ s a -> s{_fapAddtional = a})
. _Coerce
instance FromJSON FileAppProperties where
parseJSON
= withObject "FileAppProperties"
(\ o -> FileAppProperties' <$> (parseJSONObject o))
instance ToJSON FileAppProperties where
toJSON = toJSON . _fapAddtional
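-- A small sketch of populating the map-like newtype above via its smart
-- constructor. It assumes a qualified import of Data.HashMap.Strict as HM,
-- which is not shown in this excerpt; the key/value pair is hypothetical.
exampleAppProperties :: FileAppProperties
exampleAppProperties = fileAppProperties (HM.singleton "reviewed" "true")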
-- | A change to a file.
--
-- /See:/ 'change' smart constructor.
data Change = Change'
{ _chaRemoved :: !(Maybe Bool)
, _chaTime :: !(Maybe DateTime')
, _chaKind :: !Text
, _chaFileId :: !(Maybe Text)
, _chaFile :: !(Maybe File)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Change' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'chaRemoved'
--
-- * 'chaTime'
--
-- * 'chaKind'
--
-- * 'chaFileId'
--
-- * 'chaFile'
change
:: Change
change =
Change'
{ _chaRemoved = Nothing
, _chaTime = Nothing
, _chaKind = "drive#change"
, _chaFileId = Nothing
, _chaFile = Nothing
}
-- | Whether the file has been removed from the view of the changes list, for
-- example by deletion or lost access.
chaRemoved :: Lens' Change (Maybe Bool)
chaRemoved
= lens _chaRemoved (\ s a -> s{_chaRemoved = a})
-- | The time of this change (RFC 3339 date-time).
chaTime :: Lens' Change (Maybe UTCTime)
chaTime
= lens _chaTime (\ s a -> s{_chaTime = a}) .
mapping _DateTime
-- | Identifies what kind of resource this is. Value: the fixed string
-- \"drive#change\".
chaKind :: Lens' Change Text
chaKind = lens _chaKind (\ s a -> s{_chaKind = a})
-- | The ID of the file which has changed.
chaFileId :: Lens' Change (Maybe Text)
chaFileId
= lens _chaFileId (\ s a -> s{_chaFileId = a})
-- | The updated state of the file. Present if the file has not been removed.
chaFile :: Lens' Change (Maybe File)
chaFile = lens _chaFile (\ s a -> s{_chaFile = a})
instance FromJSON Change where
parseJSON
= withObject "Change"
(\ o ->
Change' <$>
(o .:? "removed") <*> (o .:? "time") <*>
(o .:? "kind" .!= "drive#change")
<*> (o .:? "fileId")
<*> (o .:? "file"))
instance ToJSON Change where
toJSON Change'{..}
= object
(catMaybes
[("removed" .=) <$> _chaRemoved,
("time" .=) <$> _chaTime, Just ("kind" .= _chaKind),
("fileId" .=) <$> _chaFileId,
("file" .=) <$> _chaFile])
-- | A map of source MIME type to possible targets for all supported exports.
--
-- /See:/ 'aboutExportFormats' smart constructor.
newtype AboutExportFormats = AboutExportFormats'
{ _aefAddtional :: HashMap Text [Text]
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AboutExportFormats' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aefAddtional'
aboutExportFormats
:: HashMap Text [Text] -- ^ 'aefAddtional'
-> AboutExportFormats
aboutExportFormats pAefAddtional_ =
AboutExportFormats'
{ _aefAddtional = _Coerce # pAefAddtional_
}
-- | The supported export targets, keyed by source MIME type.
aefAddtional :: Lens' AboutExportFormats (HashMap Text [Text])
aefAddtional
= lens _aefAddtional (\ s a -> s{_aefAddtional = a})
. _Coerce
instance FromJSON AboutExportFormats where
parseJSON
= withObject "AboutExportFormats"
(\ o -> AboutExportFormats' <$> (parseJSONObject o))
instance ToJSON AboutExportFormats where
toJSON = toJSON . _aefAddtional
-- | Information about a Drive user.
--
-- /See:/ 'user' smart constructor.
data User = User'
{ _uPhotoLink :: !(Maybe Text)
, _uMe :: !(Maybe Bool)
, _uKind :: !Text
, _uEmailAddress :: !(Maybe Text)
, _uDisplayName :: !(Maybe Text)
, _uPermissionId :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'User' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'uPhotoLink'
--
-- * 'uMe'
--
-- * 'uKind'
--
-- * 'uEmailAddress'
--
-- * 'uDisplayName'
--
-- * 'uPermissionId'
user
:: User
user =
User'
{ _uPhotoLink = Nothing
, _uMe = Nothing
, _uKind = "drive#user"
, _uEmailAddress = Nothing
, _uDisplayName = Nothing
, _uPermissionId = Nothing
}
-- | A link to the user\'s profile photo, if available.
uPhotoLink :: Lens' User (Maybe Text)
uPhotoLink
= lens _uPhotoLink (\ s a -> s{_uPhotoLink = a})
-- | Whether this user is the requesting user.
uMe :: Lens' User (Maybe Bool)
uMe = lens _uMe (\ s a -> s{_uMe = a})
-- | Identifies what kind of resource this is. Value: the fixed string
-- \"drive#user\".
uKind :: Lens' User Text
uKind = lens _uKind (\ s a -> s{_uKind = a})
-- | The email address of the user. This may not be present in certain
-- contexts if the user has not made their email address visible to the
-- requester.
uEmailAddress :: Lens' User (Maybe Text)
uEmailAddress
= lens _uEmailAddress
(\ s a -> s{_uEmailAddress = a})
-- | A plain text displayable name for this user.
uDisplayName :: Lens' User (Maybe Text)
uDisplayName
= lens _uDisplayName (\ s a -> s{_uDisplayName = a})
-- | The user\'s ID as visible in Permission resources.
uPermissionId :: Lens' User (Maybe Text)
uPermissionId
= lens _uPermissionId
(\ s a -> s{_uPermissionId = a})
instance FromJSON User where
parseJSON
= withObject "User"
(\ o ->
User' <$>
(o .:? "photoLink") <*> (o .:? "me") <*>
(o .:? "kind" .!= "drive#user")
<*> (o .:? "emailAddress")
<*> (o .:? "displayName")
<*> (o .:? "permissionId"))
instance ToJSON User where
toJSON User'{..}
= object
(catMaybes
[("photoLink" .=) <$> _uPhotoLink,
("me" .=) <$> _uMe, Just ("kind" .= _uKind),
("emailAddress" .=) <$> _uEmailAddress,
("displayName" .=) <$> _uDisplayName,
("permissionId" .=) <$> _uPermissionId])
-- | A list of changes for a user.
--
-- /See:/ 'changeList' smart constructor.
data ChangeList = ChangeList'
{ _clNewStartPageToken :: !(Maybe Text)
, _clNextPageToken :: !(Maybe Text)
, _clChanges :: !(Maybe [Change])
, _clKind :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ChangeList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'clNewStartPageToken'
--
-- * 'clNextPageToken'
--
-- * 'clChanges'
--
-- * 'clKind'
changeList
:: ChangeList
changeList =
ChangeList'
{ _clNewStartPageToken = Nothing
, _clNextPageToken = Nothing
, _clChanges = Nothing
, _clKind = "drive#changeList"
}
-- | The starting page token for future changes. This will be present only if
-- the end of the current changes list has been reached.
clNewStartPageToken :: Lens' ChangeList (Maybe Text)
clNewStartPageToken
= lens _clNewStartPageToken
(\ s a -> s{_clNewStartPageToken = a})
-- | The page token for the next page of changes. This will be absent if the
-- end of the changes list has been reached. If the token is rejected for
-- any reason, it should be discarded, and pagination should be restarted
-- from the first page of results.
clNextPageToken :: Lens' ChangeList (Maybe Text)
clNextPageToken
= lens _clNextPageToken
(\ s a -> s{_clNextPageToken = a})
-- | The list of changes. If nextPageToken is populated, then this list may
-- be incomplete and an additional page of results should be fetched.
clChanges :: Lens' ChangeList [Change]
clChanges
= lens _clChanges (\ s a -> s{_clChanges = a}) .
_Default
. _Coerce
-- | Identifies what kind of resource this is. Value: the fixed string
-- \"drive#changeList\".
clKind :: Lens' ChangeList Text
clKind = lens _clKind (\ s a -> s{_clKind = a})
instance FromJSON ChangeList where
parseJSON
= withObject "ChangeList"
(\ o ->
ChangeList' <$>
(o .:? "newStartPageToken") <*>
(o .:? "nextPageToken")
<*> (o .:? "changes" .!= mempty)
<*> (o .:? "kind" .!= "drive#changeList"))
instance ToJSON ChangeList where
toJSON ChangeList'{..}
= object
(catMaybes
[("newStartPageToken" .=) <$> _clNewStartPageToken,
("nextPageToken" .=) <$> _clNextPageToken,
("changes" .=) <$> _clChanges,
Just ("kind" .= _clKind)])
-- | Additional information about the content of the file. These fields are
-- never populated in responses.
--
-- /See:/ 'fileContentHints' smart constructor.
data FileContentHints = FileContentHints'
{ _fchThumbnail :: !(Maybe FileContentHintsThumbnail)
, _fchIndexableText :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'FileContentHints' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fchThumbnail'
--
-- * 'fchIndexableText'
fileContentHints
:: FileContentHints
fileContentHints =
FileContentHints'
{ _fchThumbnail = Nothing
, _fchIndexableText = Nothing
}
-- | A thumbnail for the file. This will only be used if Drive cannot
-- generate a standard thumbnail.
fchThumbnail :: Lens' FileContentHints (Maybe FileContentHintsThumbnail)
fchThumbnail
= lens _fchThumbnail (\ s a -> s{_fchThumbnail = a})
-- | Text to be indexed for the file to improve fullText queries. This is
-- limited to 128KB in length and may contain HTML elements.
fchIndexableText :: Lens' FileContentHints (Maybe Text)
fchIndexableText
= lens _fchIndexableText
(\ s a -> s{_fchIndexableText = a})
instance FromJSON FileContentHints where
parseJSON
= withObject "FileContentHints"
(\ o ->
FileContentHints' <$>
(o .:? "thumbnail") <*> (o .:? "indexableText"))
instance ToJSON FileContentHints where
toJSON FileContentHints'{..}
= object
(catMaybes
[("thumbnail" .=) <$> _fchThumbnail,
("indexableText" .=) <$> _fchIndexableText])
-- | Additional parameters controlling delivery channel behavior. Optional.
--
-- /See:/ 'channelParams' smart constructor.
newtype ChannelParams = ChannelParams'
{ _cpAddtional :: HashMap Text Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ChannelParams' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cpAddtional'
channelParams
:: HashMap Text Text -- ^ 'cpAddtional'
-> ChannelParams
channelParams pCpAddtional_ =
ChannelParams'
{ _cpAddtional = _Coerce # pCpAddtional_
}
-- | Declares a new parameter by name.
cpAddtional :: Lens' ChannelParams (HashMap Text Text)
cpAddtional
= lens _cpAddtional (\ s a -> s{_cpAddtional = a}) .
_Coerce
instance FromJSON ChannelParams where
parseJSON
= withObject "ChannelParams"
(\ o -> ChannelParams' <$> (parseJSONObject o))
instance ToJSON ChannelParams where
toJSON = toJSON . _cpAddtional
-- | A collection of arbitrary key-value pairs which are visible to all apps.
-- Entries with null values are cleared in update and copy requests.
--
-- /See:/ 'fileProperties' smart constructor.
newtype FileProperties = FileProperties'
{ _fpAddtional :: HashMap Text Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'FileProperties' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fpAddtional'
fileProperties
:: HashMap Text Text -- ^ 'fpAddtional'
-> FileProperties
fileProperties pFpAddtional_ =
FileProperties'
{ _fpAddtional = _Coerce # pFpAddtional_
}
-- | The additional properties visible to all apps, keyed by name.
fpAddtional :: Lens' FileProperties (HashMap Text Text)
fpAddtional
= lens _fpAddtional (\ s a -> s{_fpAddtional = a}) .
_Coerce
instance FromJSON FileProperties where
parseJSON
= withObject "FileProperties"
(\ o -> FileProperties' <$> (parseJSONObject o))
instance ToJSON FileProperties where
toJSON = toJSON . _fpAddtional
-- | A map of maximum import sizes by MIME type, in bytes.
--
-- /See:/ 'aboutMaxImportSizes' smart constructor.
newtype AboutMaxImportSizes = AboutMaxImportSizes'
{ _amisAddtional :: HashMap Text (Textual Int64)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AboutMaxImportSizes' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'amisAddtional'
aboutMaxImportSizes
:: HashMap Text Int64 -- ^ 'amisAddtional'
-> AboutMaxImportSizes
aboutMaxImportSizes pAmisAddtional_ =
AboutMaxImportSizes'
{ _amisAddtional = _Coerce # pAmisAddtional_
}
-- | The maximum import sizes in bytes, keyed by MIME type.
amisAddtional :: Lens' AboutMaxImportSizes (HashMap Text Int64)
amisAddtional
= lens _amisAddtional
(\ s a -> s{_amisAddtional = a})
. _Coerce
instance FromJSON AboutMaxImportSizes where
parseJSON
= withObject "AboutMaxImportSizes"
(\ o -> AboutMaxImportSizes' <$> (parseJSONObject o))
instance ToJSON AboutMaxImportSizes where
toJSON = toJSON . _amisAddtional
-- | Information about the user, the user\'s Drive, and system capabilities.
--
-- /See:/ 'about' smart constructor.
data About = About'
{ _aExportFormats :: !(Maybe AboutExportFormats)
, _aMaxImportSizes :: !(Maybe AboutMaxImportSizes)
, _aImportFormats :: !(Maybe AboutImportFormats)
, _aKind :: !Text
, _aAppInstalled :: !(Maybe Bool)
, _aUser :: !(Maybe User)
, _aStorageQuota :: !(Maybe AboutStorageQuota)
, _aMaxUploadSize :: !(Maybe (Textual Int64))
, _aFolderColorPalette :: !(Maybe [Text])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'About' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aExportFormats'
--
-- * 'aMaxImportSizes'
--
-- * 'aImportFormats'
--
-- * 'aKind'
--
-- * 'aAppInstalled'
--
-- * 'aUser'
--
-- * 'aStorageQuota'
--
-- * 'aMaxUploadSize'
--
-- * 'aFolderColorPalette'
about
:: About
about =
About'
{ _aExportFormats = Nothing
, _aMaxImportSizes = Nothing
, _aImportFormats = Nothing
, _aKind = "drive#about"
, _aAppInstalled = Nothing
, _aUser = Nothing
, _aStorageQuota = Nothing
, _aMaxUploadSize = Nothing
, _aFolderColorPalette = Nothing
}
-- | A map of source MIME type to possible targets for all supported exports.
aExportFormats :: Lens' About (Maybe AboutExportFormats)
aExportFormats
= lens _aExportFormats
(\ s a -> s{_aExportFormats = a})
-- | A map of maximum import sizes by MIME type, in bytes.
aMaxImportSizes :: Lens' About (Maybe AboutMaxImportSizes)
aMaxImportSizes
= lens _aMaxImportSizes
(\ s a -> s{_aMaxImportSizes = a})
-- | A map of source MIME type to possible targets for all supported imports.
aImportFormats :: Lens' About (Maybe AboutImportFormats)
aImportFormats
= lens _aImportFormats
(\ s a -> s{_aImportFormats = a})
-- | Identifies what kind of resource this is. Value: the fixed string
-- \"drive#about\".
aKind :: Lens' About Text
aKind = lens _aKind (\ s a -> s{_aKind = a})
-- | Whether the user has installed the requesting app.
aAppInstalled :: Lens' About (Maybe Bool)
aAppInstalled
= lens _aAppInstalled
(\ s a -> s{_aAppInstalled = a})
-- | The authenticated user.
aUser :: Lens' About (Maybe User)
aUser = lens _aUser (\ s a -> s{_aUser = a})
-- | The user\'s storage quota limits and usage. All fields are measured in
-- bytes.
aStorageQuota :: Lens' About (Maybe AboutStorageQuota)
aStorageQuota
= lens _aStorageQuota
(\ s a -> s{_aStorageQuota = a})
-- | The maximum upload size in bytes.
aMaxUploadSize :: Lens' About (Maybe Int64)
aMaxUploadSize
= lens _aMaxUploadSize
(\ s a -> s{_aMaxUploadSize = a})
. mapping _Coerce
-- | The currently supported folder colors as RGB hex strings.
aFolderColorPalette :: Lens' About [Text]
aFolderColorPalette
= lens _aFolderColorPalette
(\ s a -> s{_aFolderColorPalette = a})
. _Default
. _Coerce
instance FromJSON About where
parseJSON
= withObject "About"
(\ o ->
About' <$>
(o .:? "exportFormats") <*> (o .:? "maxImportSizes")
<*> (o .:? "importFormats")
<*> (o .:? "kind" .!= "drive#about")
<*> (o .:? "appInstalled")
<*> (o .:? "user")
<*> (o .:? "storageQuota")
<*> (o .:? "maxUploadSize")
<*> (o .:? "folderColorPalette" .!= mempty))
instance ToJSON About where
toJSON About'{..}
= object
(catMaybes
[("exportFormats" .=) <$> _aExportFormats,
("maxImportSizes" .=) <$> _aMaxImportSizes,
("importFormats" .=) <$> _aImportFormats,
Just ("kind" .= _aKind),
("appInstalled" .=) <$> _aAppInstalled,
("user" .=) <$> _aUser,
("storageQuota" .=) <$> _aStorageQuota,
("maxUploadSize" .=) <$> _aMaxUploadSize,
("folderColorPalette" .=) <$> _aFolderColorPalette])
-- | Geographic location information stored in the image.
--
-- /See:/ 'fileImageMediaMetadataLocation' smart constructor.
data FileImageMediaMetadataLocation = FileImageMediaMetadataLocation'
{ _fimmlLatitude :: !(Maybe (Textual Double))
, _fimmlAltitude :: !(Maybe (Textual Double))
, _fimmlLongitude :: !(Maybe (Textual Double))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'FileImageMediaMetadataLocation' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fimmlLatitude'
--
-- * 'fimmlAltitude'
--
-- * 'fimmlLongitude'
fileImageMediaMetadataLocation
:: FileImageMediaMetadataLocation
fileImageMediaMetadataLocation =
FileImageMediaMetadataLocation'
{ _fimmlLatitude = Nothing
, _fimmlAltitude = Nothing
, _fimmlLongitude = Nothing
}
-- | The latitude stored in the image.
fimmlLatitude :: Lens' FileImageMediaMetadataLocation (Maybe Double)
fimmlLatitude
= lens _fimmlLatitude
(\ s a -> s{_fimmlLatitude = a})
. mapping _Coerce
-- | The altitude stored in the image.
fimmlAltitude :: Lens' FileImageMediaMetadataLocation (Maybe Double)
fimmlAltitude
= lens _fimmlAltitude
(\ s a -> s{_fimmlAltitude = a})
. mapping _Coerce
-- | The longitude stored in the image.
fimmlLongitude :: Lens' FileImageMediaMetadataLocation (Maybe Double)
fimmlLongitude
= lens _fimmlLongitude
(\ s a -> s{_fimmlLongitude = a})
. mapping _Coerce
instance FromJSON FileImageMediaMetadataLocation where
parseJSON
= withObject "FileImageMediaMetadataLocation"
(\ o ->
FileImageMediaMetadataLocation' <$>
(o .:? "latitude") <*> (o .:? "altitude") <*>
(o .:? "longitude"))
instance ToJSON FileImageMediaMetadataLocation where
toJSON FileImageMediaMetadataLocation'{..}
= object
(catMaybes
[("latitude" .=) <$> _fimmlLatitude,
("altitude" .=) <$> _fimmlAltitude,
("longitude" .=) <$> _fimmlLongitude])
-- | The starting page token for listing changes.
--
-- /See:/ 'startPageToken' smart constructor.
data StartPageToken = StartPageToken'
{ _sptKind :: !Text
, _sptStartPageToken :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'StartPageToken' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sptKind'
--
-- * 'sptStartPageToken'
startPageToken
:: StartPageToken
startPageToken =
StartPageToken'
{ _sptKind = "drive#startPageToken"
, _sptStartPageToken = Nothing
}
-- | Identifies what kind of resource this is. Value: the fixed string
-- \"drive#startPageToken\".
sptKind :: Lens' StartPageToken Text
sptKind = lens _sptKind (\ s a -> s{_sptKind = a})
-- | The starting page token for listing changes.
sptStartPageToken :: Lens' StartPageToken (Maybe Text)
sptStartPageToken
= lens _sptStartPageToken
(\ s a -> s{_sptStartPageToken = a})
instance FromJSON StartPageToken where
parseJSON
= withObject "StartPageToken"
(\ o ->
StartPageToken' <$>
(o .:? "kind" .!= "drive#startPageToken") <*>
(o .:? "startPageToken"))
instance ToJSON StartPageToken where
toJSON StartPageToken'{..}
= object
(catMaybes
[Just ("kind" .= _sptKind),
("startPageToken" .=) <$> _sptStartPageToken])
-- | Additional metadata about image media, if available.
--
-- /See:/ 'fileImageMediaMetadata' smart constructor.
data FileImageMediaMetadata = FileImageMediaMetadata'
{ _fimmRotation :: !(Maybe (Textual Int32))
, _fimmHeight :: !(Maybe (Textual Int32))
, _fimmSubjectDistance :: !(Maybe (Textual Int32))
, _fimmMaxApertureValue :: !(Maybe (Textual Double))
, _fimmIsoSpeed :: !(Maybe (Textual Int32))
, _fimmTime :: !(Maybe Text)
, _fimmLocation :: !(Maybe FileImageMediaMetadataLocation)
, _fimmAperture :: !(Maybe (Textual Double))
, _fimmFocalLength :: !(Maybe (Textual Double))
, _fimmCameraMake :: !(Maybe Text)
, _fimmWidth :: !(Maybe (Textual Int32))
, _fimmExposureTime :: !(Maybe (Textual Double))
, _fimmCameraModel :: !(Maybe Text)
, _fimmWhiteBalance :: !(Maybe Text)
, _fimmLens :: !(Maybe Text)
, _fimmFlashUsed :: !(Maybe Bool)
, _fimmExposureBias :: !(Maybe (Textual Double))
, _fimmMeteringMode :: !(Maybe Text)
, _fimmExposureMode :: !(Maybe Text)
, _fimmSensor :: !(Maybe Text)
, _fimmColorSpace :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'FileImageMediaMetadata' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fimmRotation'
--
-- * 'fimmHeight'
--
-- * 'fimmSubjectDistance'
--
-- * 'fimmMaxApertureValue'
--
-- * 'fimmIsoSpeed'
--
-- * 'fimmTime'
--
-- * 'fimmLocation'
--
-- * 'fimmAperture'
--
-- * 'fimmFocalLength'
--
-- * 'fimmCameraMake'
--
-- * 'fimmWidth'
--
-- * 'fimmExposureTime'
--
-- * 'fimmCameraModel'
--
-- * 'fimmWhiteBalance'
--
-- * 'fimmLens'
--
-- * 'fimmFlashUsed'
--
-- * 'fimmExposureBias'
--
-- * 'fimmMeteringMode'
--
-- * 'fimmExposureMode'
--
-- * 'fimmSensor'
--
-- * 'fimmColorSpace'
fileImageMediaMetadata
:: FileImageMediaMetadata
fileImageMediaMetadata =
FileImageMediaMetadata'
{ _fimmRotation = Nothing
, _fimmHeight = Nothing
, _fimmSubjectDistance = Nothing
, _fimmMaxApertureValue = Nothing
, _fimmIsoSpeed = Nothing
, _fimmTime = Nothing
, _fimmLocation = Nothing
, _fimmAperture = Nothing
, _fimmFocalLength = Nothing
, _fimmCameraMake = Nothing
, _fimmWidth = Nothing
, _fimmExposureTime = Nothing
, _fimmCameraModel = Nothing
, _fimmWhiteBalance = Nothing
, _fimmLens = Nothing
, _fimmFlashUsed = Nothing
, _fimmExposureBias = Nothing
, _fimmMeteringMode = Nothing
, _fimmExposureMode = Nothing
, _fimmSensor = Nothing
, _fimmColorSpace = Nothing
}
-- | The rotation in clockwise degrees from the image\'s original
-- orientation.
fimmRotation :: Lens' FileImageMediaMetadata (Maybe Int32)
fimmRotation
= lens _fimmRotation (\ s a -> s{_fimmRotation = a})
. mapping _Coerce
-- | The height of the image in pixels.
fimmHeight :: Lens' FileImageMediaMetadata (Maybe Int32)
fimmHeight
= lens _fimmHeight (\ s a -> s{_fimmHeight = a}) .
mapping _Coerce
-- | The distance to the subject of the photo, in meters.
fimmSubjectDistance :: Lens' FileImageMediaMetadata (Maybe Int32)
fimmSubjectDistance
= lens _fimmSubjectDistance
(\ s a -> s{_fimmSubjectDistance = a})
. mapping _Coerce
-- | The smallest f-number of the lens at the focal length used to create the
-- photo (APEX value).
fimmMaxApertureValue :: Lens' FileImageMediaMetadata (Maybe Double)
fimmMaxApertureValue
= lens _fimmMaxApertureValue
(\ s a -> s{_fimmMaxApertureValue = a})
. mapping _Coerce
-- | The ISO speed used to create the photo.
fimmIsoSpeed :: Lens' FileImageMediaMetadata (Maybe Int32)
fimmIsoSpeed
= lens _fimmIsoSpeed (\ s a -> s{_fimmIsoSpeed = a})
. mapping _Coerce
-- | The date and time the photo was taken (EXIF DateTime).
fimmTime :: Lens' FileImageMediaMetadata (Maybe Text)
fimmTime = lens _fimmTime (\ s a -> s{_fimmTime = a})
-- | Geographic location information stored in the image.
fimmLocation :: Lens' FileImageMediaMetadata (Maybe FileImageMediaMetadataLocation)
fimmLocation
= lens _fimmLocation (\ s a -> s{_fimmLocation = a})
-- | The aperture used to create the photo (f-number).
fimmAperture :: Lens' FileImageMediaMetadata (Maybe Double)
fimmAperture
= lens _fimmAperture (\ s a -> s{_fimmAperture = a})
. mapping _Coerce
-- | The focal length used to create the photo, in millimeters.
fimmFocalLength :: Lens' FileImageMediaMetadata (Maybe Double)
fimmFocalLength
= lens _fimmFocalLength
(\ s a -> s{_fimmFocalLength = a})
. mapping _Coerce
-- | The make of the camera used to create the photo.
fimmCameraMake :: Lens' FileImageMediaMetadata (Maybe Text)
fimmCameraMake
= lens _fimmCameraMake
(\ s a -> s{_fimmCameraMake = a})
-- | The width of the image in pixels.
fimmWidth :: Lens' FileImageMediaMetadata (Maybe Int32)
fimmWidth
= lens _fimmWidth (\ s a -> s{_fimmWidth = a}) .
mapping _Coerce
-- | The length of the exposure, in seconds.
fimmExposureTime :: Lens' FileImageMediaMetadata (Maybe Double)
fimmExposureTime
= lens _fimmExposureTime
(\ s a -> s{_fimmExposureTime = a})
. mapping _Coerce
-- | The model of the camera used to create the photo.
fimmCameraModel :: Lens' FileImageMediaMetadata (Maybe Text)
fimmCameraModel
= lens _fimmCameraModel
(\ s a -> s{_fimmCameraModel = a})
-- | The white balance mode used to create the photo.
fimmWhiteBalance :: Lens' FileImageMediaMetadata (Maybe Text)
fimmWhiteBalance
= lens _fimmWhiteBalance
(\ s a -> s{_fimmWhiteBalance = a})
-- | The lens used to create the photo.
fimmLens :: Lens' FileImageMediaMetadata (Maybe Text)
fimmLens = lens _fimmLens (\ s a -> s{_fimmLens = a})
-- | Whether a flash was used to create the photo.
fimmFlashUsed :: Lens' FileImageMediaMetadata (Maybe Bool)
fimmFlashUsed
= lens _fimmFlashUsed
(\ s a -> s{_fimmFlashUsed = a})
-- | The exposure bias of the photo (APEX value).
fimmExposureBias :: Lens' FileImageMediaMetadata (Maybe Double)
fimmExposureBias
= lens _fimmExposureBias
(\ s a -> s{_fimmExposureBias = a})
. mapping _Coerce
-- | The metering mode used to create the photo.
fimmMeteringMode :: Lens' FileImageMediaMetadata (Maybe Text)
fimmMeteringMode
= lens _fimmMeteringMode
(\ s a -> s{_fimmMeteringMode = a})
-- | The exposure mode used to create the photo.
fimmExposureMode :: Lens' FileImageMediaMetadata (Maybe Text)
fimmExposureMode
= lens _fimmExposureMode
(\ s a -> s{_fimmExposureMode = a})
-- | The type of sensor used to create the photo.
fimmSensor :: Lens' FileImageMediaMetadata (Maybe Text)
fimmSensor
= lens _fimmSensor (\ s a -> s{_fimmSensor = a})
-- | The color space of the photo.
fimmColorSpace :: Lens' FileImageMediaMetadata (Maybe Text)
fimmColorSpace
= lens _fimmColorSpace
(\ s a -> s{_fimmColorSpace = a})
instance FromJSON FileImageMediaMetadata where
parseJSON
= withObject "FileImageMediaMetadata"
(\ o ->
FileImageMediaMetadata' <$>
(o .:? "rotation") <*> (o .:? "height") <*>
(o .:? "subjectDistance")
<*> (o .:? "maxApertureValue")
<*> (o .:? "isoSpeed")
<*> (o .:? "time")
<*> (o .:? "location")
<*> (o .:? "aperture")
<*> (o .:? "focalLength")
<*> (o .:? "cameraMake")
<*> (o .:? "width")
<*> (o .:? "exposureTime")
<*> (o .:? "cameraModel")
<*> (o .:? "whiteBalance")
<*> (o .:? "lens")
<*> (o .:? "flashUsed")
<*> (o .:? "exposureBias")
<*> (o .:? "meteringMode")
<*> (o .:? "exposureMode")
<*> (o .:? "sensor")
<*> (o .:? "colorSpace"))
instance ToJSON FileImageMediaMetadata where
toJSON FileImageMediaMetadata'{..}
= object
(catMaybes
[("rotation" .=) <$> _fimmRotation,
("height" .=) <$> _fimmHeight,
("subjectDistance" .=) <$> _fimmSubjectDistance,
("maxApertureValue" .=) <$> _fimmMaxApertureValue,
("isoSpeed" .=) <$> _fimmIsoSpeed,
("time" .=) <$> _fimmTime,
("location" .=) <$> _fimmLocation,
("aperture" .=) <$> _fimmAperture,
("focalLength" .=) <$> _fimmFocalLength,
("cameraMake" .=) <$> _fimmCameraMake,
("width" .=) <$> _fimmWidth,
("exposureTime" .=) <$> _fimmExposureTime,
("cameraModel" .=) <$> _fimmCameraModel,
("whiteBalance" .=) <$> _fimmWhiteBalance,
("lens" .=) <$> _fimmLens,
("flashUsed" .=) <$> _fimmFlashUsed,
("exposureBias" .=) <$> _fimmExposureBias,
("meteringMode" .=) <$> _fimmMeteringMode,
("exposureMode" .=) <$> _fimmExposureMode,
("sensor" .=) <$> _fimmSensor,
("colorSpace" .=) <$> _fimmColorSpace])
-- | A comment on a file.
--
-- /See:/ 'comment' smart constructor.
data Comment = Comment'
{ _comHTMLContent :: !(Maybe Text)
, _comModifiedTime :: !(Maybe DateTime')
, _comCreatedTime :: !(Maybe DateTime')
, _comKind :: !Text
, _comResolved :: !(Maybe Bool)
, _comQuotedFileContent :: !(Maybe CommentQuotedFileContent)
, _comAnchor :: !(Maybe Text)
, _comContent :: !(Maybe Text)
, _comReplies :: !(Maybe [Reply])
, _comAuthor :: !(Maybe User)
, _comId :: !(Maybe Text)
, _comDeleted :: !(Maybe Bool)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Comment' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'comHTMLContent'
--
-- * 'comModifiedTime'
--
-- * 'comCreatedTime'
--
-- * 'comKind'
--
-- * 'comResolved'
--
-- * 'comQuotedFileContent'
--
-- * 'comAnchor'
--
-- * 'comContent'
--
-- * 'comReplies'
--
-- * 'comAuthor'
--
-- * 'comId'
--
-- * 'comDeleted'
comment
:: Comment
comment =
Comment'
{ _comHTMLContent = Nothing
, _comModifiedTime = Nothing
, _comCreatedTime = Nothing
, _comKind = "drive#comment"
, _comResolved = Nothing
, _comQuotedFileContent = Nothing
, _comAnchor = Nothing
, _comContent = Nothing
, _comReplies = Nothing
, _comAuthor = Nothing
, _comId = Nothing
, _comDeleted = Nothing
}
-- | The content of the comment with HTML formatting.
comHTMLContent :: Lens' Comment (Maybe Text)
comHTMLContent
= lens _comHTMLContent
(\ s a -> s{_comHTMLContent = a})
-- | The last time the comment or any of its replies was modified (RFC 3339
-- date-time).
comModifiedTime :: Lens' Comment (Maybe UTCTime)
comModifiedTime
= lens _comModifiedTime
(\ s a -> s{_comModifiedTime = a})
. mapping _DateTime
-- | The time at which the comment was created (RFC 3339 date-time).
comCreatedTime :: Lens' Comment (Maybe UTCTime)
comCreatedTime
= lens _comCreatedTime
(\ s a -> s{_comCreatedTime = a})
. mapping _DateTime
-- | Identifies what kind of resource this is. Value: the fixed string
-- \"drive#comment\".
comKind :: Lens' Comment Text
comKind = lens _comKind (\ s a -> s{_comKind = a})
-- | Whether the comment has been resolved by one of its replies.
comResolved :: Lens' Comment (Maybe Bool)
comResolved
= lens _comResolved (\ s a -> s{_comResolved = a})
-- | The file content to which the comment refers, typically within the
-- anchor region. For a text file, for example, this would be the text at
-- the location of the comment.
comQuotedFileContent :: Lens' Comment (Maybe CommentQuotedFileContent)
comQuotedFileContent
= lens _comQuotedFileContent
(\ s a -> s{_comQuotedFileContent = a})
-- | A region of the document represented as a JSON string. See anchor
-- documentation for details on how to define and interpret anchor
-- properties.
comAnchor :: Lens' Comment (Maybe Text)
comAnchor
= lens _comAnchor (\ s a -> s{_comAnchor = a})
-- | The plain text content of the comment. This field is used for setting
-- the content, while htmlContent should be displayed.
comContent :: Lens' Comment (Maybe Text)
comContent
= lens _comContent (\ s a -> s{_comContent = a})
-- | The full list of replies to the comment in chronological order.
comReplies :: Lens' Comment [Reply]
comReplies
= lens _comReplies (\ s a -> s{_comReplies = a}) .
_Default
. _Coerce
-- | The user who created the comment.
comAuthor :: Lens' Comment (Maybe User)
comAuthor
= lens _comAuthor (\ s a -> s{_comAuthor = a})
-- | The ID of the comment.
comId :: Lens' Comment (Maybe Text)
comId = lens _comId (\ s a -> s{_comId = a})
-- | Whether the comment has been deleted. A deleted comment has no content.
comDeleted :: Lens' Comment (Maybe Bool)
comDeleted
= lens _comDeleted (\ s a -> s{_comDeleted = a})
instance FromJSON Comment where
parseJSON
= withObject "Comment"
(\ o ->
Comment' <$>
(o .:? "htmlContent") <*> (o .:? "modifiedTime") <*>
(o .:? "createdTime")
<*> (o .:? "kind" .!= "drive#comment")
<*> (o .:? "resolved")
<*> (o .:? "quotedFileContent")
<*> (o .:? "anchor")
<*> (o .:? "content")
<*> (o .:? "replies" .!= mempty)
<*> (o .:? "author")
<*> (o .:? "id")
<*> (o .:? "deleted"))
instance ToJSON Comment where
toJSON Comment'{..}
= object
(catMaybes
[("htmlContent" .=) <$> _comHTMLContent,
("modifiedTime" .=) <$> _comModifiedTime,
("createdTime" .=) <$> _comCreatedTime,
Just ("kind" .= _comKind),
("resolved" .=) <$> _comResolved,
("quotedFileContent" .=) <$> _comQuotedFileContent,
("anchor" .=) <$> _comAnchor,
("content" .=) <$> _comContent,
("replies" .=) <$> _comReplies,
("author" .=) <$> _comAuthor, ("id" .=) <$> _comId,
("deleted" .=) <$> _comDeleted])
-- | The metadata for a revision to a file.
--
-- /See:/ 'revision' smart constructor.
data Revision = Revision'
{ _revModifiedTime :: !(Maybe DateTime')
, _revSize :: !(Maybe (Textual Int64))
, _revOriginalFilename :: !(Maybe Text)
, _revKind :: !Text
, _revPublished :: !(Maybe Bool)
, _revLastModifyingUser :: !(Maybe User)
, _revPublishAuto :: !(Maybe Bool)
, _revMD5Checksum :: !(Maybe Text)
, _revKeepForever :: !(Maybe Bool)
, _revMimeType :: !(Maybe Text)
, _revPublishedOutsideDomain :: !(Maybe Bool)
, _revId :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Revision' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'revModifiedTime'
--
-- * 'revSize'
--
-- * 'revOriginalFilename'
--
-- * 'revKind'
--
-- * 'revPublished'
--
-- * 'revLastModifyingUser'
--
-- * 'revPublishAuto'
--
-- * 'revMD5Checksum'
--
-- * 'revKeepForever'
--
-- * 'revMimeType'
--
-- * 'revPublishedOutsideDomain'
--
-- * 'revId'
revision
:: Revision
revision =
Revision'
{ _revModifiedTime = Nothing
, _revSize = Nothing
, _revOriginalFilename = Nothing
, _revKind = "drive#revision"
, _revPublished = Nothing
, _revLastModifyingUser = Nothing
, _revPublishAuto = Nothing
, _revMD5Checksum = Nothing
, _revKeepForever = Nothing
, _revMimeType = Nothing
, _revPublishedOutsideDomain = Nothing
, _revId = Nothing
}
-- | The last time the revision was modified (RFC 3339 date-time).
revModifiedTime :: Lens' Revision (Maybe UTCTime)
revModifiedTime
= lens _revModifiedTime
(\ s a -> s{_revModifiedTime = a})
. mapping _DateTime
-- | The size of the revision\'s content in bytes. This is only applicable to
-- files with binary content in Drive.
revSize :: Lens' Revision (Maybe Int64)
revSize
= lens _revSize (\ s a -> s{_revSize = a}) .
mapping _Coerce
-- | The original filename used to create this revision. This is only
-- applicable to files with binary content in Drive.
revOriginalFilename :: Lens' Revision (Maybe Text)
revOriginalFilename
= lens _revOriginalFilename
(\ s a -> s{_revOriginalFilename = a})
-- | Identifies what kind of resource this is. Value: the fixed string
-- \"drive#revision\".
revKind :: Lens' Revision Text
revKind = lens _revKind (\ s a -> s{_revKind = a})
-- | Whether this revision is published. This is only applicable to Google
-- Docs.
revPublished :: Lens' Revision (Maybe Bool)
revPublished
= lens _revPublished (\ s a -> s{_revPublished = a})
-- | The last user to modify this revision.
revLastModifyingUser :: Lens' Revision (Maybe User)
revLastModifyingUser
= lens _revLastModifyingUser
(\ s a -> s{_revLastModifyingUser = a})
-- | Whether subsequent revisions will be automatically republished. This is
-- only applicable to Google Docs.
revPublishAuto :: Lens' Revision (Maybe Bool)
revPublishAuto
= lens _revPublishAuto
(\ s a -> s{_revPublishAuto = a})
-- | The MD5 checksum of the revision\'s content. This is only applicable to
-- files with binary content in Drive.
revMD5Checksum :: Lens' Revision (Maybe Text)
revMD5Checksum
= lens _revMD5Checksum
(\ s a -> s{_revMD5Checksum = a})
-- | Whether to keep this revision forever, even if it is no longer the head
-- revision. If not set, the revision will be automatically purged 30 days
-- after newer content is uploaded. This can be set on a maximum of 200
-- revisions for a file. This field is only applicable to files with binary
-- content in Drive.
revKeepForever :: Lens' Revision (Maybe Bool)
revKeepForever
= lens _revKeepForever
(\ s a -> s{_revKeepForever = a})
-- | The MIME type of the revision.
revMimeType :: Lens' Revision (Maybe Text)
revMimeType
= lens _revMimeType (\ s a -> s{_revMimeType = a})
-- | Whether this revision is published outside the domain. This is only
-- applicable to Google Docs.
revPublishedOutsideDomain :: Lens' Revision (Maybe Bool)
revPublishedOutsideDomain
= lens _revPublishedOutsideDomain
(\ s a -> s{_revPublishedOutsideDomain = a})
-- | The ID of the revision.
revId :: Lens' Revision (Maybe Text)
revId = lens _revId (\ s a -> s{_revId = a})
instance FromJSON Revision where
parseJSON
= withObject "Revision"
(\ o ->
Revision' <$>
(o .:? "modifiedTime") <*> (o .:? "size") <*>
(o .:? "originalFilename")
<*> (o .:? "kind" .!= "drive#revision")
<*> (o .:? "published")
<*> (o .:? "lastModifyingUser")
<*> (o .:? "publishAuto")
<*> (o .:? "md5Checksum")
<*> (o .:? "keepForever")
<*> (o .:? "mimeType")
<*> (o .:? "publishedOutsideDomain")
<*> (o .:? "id"))
instance ToJSON Revision where
toJSON Revision'{..}
= object
(catMaybes
[("modifiedTime" .=) <$> _revModifiedTime,
("size" .=) <$> _revSize,
("originalFilename" .=) <$> _revOriginalFilename,
Just ("kind" .= _revKind),
("published" .=) <$> _revPublished,
("lastModifyingUser" .=) <$> _revLastModifyingUser,
("publishAuto" .=) <$> _revPublishAuto,
("md5Checksum" .=) <$> _revMD5Checksum,
("keepForever" .=) <$> _revKeepForever,
("mimeType" .=) <$> _revMimeType,
("publishedOutsideDomain" .=) <$>
_revPublishedOutsideDomain,
("id" .=) <$> _revId])
-- | A permission for a file. A permission grants a user, group, domain or
-- the world access to a file or a folder hierarchy.
--
-- /See:/ 'permission' smart constructor.
data Permission = Permission'
{ _pPhotoLink :: !(Maybe Text)
, _pKind :: !Text
, _pDomain :: !(Maybe Text)
, _pRole :: !(Maybe Text)
, _pEmailAddress :: !(Maybe Text)
, _pAllowFileDiscovery :: !(Maybe Bool)
, _pDisplayName :: !(Maybe Text)
, _pId :: !(Maybe Text)
, _pType :: !(Maybe Text)
, _pExpirationTime :: !(Maybe DateTime')
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Permission' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pPhotoLink'
--
-- * 'pKind'
--
-- * 'pDomain'
--
-- * 'pRole'
--
-- * 'pEmailAddress'
--
-- * 'pAllowFileDiscovery'
--
-- * 'pDisplayName'
--
-- * 'pId'
--
-- * 'pType'
--
-- * 'pExpirationTime'
permission
:: Permission
permission =
Permission'
{ _pPhotoLink = Nothing
, _pKind = "drive#permission"
, _pDomain = Nothing
, _pRole = Nothing
, _pEmailAddress = Nothing
, _pAllowFileDiscovery = Nothing
, _pDisplayName = Nothing
, _pId = Nothing
, _pType = Nothing
, _pExpirationTime = Nothing
}
-- | A link to the user\'s profile photo, if available.
pPhotoLink :: Lens' Permission (Maybe Text)
pPhotoLink
= lens _pPhotoLink (\ s a -> s{_pPhotoLink = a})
-- | Identifies what kind of resource this is. Value: the fixed string
-- \"drive#permission\".
pKind :: Lens' Permission Text
pKind = lens _pKind (\ s a -> s{_pKind = a})
-- | The domain to which this permission refers.
pDomain :: Lens' Permission (Maybe Text)
pDomain = lens _pDomain (\ s a -> s{_pDomain = a})
-- | The role granted by this permission. Valid values are: - owner - writer
-- - commenter - reader
pRole :: Lens' Permission (Maybe Text)
pRole = lens _pRole (\ s a -> s{_pRole = a})
-- | The email address of the user or group to which this permission refers.
pEmailAddress :: Lens' Permission (Maybe Text)
pEmailAddress
= lens _pEmailAddress
(\ s a -> s{_pEmailAddress = a})
-- | Whether the permission allows the file to be discovered through search.
-- This is only applicable for permissions of type domain or anyone.
pAllowFileDiscovery :: Lens' Permission (Maybe Bool)
pAllowFileDiscovery
= lens _pAllowFileDiscovery
(\ s a -> s{_pAllowFileDiscovery = a})
-- | A displayable name for users, groups or domains.
pDisplayName :: Lens' Permission (Maybe Text)
pDisplayName
= lens _pDisplayName (\ s a -> s{_pDisplayName = a})
-- | The ID of this permission. This is a unique identifier for the grantee,
-- and is published in User resources as permissionId.
pId :: Lens' Permission (Maybe Text)
pId = lens _pId (\ s a -> s{_pId = a})
-- | The type of the grantee. Valid values are: - user - group - domain -
-- anyone
pType :: Lens' Permission (Maybe Text)
pType = lens _pType (\ s a -> s{_pType = a})
-- | The time at which this permission will expire (RFC 3339 date-time).
pExpirationTime :: Lens' Permission (Maybe UTCTime)
pExpirationTime
= lens _pExpirationTime
(\ s a -> s{_pExpirationTime = a})
. mapping _DateTime
instance FromJSON Permission where
parseJSON
= withObject "Permission"
(\ o ->
Permission' <$>
(o .:? "photoLink") <*>
(o .:? "kind" .!= "drive#permission")
<*> (o .:? "domain")
<*> (o .:? "role")
<*> (o .:? "emailAddress")
<*> (o .:? "allowFileDiscovery")
<*> (o .:? "displayName")
<*> (o .:? "id")
<*> (o .:? "type")
<*> (o .:? "expirationTime"))
instance ToJSON Permission where
toJSON Permission'{..}
= object
(catMaybes
[("photoLink" .=) <$> _pPhotoLink,
Just ("kind" .= _pKind), ("domain" .=) <$> _pDomain,
("role" .=) <$> _pRole,
("emailAddress" .=) <$> _pEmailAddress,
("allowFileDiscovery" .=) <$> _pAllowFileDiscovery,
("displayName" .=) <$> _pDisplayName,
("id" .=) <$> _pId, ("type" .=) <$> _pType,
("expirationTime" .=) <$> _pExpirationTime])
-- | The metadata for a file.
--
-- /See:/ 'file' smart constructor.
data File = File'
{ _fOwnedByMe :: !(Maybe Bool)
, _fThumbnailLink :: !(Maybe Text)
, _fFullFileExtension :: !(Maybe Text)
, _fModifiedTime :: !(Maybe DateTime')
, _fModifiedByMeTime :: !(Maybe DateTime')
, _fFileExtension :: !(Maybe Text)
, _fViewedByMe :: !(Maybe Bool)
, _fOwners :: !(Maybe [User])
, _fViewedByMeTime :: !(Maybe DateTime')
, _fModifiedByMe :: !(Maybe Bool)
, _fSize :: !(Maybe (Textual Int64))
, _fTrashed :: !(Maybe Bool)
, _fWebViewLink :: !(Maybe Text)
, _fCreatedTime :: !(Maybe DateTime')
, _fOriginalFilename :: !(Maybe Text)
, _fKind :: !Text
, _fLastModifyingUser :: !(Maybe User)
, _fIconLink :: !(Maybe Text)
, _fHasThumbnail :: !(Maybe Bool)
, _fThumbnailVersion :: !(Maybe (Textual Int64))
, _fImageMediaMetadata :: !(Maybe FileImageMediaMetadata)
, _fExplicitlyTrashed :: !(Maybe Bool)
, _fShared :: !(Maybe Bool)
, _fMD5Checksum :: !(Maybe Text)
, _fFolderColorRgb :: !(Maybe Text)
, _fMimeType :: !(Maybe Text)
, _fIsAppAuthorized :: !(Maybe Bool)
, _fName :: !(Maybe Text)
, _fParents :: !(Maybe [Text])
, _fStarred :: !(Maybe Bool)
, _fSpaces :: !(Maybe [Text])
, _fVersion :: !(Maybe (Textual Int64))
, _fWritersCanShare :: !(Maybe Bool)
, _fId :: !(Maybe Text)
, _fPermissions :: !(Maybe [Permission])
, _fQuotaBytesUsed :: !(Maybe (Textual Int64))
, _fAppProperties :: !(Maybe FileAppProperties)
, _fVideoMediaMetadata :: !(Maybe FileVideoMediaMetadata)
, _fSharedWithMeTime :: !(Maybe DateTime')
, _fHeadRevisionId :: !(Maybe Text)
, _fCapabilities :: !(Maybe FileCapabilities)
, _fDescription :: !(Maybe Text)
, _fViewersCanCopyContent :: !(Maybe Bool)
, _fSharingUser :: !(Maybe User)
, _fWebContentLink :: !(Maybe Text)
, _fContentHints :: !(Maybe FileContentHints)
, _fProperties :: !(Maybe FileProperties)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'File' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fOwnedByMe'
--
-- * 'fThumbnailLink'
--
-- * 'fFullFileExtension'
--
-- * 'fModifiedTime'
--
-- * 'fModifiedByMeTime'
--
-- * 'fFileExtension'
--
-- * 'fViewedByMe'
--
-- * 'fOwners'
--
-- * 'fViewedByMeTime'
--
-- * 'fModifiedByMe'
--
-- * 'fSize'
--
-- * 'fTrashed'
--
-- * 'fWebViewLink'
--
-- * 'fCreatedTime'
--
-- * 'fOriginalFilename'
--
-- * 'fKind'
--
-- * 'fLastModifyingUser'
--
-- * 'fIconLink'
--
-- * 'fHasThumbnail'
--
-- * 'fThumbnailVersion'
--
-- * 'fImageMediaMetadata'
--
-- * 'fExplicitlyTrashed'
--
-- * 'fShared'
--
-- * 'fMD5Checksum'
--
-- * 'fFolderColorRgb'
--
-- * 'fMimeType'
--
-- * 'fIsAppAuthorized'
--
-- * 'fName'
--
-- * 'fParents'
--
-- * 'fStarred'
--
-- * 'fSpaces'
--
-- * 'fVersion'
--
-- * 'fWritersCanShare'
--
-- * 'fId'
--
-- * 'fPermissions'
--
-- * 'fQuotaBytesUsed'
--
-- * 'fAppProperties'
--
-- * 'fVideoMediaMetadata'
--
-- * 'fSharedWithMeTime'
--
-- * 'fHeadRevisionId'
--
-- * 'fCapabilities'
--
-- * 'fDescription'
--
-- * 'fViewersCanCopyContent'
--
-- * 'fSharingUser'
--
-- * 'fWebContentLink'
--
-- * 'fContentHints'
--
-- * 'fProperties'
file
:: File
file =
File'
{ _fOwnedByMe = Nothing
, _fThumbnailLink = Nothing
, _fFullFileExtension = Nothing
, _fModifiedTime = Nothing
, _fModifiedByMeTime = Nothing
, _fFileExtension = Nothing
, _fViewedByMe = Nothing
, _fOwners = Nothing
, _fViewedByMeTime = Nothing
, _fModifiedByMe = Nothing
, _fSize = Nothing
, _fTrashed = Nothing
, _fWebViewLink = Nothing
, _fCreatedTime = Nothing
, _fOriginalFilename = Nothing
, _fKind = "drive#file"
, _fLastModifyingUser = Nothing
, _fIconLink = Nothing
, _fHasThumbnail = Nothing
, _fThumbnailVersion = Nothing
, _fImageMediaMetadata = Nothing
, _fExplicitlyTrashed = Nothing
, _fShared = Nothing
, _fMD5Checksum = Nothing
, _fFolderColorRgb = Nothing
, _fMimeType = Nothing
, _fIsAppAuthorized = Nothing
, _fName = Nothing
, _fParents = Nothing
, _fStarred = Nothing
, _fSpaces = Nothing
, _fVersion = Nothing
, _fWritersCanShare = Nothing
, _fId = Nothing
, _fPermissions = Nothing
, _fQuotaBytesUsed = Nothing
, _fAppProperties = Nothing
, _fVideoMediaMetadata = Nothing
, _fSharedWithMeTime = Nothing
, _fHeadRevisionId = Nothing
, _fCapabilities = Nothing
, _fDescription = Nothing
, _fViewersCanCopyContent = Nothing
, _fSharingUser = Nothing
, _fWebContentLink = Nothing
, _fContentHints = Nothing
, _fProperties = Nothing
}
-- | Whether the user owns the file.
fOwnedByMe :: Lens' File (Maybe Bool)
fOwnedByMe
= lens _fOwnedByMe (\ s a -> s{_fOwnedByMe = a})
-- | A short-lived link to the file\'s thumbnail, if available. Typically
-- lasts on the order of hours. Only populated when the requesting app can
-- access the file\'s content.
fThumbnailLink :: Lens' File (Maybe Text)
fThumbnailLink
= lens _fThumbnailLink
(\ s a -> s{_fThumbnailLink = a})
-- | The full file extension extracted from the name field. May contain
-- multiple concatenated extensions, such as \"tar.gz\". This is only
-- available for files with binary content in Drive. This is automatically
-- updated when the name field changes; however, it is not cleared if the
-- new name does not contain a valid extension.
fFullFileExtension :: Lens' File (Maybe Text)
fFullFileExtension
= lens _fFullFileExtension
(\ s a -> s{_fFullFileExtension = a})
-- | The last time the file was modified by anyone (RFC 3339 date-time). Note
-- that setting modifiedTime will also update modifiedByMeTime for the
-- user.
fModifiedTime :: Lens' File (Maybe UTCTime)
fModifiedTime
= lens _fModifiedTime
(\ s a -> s{_fModifiedTime = a})
. mapping _DateTime
-- | The last time the file was modified by the user (RFC 3339 date-time).
fModifiedByMeTime :: Lens' File (Maybe UTCTime)
fModifiedByMeTime
= lens _fModifiedByMeTime
(\ s a -> s{_fModifiedByMeTime = a})
. mapping _DateTime
-- | The final component of fullFileExtension. This is only available for
-- files with binary content in Drive.
fFileExtension :: Lens' File (Maybe Text)
fFileExtension
= lens _fFileExtension
(\ s a -> s{_fFileExtension = a})
-- | Whether the file has been viewed by this user.
fViewedByMe :: Lens' File (Maybe Bool)
fViewedByMe
= lens _fViewedByMe (\ s a -> s{_fViewedByMe = a})
-- | The owners of the file. Currently, only certain legacy files may have
-- more than one owner.
fOwners :: Lens' File [User]
fOwners
= lens _fOwners (\ s a -> s{_fOwners = a}) . _Default
. _Coerce
-- | The last time the file was viewed by the user (RFC 3339 date-time).
fViewedByMeTime :: Lens' File (Maybe UTCTime)
fViewedByMeTime
= lens _fViewedByMeTime
(\ s a -> s{_fViewedByMeTime = a})
. mapping _DateTime
-- | Whether the file has been modified by this user.
fModifiedByMe :: Lens' File (Maybe Bool)
fModifiedByMe
= lens _fModifiedByMe
(\ s a -> s{_fModifiedByMe = a})
-- | The size of the file\'s content in bytes. This is only applicable to
-- files with binary content in Drive.
fSize :: Lens' File (Maybe Int64)
fSize
= lens _fSize (\ s a -> s{_fSize = a}) .
mapping _Coerce
-- | Whether the file has been trashed, either explicitly or from a trashed
-- parent folder. Only the owner may trash a file, and other users cannot
-- see files in the owner\'s trash.
fTrashed :: Lens' File (Maybe Bool)
fTrashed = lens _fTrashed (\ s a -> s{_fTrashed = a})
-- | A link for opening the file in a relevant Google editor or viewer in a
-- browser.
fWebViewLink :: Lens' File (Maybe Text)
fWebViewLink
= lens _fWebViewLink (\ s a -> s{_fWebViewLink = a})
-- | The time at which the file was created (RFC 3339 date-time).
fCreatedTime :: Lens' File (Maybe UTCTime)
fCreatedTime
= lens _fCreatedTime (\ s a -> s{_fCreatedTime = a})
. mapping _DateTime
-- | The original filename of the uploaded content if available, or else the
-- original value of the name field. This is only available for files with
-- binary content in Drive.
fOriginalFilename :: Lens' File (Maybe Text)
fOriginalFilename
= lens _fOriginalFilename
(\ s a -> s{_fOriginalFilename = a})
-- | Identifies what kind of resource this is. Value: the fixed string
-- \"drive#file\".
fKind :: Lens' File Text
fKind = lens _fKind (\ s a -> s{_fKind = a})
-- | The last user to modify the file.
fLastModifyingUser :: Lens' File (Maybe User)
fLastModifyingUser
= lens _fLastModifyingUser
(\ s a -> s{_fLastModifyingUser = a})
-- | A static, unauthenticated link to the file\'s icon.
fIconLink :: Lens' File (Maybe Text)
fIconLink
= lens _fIconLink (\ s a -> s{_fIconLink = a})
-- | Whether this file has a thumbnail.
fHasThumbnail :: Lens' File (Maybe Bool)
fHasThumbnail
= lens _fHasThumbnail
(\ s a -> s{_fHasThumbnail = a})
-- | The thumbnail version for use in thumbnail cache invalidation.
fThumbnailVersion :: Lens' File (Maybe Int64)
fThumbnailVersion
= lens _fThumbnailVersion
(\ s a -> s{_fThumbnailVersion = a})
. mapping _Coerce
-- | Additional metadata about image media, if available.
fImageMediaMetadata :: Lens' File (Maybe FileImageMediaMetadata)
fImageMediaMetadata
= lens _fImageMediaMetadata
(\ s a -> s{_fImageMediaMetadata = a})
-- | Whether the file has been explicitly trashed, as opposed to recursively
-- trashed from a parent folder.
fExplicitlyTrashed :: Lens' File (Maybe Bool)
fExplicitlyTrashed
= lens _fExplicitlyTrashed
(\ s a -> s{_fExplicitlyTrashed = a})
-- | Whether the file has been shared.
fShared :: Lens' File (Maybe Bool)
fShared = lens _fShared (\ s a -> s{_fShared = a})
-- | The MD5 checksum for the content of the file. This is only applicable to
-- files with binary content in Drive.
fMD5Checksum :: Lens' File (Maybe Text)
fMD5Checksum
= lens _fMD5Checksum (\ s a -> s{_fMD5Checksum = a})
-- | The color for a folder as an RGB hex string. The supported colors are
-- published in the folderColorPalette field of the About resource. If an
-- unsupported color is specified, the closest color in the palette will be
-- used instead.
fFolderColorRgb :: Lens' File (Maybe Text)
fFolderColorRgb
= lens _fFolderColorRgb
(\ s a -> s{_fFolderColorRgb = a})
-- | The MIME type of the file. Drive will attempt to automatically detect an
-- appropriate value from uploaded content if no value is provided. The
-- value cannot be changed unless a new revision is uploaded. If a file is
-- created with a Google Doc MIME type, the uploaded content will be
-- imported if possible. The supported import formats are published in the
-- About resource.
fMimeType :: Lens' File (Maybe Text)
fMimeType
= lens _fMimeType (\ s a -> s{_fMimeType = a})
-- | Whether the file was created or opened by the requesting app.
fIsAppAuthorized :: Lens' File (Maybe Bool)
fIsAppAuthorized
= lens _fIsAppAuthorized
(\ s a -> s{_fIsAppAuthorized = a})
-- | The name of the file. This is not necessarily unique within a folder.
fName :: Lens' File (Maybe Text)
fName = lens _fName (\ s a -> s{_fName = a})
-- | The IDs of the parent folders which contain the file. If not specified
-- as part of a create request, the file will be placed directly in the My
-- Drive folder. Update requests must use the addParents and removeParents
-- parameters to modify the values.
fParents :: Lens' File [Text]
fParents
= lens _fParents (\ s a -> s{_fParents = a}) .
_Default
. _Coerce
-- | Whether the user has starred the file.
fStarred :: Lens' File (Maybe Bool)
fStarred = lens _fStarred (\ s a -> s{_fStarred = a})
-- | The list of spaces which contain the file. The currently supported
-- values are \'drive\', \'appDataFolder\' and \'photos\'.
fSpaces :: Lens' File [Text]
fSpaces
= lens _fSpaces (\ s a -> s{_fSpaces = a}) . _Default
. _Coerce
-- | A monotonically increasing version number for the file. This reflects
-- every change made to the file on the server, even those not visible to
-- the user.
fVersion :: Lens' File (Maybe Int64)
fVersion
= lens _fVersion (\ s a -> s{_fVersion = a}) .
mapping _Coerce
-- | Whether users with only writer permission can modify the file\'s
-- permissions.
fWritersCanShare :: Lens' File (Maybe Bool)
fWritersCanShare
= lens _fWritersCanShare
(\ s a -> s{_fWritersCanShare = a})
-- | The ID of the file.
fId :: Lens' File (Maybe Text)
fId = lens _fId (\ s a -> s{_fId = a})
-- | The full list of permissions for the file. This is only available if the
-- requesting user can share the file.
fPermissions :: Lens' File [Permission]
fPermissions
= lens _fPermissions (\ s a -> s{_fPermissions = a})
. _Default
. _Coerce
-- | The number of storage quota bytes used by the file. This includes the
-- head revision as well as previous revisions with keepForever enabled.
fQuotaBytesUsed :: Lens' File (Maybe Int64)
fQuotaBytesUsed
= lens _fQuotaBytesUsed
(\ s a -> s{_fQuotaBytesUsed = a})
. mapping _Coerce
-- | A collection of arbitrary key-value pairs which are private to the
-- requesting app. Entries with null values are cleared in update and copy
-- requests.
fAppProperties :: Lens' File (Maybe FileAppProperties)
fAppProperties
= lens _fAppProperties
(\ s a -> s{_fAppProperties = a})
-- | Additional metadata about video media. This may not be available
-- immediately upon upload.
fVideoMediaMetadata :: Lens' File (Maybe FileVideoMediaMetadata)
fVideoMediaMetadata
= lens _fVideoMediaMetadata
(\ s a -> s{_fVideoMediaMetadata = a})
-- | The time at which the file was shared with the user, if applicable (RFC
-- 3339 date-time).
fSharedWithMeTime :: Lens' File (Maybe UTCTime)
fSharedWithMeTime
= lens _fSharedWithMeTime
(\ s a -> s{_fSharedWithMeTime = a})
. mapping _DateTime
-- | The ID of the file\'s head revision. This is currently only available
-- for files with binary content in Drive.
fHeadRevisionId :: Lens' File (Maybe Text)
fHeadRevisionId
= lens _fHeadRevisionId
(\ s a -> s{_fHeadRevisionId = a})
-- | Capabilities the current user has on the file.
fCapabilities :: Lens' File (Maybe FileCapabilities)
fCapabilities
= lens _fCapabilities
(\ s a -> s{_fCapabilities = a})
-- | A short description of the file.
fDescription :: Lens' File (Maybe Text)
fDescription
= lens _fDescription (\ s a -> s{_fDescription = a})
-- | Whether users with only reader or commenter permission can copy the
-- file\'s content. This affects copy, download, and print operations.
fViewersCanCopyContent :: Lens' File (Maybe Bool)
fViewersCanCopyContent
= lens _fViewersCanCopyContent
(\ s a -> s{_fViewersCanCopyContent = a})
-- | The user who shared the file with the requesting user, if applicable.
fSharingUser :: Lens' File (Maybe User)
fSharingUser
= lens _fSharingUser (\ s a -> s{_fSharingUser = a})
-- | A link for downloading the content of the file in a browser. This is
-- only available for files with binary content in Drive.
fWebContentLink :: Lens' File (Maybe Text)
fWebContentLink
= lens _fWebContentLink
(\ s a -> s{_fWebContentLink = a})
-- | Additional information about the content of the file. These fields are
-- never populated in responses.
fContentHints :: Lens' File (Maybe FileContentHints)
fContentHints
= lens _fContentHints
(\ s a -> s{_fContentHints = a})
-- | A collection of arbitrary key-value pairs which are visible to all apps.
-- Entries with null values are cleared in update and copy requests.
fProperties :: Lens' File (Maybe FileProperties)
fProperties
= lens _fProperties (\ s a -> s{_fProperties = a})
instance FromJSON File where
parseJSON
= withObject "File"
(\ o ->
File' <$>
(o .:? "ownedByMe") <*> (o .:? "thumbnailLink") <*>
(o .:? "fullFileExtension")
<*> (o .:? "modifiedTime")
<*> (o .:? "modifiedByMeTime")
<*> (o .:? "fileExtension")
<*> (o .:? "viewedByMe")
<*> (o .:? "owners" .!= mempty)
<*> (o .:? "viewedByMeTime")
<*> (o .:? "modifiedByMe")
<*> (o .:? "size")
<*> (o .:? "trashed")
<*> (o .:? "webViewLink")
<*> (o .:? "createdTime")
<*> (o .:? "originalFilename")
<*> (o .:? "kind" .!= "drive#file")
<*> (o .:? "lastModifyingUser")
<*> (o .:? "iconLink")
<*> (o .:? "hasThumbnail")
<*> (o .:? "thumbnailVersion")
<*> (o .:? "imageMediaMetadata")
<*> (o .:? "explicitlyTrashed")
<*> (o .:? "shared")
<*> (o .:? "md5Checksum")
<*> (o .:? "folderColorRgb")
<*> (o .:? "mimeType")
<*> (o .:? "isAppAuthorized")
<*> (o .:? "name")
<*> (o .:? "parents" .!= mempty)
<*> (o .:? "starred")
<*> (o .:? "spaces" .!= mempty)
<*> (o .:? "version")
<*> (o .:? "writersCanShare")
<*> (o .:? "id")
<*> (o .:? "permissions" .!= mempty)
<*> (o .:? "quotaBytesUsed")
<*> (o .:? "appProperties")
<*> (o .:? "videoMediaMetadata")
<*> (o .:? "sharedWithMeTime")
<*> (o .:? "headRevisionId")
<*> (o .:? "capabilities")
<*> (o .:? "description")
<*> (o .:? "viewersCanCopyContent")
<*> (o .:? "sharingUser")
<*> (o .:? "webContentLink")
<*> (o .:? "contentHints")
<*> (o .:? "properties"))
instance ToJSON File where
toJSON File'{..}
= object
(catMaybes
[("ownedByMe" .=) <$> _fOwnedByMe,
("thumbnailLink" .=) <$> _fThumbnailLink,
("fullFileExtension" .=) <$> _fFullFileExtension,
("modifiedTime" .=) <$> _fModifiedTime,
("modifiedByMeTime" .=) <$> _fModifiedByMeTime,
("fileExtension" .=) <$> _fFileExtension,
("viewedByMe" .=) <$> _fViewedByMe,
("owners" .=) <$> _fOwners,
("viewedByMeTime" .=) <$> _fViewedByMeTime,
("modifiedByMe" .=) <$> _fModifiedByMe,
("size" .=) <$> _fSize, ("trashed" .=) <$> _fTrashed,
("webViewLink" .=) <$> _fWebViewLink,
("createdTime" .=) <$> _fCreatedTime,
("originalFilename" .=) <$> _fOriginalFilename,
Just ("kind" .= _fKind),
("lastModifyingUser" .=) <$> _fLastModifyingUser,
("iconLink" .=) <$> _fIconLink,
("hasThumbnail" .=) <$> _fHasThumbnail,
("thumbnailVersion" .=) <$> _fThumbnailVersion,
("imageMediaMetadata" .=) <$> _fImageMediaMetadata,
("explicitlyTrashed" .=) <$> _fExplicitlyTrashed,
("shared" .=) <$> _fShared,
("md5Checksum" .=) <$> _fMD5Checksum,
("folderColorRgb" .=) <$> _fFolderColorRgb,
("mimeType" .=) <$> _fMimeType,
("isAppAuthorized" .=) <$> _fIsAppAuthorized,
("name" .=) <$> _fName, ("parents" .=) <$> _fParents,
("starred" .=) <$> _fStarred,
("spaces" .=) <$> _fSpaces,
("version" .=) <$> _fVersion,
("writersCanShare" .=) <$> _fWritersCanShare,
("id" .=) <$> _fId,
("permissions" .=) <$> _fPermissions,
("quotaBytesUsed" .=) <$> _fQuotaBytesUsed,
("appProperties" .=) <$> _fAppProperties,
("videoMediaMetadata" .=) <$> _fVideoMediaMetadata,
("sharedWithMeTime" .=) <$> _fSharedWithMeTime,
("headRevisionId" .=) <$> _fHeadRevisionId,
("capabilities" .=) <$> _fCapabilities,
("description" .=) <$> _fDescription,
("viewersCanCopyContent" .=) <$>
_fViewersCanCopyContent,
("sharingUser" .=) <$> _fSharingUser,
("webContentLink" .=) <$> _fWebContentLink,
("contentHints" .=) <$> _fContentHints,
("properties" .=) <$> _fProperties])
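-- Example (illustrative only; f stands for any existing 'File' value, for
-- instance one returned by the Drive API): fields are read and updated
-- through the lenses above with the usual operators from the lens package,
-- e.g.
--
-- > f ^. fId                        -- read the file id
-- > f & fDescription ?~ "quarterly report"  -- set the description
-- > f & fWritersCanShare ?~ False   -- keep writers from changing permissions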
-- | A list of generated file IDs which can be provided in create requests.
--
-- /See:/ 'generatedIds' smart constructor.
data GeneratedIds = GeneratedIds'
{ _giSpace :: !(Maybe Text)
, _giKind :: !Text
, _giIds :: !(Maybe [Text])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GeneratedIds' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'giSpace'
--
-- * 'giKind'
--
-- * 'giIds'
generatedIds
:: GeneratedIds
generatedIds =
GeneratedIds'
{ _giSpace = Nothing
, _giKind = "drive#generatedIds"
, _giIds = Nothing
}
-- | The type of file that can be created with these IDs.
giSpace :: Lens' GeneratedIds (Maybe Text)
giSpace = lens _giSpace (\ s a -> s{_giSpace = a})
-- | Identifies what kind of resource this is. Value: the fixed string
-- \"drive#generatedIds\".
giKind :: Lens' GeneratedIds Text
giKind = lens _giKind (\ s a -> s{_giKind = a})
-- | The IDs generated for the requesting user in the specified space.
giIds :: Lens' GeneratedIds [Text]
giIds
= lens _giIds (\ s a -> s{_giIds = a}) . _Default .
_Coerce
instance FromJSON GeneratedIds where
parseJSON
= withObject "GeneratedIds"
(\ o ->
GeneratedIds' <$>
(o .:? "space") <*>
(o .:? "kind" .!= "drive#generatedIds")
<*> (o .:? "ids" .!= mempty))
instance ToJSON GeneratedIds where
toJSON GeneratedIds'{..}
= object
(catMaybes
[("space" .=) <$> _giSpace, Just ("kind" .= _giKind),
("ids" .=) <$> _giIds])
-- | A list of comments on a file.
--
-- /See:/ 'commentList' smart constructor.
data CommentList = CommentList'
{ _cllNextPageToken :: !(Maybe Text)
, _cllKind :: !Text
, _cllComments :: !(Maybe [Comment])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CommentList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cllNextPageToken'
--
-- * 'cllKind'
--
-- * 'cllComments'
commentList
:: CommentList
commentList =
CommentList'
{ _cllNextPageToken = Nothing
, _cllKind = "drive#commentList"
, _cllComments = Nothing
}
-- | The page token for the next page of comments. This will be absent if the
-- end of the comments list has been reached. If the token is rejected for
-- any reason, it should be discarded, and pagination should be restarted
-- from the first page of results.
cllNextPageToken :: Lens' CommentList (Maybe Text)
cllNextPageToken
= lens _cllNextPageToken
(\ s a -> s{_cllNextPageToken = a})
-- | Identifies what kind of resource this is. Value: the fixed string
-- \"drive#commentList\".
cllKind :: Lens' CommentList Text
cllKind = lens _cllKind (\ s a -> s{_cllKind = a})
-- | The list of comments. If nextPageToken is populated, then this list may
-- be incomplete and an additional page of results should be fetched.
cllComments :: Lens' CommentList [Comment]
cllComments
= lens _cllComments (\ s a -> s{_cllComments = a}) .
_Default
. _Coerce
instance FromJSON CommentList where
parseJSON
= withObject "CommentList"
(\ o ->
CommentList' <$>
(o .:? "nextPageToken") <*>
(o .:? "kind" .!= "drive#commentList")
<*> (o .:? "comments" .!= mempty))
instance ToJSON CommentList where
toJSON CommentList'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _cllNextPageToken,
Just ("kind" .= _cllKind),
("comments" .=) <$> _cllComments])
-- | A list of revisions of a file.
--
-- /See:/ 'revisionList' smart constructor.
data RevisionList = RevisionList'
{ _rllNextPageToken :: !(Maybe Text)
, _rllKind :: !Text
, _rllRevisions :: !(Maybe [Revision])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'RevisionList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rllNextPageToken'
--
-- * 'rllKind'
--
-- * 'rllRevisions'
revisionList
:: RevisionList
revisionList =
RevisionList'
{ _rllNextPageToken = Nothing
, _rllKind = "drive#revisionList"
, _rllRevisions = Nothing
}
-- | The page token for the next page of revisions. This will be absent if
-- the end of the revisions list has been reached. If the token is rejected
-- for any reason, it should be discarded, and pagination should be
-- restarted from the first page of results.
rllNextPageToken :: Lens' RevisionList (Maybe Text)
rllNextPageToken
= lens _rllNextPageToken
(\ s a -> s{_rllNextPageToken = a})
-- | Identifies what kind of resource this is. Value: the fixed string
-- \"drive#revisionList\".
rllKind :: Lens' RevisionList Text
rllKind = lens _rllKind (\ s a -> s{_rllKind = a})
-- | The list of revisions. If nextPageToken is populated, then this list may
-- be incomplete and an additional page of results should be fetched.
rllRevisions :: Lens' RevisionList [Revision]
rllRevisions
= lens _rllRevisions (\ s a -> s{_rllRevisions = a})
. _Default
. _Coerce
instance FromJSON RevisionList where
parseJSON
= withObject "RevisionList"
(\ o ->
RevisionList' <$>
(o .:? "nextPageToken") <*>
(o .:? "kind" .!= "drive#revisionList")
<*> (o .:? "revisions" .!= mempty))
instance ToJSON RevisionList where
toJSON RevisionList'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _rllNextPageToken,
Just ("kind" .= _rllKind),
("revisions" .=) <$> _rllRevisions])
-- | A list of permissions for a file.
--
-- /See:/ 'permissionList' smart constructor.
data PermissionList = PermissionList'
{ _plKind :: !Text
, _plPermissions :: !(Maybe [Permission])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PermissionList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plKind'
--
-- * 'plPermissions'
permissionList
:: PermissionList
permissionList =
PermissionList'
{ _plKind = "drive#permissionList"
, _plPermissions = Nothing
}
-- | Identifies what kind of resource this is. Value: the fixed string
-- \"drive#permissionList\".
plKind :: Lens' PermissionList Text
plKind = lens _plKind (\ s a -> s{_plKind = a})
-- | The list of permissions.
plPermissions :: Lens' PermissionList [Permission]
plPermissions
= lens _plPermissions
(\ s a -> s{_plPermissions = a})
. _Default
. _Coerce
instance FromJSON PermissionList where
parseJSON
= withObject "PermissionList"
(\ o ->
PermissionList' <$>
(o .:? "kind" .!= "drive#permissionList") <*>
(o .:? "permissions" .!= mempty))
instance ToJSON PermissionList where
toJSON PermissionList'{..}
= object
(catMaybes
[Just ("kind" .= _plKind),
("permissions" .=) <$> _plPermissions])
| rueshyna/gogol | gogol-drive/gen/Network/Google/Drive/Types/Product.hs | mpl-2.0 | 100,969 | 0 | 57 | 27,144 | 20,765 | 11,941 | 8,824 | 2,233 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.BigQueryDataTransfer.Projects.Locations.TransferConfigs.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes a data transfer configuration, including any associated transfer
-- runs and logs.
--
-- /See:/ <https://cloud.google.com/bigquery-transfer/ BigQuery Data Transfer API Reference> for @bigquerydatatransfer.projects.locations.transferConfigs.delete@.
module Network.Google.Resource.BigQueryDataTransfer.Projects.Locations.TransferConfigs.Delete
(
-- * REST Resource
ProjectsLocationsTransferConfigsDeleteResource
-- * Creating a Request
, projectsLocationsTransferConfigsDelete
, ProjectsLocationsTransferConfigsDelete
-- * Request Lenses
, pltcdXgafv
, pltcdUploadProtocol
, pltcdAccessToken
, pltcdUploadType
, pltcdName
, pltcdCallback
) where
import Network.Google.BigQueryDataTransfer.Types
import Network.Google.Prelude
-- | A resource alias for @bigquerydatatransfer.projects.locations.transferConfigs.delete@ method which the
-- 'ProjectsLocationsTransferConfigsDelete' request conforms to.
type ProjectsLocationsTransferConfigsDeleteResource =
"v1" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] Empty
-- | Deletes a data transfer configuration, including any associated transfer
-- runs and logs.
--
-- /See:/ 'projectsLocationsTransferConfigsDelete' smart constructor.
data ProjectsLocationsTransferConfigsDelete =
ProjectsLocationsTransferConfigsDelete'
{ _pltcdXgafv :: !(Maybe Xgafv)
, _pltcdUploadProtocol :: !(Maybe Text)
, _pltcdAccessToken :: !(Maybe Text)
, _pltcdUploadType :: !(Maybe Text)
, _pltcdName :: !Text
, _pltcdCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsTransferConfigsDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pltcdXgafv'
--
-- * 'pltcdUploadProtocol'
--
-- * 'pltcdAccessToken'
--
-- * 'pltcdUploadType'
--
-- * 'pltcdName'
--
-- * 'pltcdCallback'
projectsLocationsTransferConfigsDelete
:: Text -- ^ 'pltcdName'
-> ProjectsLocationsTransferConfigsDelete
projectsLocationsTransferConfigsDelete pPltcdName_ =
ProjectsLocationsTransferConfigsDelete'
{ _pltcdXgafv = Nothing
, _pltcdUploadProtocol = Nothing
, _pltcdAccessToken = Nothing
, _pltcdUploadType = Nothing
, _pltcdName = pPltcdName_
, _pltcdCallback = Nothing
}
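-- A minimal request value (added example; the resource name below is made
-- up): the required transfer-config name is supplied positionally, and the
-- optional query parameters can then be adjusted through the lenses that
-- follow.
exampleDeleteRequest :: ProjectsLocationsTransferConfigsDelete
exampleDeleteRequest =
  projectsLocationsTransferConfigsDelete
    "projects/my-project/locations/us/transferConfigs/my-config"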
-- | V1 error format.
pltcdXgafv :: Lens' ProjectsLocationsTransferConfigsDelete (Maybe Xgafv)
pltcdXgafv
= lens _pltcdXgafv (\ s a -> s{_pltcdXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pltcdUploadProtocol :: Lens' ProjectsLocationsTransferConfigsDelete (Maybe Text)
pltcdUploadProtocol
= lens _pltcdUploadProtocol
(\ s a -> s{_pltcdUploadProtocol = a})
-- | OAuth access token.
pltcdAccessToken :: Lens' ProjectsLocationsTransferConfigsDelete (Maybe Text)
pltcdAccessToken
= lens _pltcdAccessToken
(\ s a -> s{_pltcdAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pltcdUploadType :: Lens' ProjectsLocationsTransferConfigsDelete (Maybe Text)
pltcdUploadType
= lens _pltcdUploadType
(\ s a -> s{_pltcdUploadType = a})
-- | Required. The field will contain name of the resource requested, for
-- example: \`projects\/{project_id}\/transferConfigs\/{config_id}\` or
-- \`projects\/{project_id}\/locations\/{location_id}\/transferConfigs\/{config_id}\`
pltcdName :: Lens' ProjectsLocationsTransferConfigsDelete Text
pltcdName
= lens _pltcdName (\ s a -> s{_pltcdName = a})
-- | JSONP
pltcdCallback :: Lens' ProjectsLocationsTransferConfigsDelete (Maybe Text)
pltcdCallback
= lens _pltcdCallback
(\ s a -> s{_pltcdCallback = a})
instance GoogleRequest
ProjectsLocationsTransferConfigsDelete
where
type Rs ProjectsLocationsTransferConfigsDelete =
Empty
type Scopes ProjectsLocationsTransferConfigsDelete =
'["https://www.googleapis.com/auth/bigquery",
"https://www.googleapis.com/auth/cloud-platform"]
requestClient
ProjectsLocationsTransferConfigsDelete'{..}
= go _pltcdName _pltcdXgafv _pltcdUploadProtocol
_pltcdAccessToken
_pltcdUploadType
_pltcdCallback
(Just AltJSON)
bigQueryDataTransferService
where go
= buildClient
(Proxy ::
Proxy ProjectsLocationsTransferConfigsDeleteResource)
mempty
| brendanhay/gogol | gogol-bigquerydatatransfer/gen/Network/Google/Resource/BigQueryDataTransfer/Projects/Locations/TransferConfigs/Delete.hs | mpl-2.0 | 5,634 | 0 | 15 | 1,144 | 703 | 413 | 290 | 108 | 1 |
module Tables.A274079Spec (main, spec) where
import Test.Hspec
import Tables.A274079 (a274079)
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "A274079" $
it "correctly computes the first 20 elements" $
take 20 (map a274079 [1..]) `shouldBe` expectedValue where
expectedValue = [2,4,5,4,7,8,7,9,8,7,11,12,11,13,12,11,14,13,12,11]
| peterokagey/haskellOEIS | test/Tables/A274079Spec.hs | apache-2.0 | 357 | 0 | 10 | 59 | 160 | 95 | 65 | 10 | 1 |
-- http://www.codewars.com/kata/53d16bd82578b1fb5b00128c
module Codewars.Kata.Grade where
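-- Map a fractional score to a letter grade by scanning an ordered list of
-- (predicate, grade) pairs and returning the grade paired with the first
-- predicate that holds; scores below 0.6 fall through to the final
-- catch-all 'F'. For example, grader 0.84 == 'B' and grader 1.2 == 'F'.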
grader :: Double -> Char
grader n = snd . head . filter (\(f, _) -> f n) $ grades where
grades =
[ ((>= 1.0), 'F')
, ((>= 0.9), 'A')
, ((>= 0.8), 'B')
, ((>= 0.7), 'C')
, ((>= 0.6), 'D')
, (const True, 'F')
] | Bodigrim/katas | src/haskell/8-Grader.hs | bsd-2-clause | 329 | 0 | 10 | 83 | 141 | 88 | 53 | 10 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Settings are centralized, as much as possible, into this file. This
-- includes database connection settings, static file locations, etc.
-- In addition, you can configure a number of different aspects of Yesod
-- by overriding methods in the Yesod typeclass. That instance is
-- declared in the YesodBBS.hs file.
module Settings
( hamletFile
, cassiusFile
, juliusFile
, widgetFile
, connStr
, ConnectionPool
, withConnectionPool
, runConnectionPool
, approot
, staticroot
, staticdir
) where
import qualified Text.Hamlet as H
import qualified Text.Cassius as H
import qualified Text.Julius as H
import Language.Haskell.TH.Syntax
import Database.Persist.Sqlite
import Yesod (MonadPeelIO, addWidget, addCassius, addJulius)
import Data.Monoid (mempty)
import System.Directory (doesFileExist)
-- | The base URL for your application. This will usually be different for
-- development and production. Yesod automatically constructs URLs for you,
-- so this value must be accurate to create valid links.
approot :: String
#ifdef PRODUCTION
-- You probably want to change this. If your domain name was "yesod.com",
-- you would probably want it to be:
-- > approot = "http://www.yesod.com"
-- Please note that there is no trailing slash.
approot = "http://localhost:3000"
#else
approot = "http://localhost:3000"
#endif
-- | The location of static files on your system. This is a file system
-- path. The default value works properly with your scaffolded site.
staticdir :: FilePath
staticdir = "static"
-- | The base URL for your static files. As you can see by the default
-- value, this can simply be "static" appended to your application root.
-- A powerful optimization can be serving static files from a separate
-- domain name. This allows you to use a web server optimized for static
-- files, more easily set expires and cache values, and avoid possibly
-- costly transference of cookies on static files. For more information,
-- please see:
-- http://code.google.com/speed/page-speed/docs/request.html#ServeFromCookielessDomain
--
-- If you change the resource pattern for StaticR in YesodBBS.hs, you will
-- have to make a corresponding change here.
--
-- To see how this value is used, see urlRenderOverride in YesodBBS.hs
staticroot :: String
staticroot = approot ++ "/static"
-- | The database connection string. The meaning of this string is backend-
-- specific.
connStr :: String
#ifdef PRODUCTION
connStr = "production.db3"
#else
connStr = "debug.db3"
#endif
-- | Your application will keep a connection pool and take connections from
-- there as necessary instead of continually creating new connections. This
-- value gives the maximum number of connections to be open at a given time.
-- If your application requests a connection when all connections are in
-- use, that request will fail. Try to choose a number that will work well
-- with the system resources available to you while providing enough
-- connections for your expected load.
--
-- Also, connections are returned to the pool as quickly as possible by
-- Yesod to avoid resource exhaustion. A connection is only considered in
-- use while within a call to runDB.
connectionCount :: Int
connectionCount = 10
-- The rest of this file contains settings which rarely need changing by a
-- user.
-- The following three functions are used for calling HTML, CSS and
-- Javascript templates from your Haskell code. During development,
-- the "Debug" versions of these functions are used so that changes to
-- the templates are immediately reflected in an already running
-- application. When making a production compile, the non-debug version
-- is used for increased performance.
--
-- You can see an example of how to call these functions in Handler/Root.hs
--
-- Note: due to polymorphic Hamlet templates, hamletFileDebug is no longer
-- used; to get the same auto-loading effect, it is recommended that you
-- use the devel server.
toHamletFile, toCassiusFile, toJuliusFile :: String -> FilePath
toHamletFile x = "hamlet/" ++ x ++ ".hamlet"
toCassiusFile x = "cassius/" ++ x ++ ".cassius"
toJuliusFile x = "julius/" ++ x ++ ".julius"
hamletFile :: FilePath -> Q Exp
hamletFile = H.hamletFile . toHamletFile
cassiusFile :: FilePath -> Q Exp
#ifdef PRODUCTION
cassiusFile = H.cassiusFile . toCassiusFile
#else
cassiusFile = H.cassiusFileDebug . toCassiusFile
#endif
juliusFile :: FilePath -> Q Exp
#ifdef PRODUCTION
juliusFile = H.juliusFile . toJuliusFile
#else
juliusFile = H.juliusFileDebug . toJuliusFile
#endif
widgetFile :: FilePath -> Q Exp
widgetFile x = do
let h = unlessExists toHamletFile hamletFile
let c = unlessExists toCassiusFile cassiusFile
let j = unlessExists toJuliusFile juliusFile
[|addWidget $h >> addCassius $c >> addJulius $j|]
where
unlessExists tofn f = do
e <- qRunIO $ doesFileExist $ tofn x
if e then f x else [|mempty|]
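-- Example (illustrative): in a handler, @$(widgetFile "homepage")@ splices
-- hamlet/homepage.hamlet, cassius/homepage.cassius and julius/homepage.julius
-- into a single widget, substituting 'mempty' for any of the three template
-- files that does not exist on disk.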
-- The next two functions are for allocating a connection pool and running
-- database actions using a pool, respectively. It is used internally
-- by the scaffolded application, and therefore you will rarely need to use
-- them yourself.
withConnectionPool :: MonadPeelIO m => (ConnectionPool -> m a) -> m a
withConnectionPool = withSqlitePool connStr connectionCount
runConnectionPool :: MonadPeelIO m => SqlPersist m a -> ConnectionPool -> m a
runConnectionPool = runSqlPool
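-- For example (illustrative), the scaffolded site runs its migrations with
-- something like
--
-- > withConnectionPool $ \pool -> runConnectionPool (runMigration migrateAll) pool
--
-- where 'migrateAll' is whatever migration your model definitions generate.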
| tanakh/yesod-bbs | Settings.hs | bsd-2-clause | 5,463 | 0 | 11 | 944 | 556 | 340 | 216 | 55 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-|
Module : Lang.Cortho.Parser
Description : Cortho parser
Copyright : (c) Benjamin F Jones, 2016
License : BSD-3
Maintainer : [email protected]
Stability : experimental
Portability : POSIX
This module implements the parser for Cortho using parsec ("Text.Parsec"). It
handles precedence and associativity of all the binary / unary operators and
function application.
-}
module Lang.Cortho.Parser
  ( -- * core language syntax
parseProgram
, parseSC
, parseExpr
, parseAlt
, parseAlts
, parseIdent
, parseNum
-- * utilities
, parseList
-- * re-exported from Text.Parsec
, parse
)
where
import Control.Monad
import Data.Char (isAlphaNum)
import Data.Foldable (foldl1)
import Data.Maybe (isJust)
import Data.Text (Text)
import qualified Data.Text as T
import GHC.Exts (IsString(..))
import Text.Parsec
import Text.Parsec.Char
import Text.Parsec.String
import Text.PrettyPrint.HughesPJClass (pPrint)
import Lang.Cortho.Types
import Debug.Trace (trace)
------------------------------------------------------------------------
-- Keywords of the Core Language
------------------------------------------------------------------------
keywords :: [Text]
keywords =
[ "let"
, "letrec"
, "in"
, "case"
, "of"
, "Pack"
]
isKeyword :: Ident -> Bool
isKeyword s = unIdent s `elem` keywords
------------------------------------------------------------------------
-- Parser Combinators for the Core Language
------------------------------------------------------------------------
{- Core Language Example:
> main = double 2;
> double x = 2 * x
> f = 3;
> g x y = let z = x in z;
> h x = case (let y = x in y) of
> <1> -> 2;
> <2> -> 5
-}
-- | Parse a core language program
parseProgram :: Parser Program
parseProgram = Program <$> (parseSC `sepBy` term)
-- | Parse a core language supercombinator definition
parseSC :: Parser ScDef
parseSC = do
name <- parseIdent
binds <- parseIdent `sepBy` ws
equals
rhs <- parseExpr
return $ ScDef
{ scName = name
, scBinds = binds
, scExpr = rhs
}
-- | Parse a core language expression
parseExpr :: Parser CoreExpr
parseExpr = parseELet
<|> parseECase
<|> parseELam
<|> parseE1
where
-- Let/Letrec expression
parseELet = do
void $ string "let"
mrec <- optionMaybe $ try (string "rec")
ws1
decls <- parseDecl `sepBy1` term
kw "in"
e2 <- parseExpr
return $ ELet (isJust mrec) decls e2
-- Let variable binding
parseDecl = do
EVar bind <- parseEVar
symbol '='
e1 <- parseExpr
return (bind, e1)
-- Case expression
parseECase = do
kw "case"
e1 <- parseExpr
kw "of"
alts <- parseAlts
return $ ECase e1 alts
-- Lambda
parseELam = do
lambda
vars <- parseIdent `sepBy` ws
arrow
e <- parseExpr
return $ ELam vars e
-- The following productions encode operator precedence ----------------
-- Case: expr1 -> expr2 | expr1. (|) is right associative.
parseE1 = do
e2 <- parseE2
e1' <- parseE1'
return $ e1' e2
-- | Return a CoreExpr builder in the 'expr1' case
parseE1' :: Parser (CoreExpr -> CoreExpr)
parseE1' = parseOrRHS <|> (ws >> return id)
where
parseOrRHS = do
symbol '|'
e1 <- parseE1
return (\e2 -> EBinOp OpOr e2 e1)
-- Case: expr2 -> expr3 & expr2. (&) is right associative.
parseE2 = do
e3 <- parseE3
e2' <- parseE2'
return $ e2' e3
parseE2' = parseAndRHS <|> (ws >> return id)
where
parseAndRHS = do
symbol '&'
e2 <- parseE2
return (\e3 -> EBinOp OpAnd e3 e2)
-- Case: expr3 -> expr4 relop expr4. Relational operators are non-associative.
parseE3 = do
e4 <- parseE4
e3' <- parseE3'
return $ e3' e4
parseE3' = parseRelOpRHS <|> (ws >> return id)
where
parseRelOpRHS = do
op <- relop
e4' <- parseE4
return (\e4 -> EBinOp op e4 e4')
-- Case: expr4 -> expr5 +- expr4
parseE4 = do
e5 <- parseE5
f <- parseE4'
return $ f e5
parseE4' = parseAddOpRHS <|> parseSubOpRHS <|> (ws >> return id)
where
parseAddOpRHS = do -- + is right associative
symbol '+'
e4' <- parseE4
return (\e5 -> EBinOp OpAdd e5 e4')
parseSubOpRHS = do -- - is non-associative
symbol '-'
e5' <- parseE5
return (\e5 -> EBinOp OpSub e5 e5')
-- Case: expr5 -> expr6 */ expr5(6)
parseE5 = do
e6 <- parseE6
f <- parseE5'
return $ f e6
parseE5' = parseMultOpRHS <|> parseDivOpRHS <|> (ws >> return id)
where
parseMultOpRHS = do -- note: * is right associative
symbol '*'
e5' <- parseE5
return (\e6 -> EBinOp OpMult e6 e5')
parseDivOpRHS = do -- note: / is non-associative
symbol '/'
e6' <- parseE6
return (\e6 -> EBinOp OpDiv e6 e6')
    -- The highest-precedence expressions are the atomic ones and function
    -- application (left associative).
parseE6 = do
exprs <- parseAtomic `sepBy1` ws
case exprs of
[] -> error "fatal parser error: sepBy1 returned nothing"
[e] -> return $ e -- atomic expr
_ -> return $ foldl1 EAp exprs -- application
-- Atomic expressions
parseAtomic = parseEVar
<|> parseENum
<|> parseEConstr
<|> parseParenExpr
-- Identifier expressions (but not keywords)
parseEVar = try $ do
s <- parseIdent
if isKeyword s
then mzero -- fail
else return $ EVar s
-- Integers
parseENum = ENum <$> parseNum
-- Data constructor: note no whitespace in pack for now
parseEConstr = do
kw "Pack"
symbol '{'
n <- parseNum
symbol ','
k <- parseNum
symbol '}'
return $ EConstr n k
-- Expr in parens
parseParenExpr = between (symbol '(') (symbol ')') parseExpr <* ws
-- | Parse a case alternative
parseAlt :: Parser CoreAlter
parseAlt = (try parseAPattern <?> "error at: parsePattern") <|> parseADefault
where
parseAPattern = do
tag <- between (symbol '<') (symbol '>') parseNum
binds <- parseIdent `sepBy` ws
arrow
expr <- try parseExpr
return $ APattern tag binds expr
parseADefault = do
symbol '_'
arrow
expr <- parseExpr
return $ ADefault expr
-- | Parse one of more case alternatives separated by terminators
parseAlts :: Parser [CoreAlter]
parseAlts = parseAlt `sepBy1` term
-- | Non-alpha numeric characters allowed after the first character in an identifier
extraIdentChars :: [Char]
extraIdentChars = "_'"
-- | Parse an identifier consisting of an alpha character following by zero or
-- more AlphaNum characters, underscores, or primes (')
parseIdent :: Parser Ident
parseIdent = do
c <- letter
cs <- many (satisfy nonFirstChar)
ws
return $ identFromStr (c:cs)
where
nonFirstChar c = isAlphaNum c || c `elem` extraIdentChars
-- | Parse an unsigned integer
parseNum :: (Num a, Read a) => Parser a
parseNum = do
digits <- lexeme (many1 digit)
return (read digits)
-- Utilities -----------------------------------------------------------
-- | Use the given parser and then consume any trailing whitespace
lexeme :: Parser a -> Parser a
lexeme p = p <* ws
-- | Parse the given keyword as a lexeme
kw :: String -> Parser ()
kw = void . lexeme . string
-- | Parse a list of zero or more items (second parser) separated by a
-- separator (first parser)
parseList :: Parser b -> Parser a -> Parser [a]
parseList = flip sepBy
-- | Terminator characters
termChar :: Char
termChar = ';'
-- | Terminator for supercombinator definitions, let bindings, and case
-- alternatives
term :: Parser ()
term = void $ lexeme (char termChar)
-- | Whitespace characters
wsChars :: [Char]
wsChars = " \t\n"
-- | Parse one whitespace character
wsChar :: Parser Char
wsChar = satisfy (`elem` wsChars)
-- | Parse and throw away zero or more whitespace (spaces and tabs), but not
-- newlines
ws :: Parser ()
ws = void $ many wsChar
-- | Parse at least one whitespace character
ws1 :: Parser ()
ws1 = void $ many1 wsChar
-- Various symbol lexemes ----------------------------------------------
symbol = lexeme . char
bigSym = lexeme . string
arrow = lexeme (string "->")
lambda = lexeme (char '\\')
equals = lexeme (char '=')
arithOp :: Parser BinOp
arithOp = (symbol '+' >> return OpAdd)
<|> (symbol '-' >> return OpSub)
<|> (symbol '*' >> return OpMult)
<|> (symbol '/' >> return OpDiv)
relop :: Parser BinOp
relop = (bigSym "<=" >> return OpLE)
<|> (bigSym "<" >> return OpLT)
<|> (bigSym ">=" >> return OpGE)
<|> (bigSym ">" >> return OpGT)
<|> (bigSym "==" >> return OpEQ)
<|> (bigSym "!=" >> return OpNEQ)
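-- Smoke-test value (added example, not part of the original API): parses the
-- two definitions from the example program in the module comment above; the
-- inferred type is the @Either ParseError Program@ produced by Parsec's
-- 'parse'.
exampleParse = parse parseProgram "<example>" "main = double 2; double x = 2 * x"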
| benjaminfjones/cortho | src/Lang/Cortho/Parser.hs | bsd-2-clause | 9,117 | 0 | 14 | 2,542 | 2,107 | 1,081 | 1,026 | 219 | 4 |
-----------------------------------------------------------------------------
-- |
-- Module : Haddock.Backends.Html.Names
-- Copyright : (c) Simon Marlow 2003-2006,
-- David Waern 2006-2009,
-- Mark Lentczner 2010
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
-----------------------------------------------------------------------------
module Haddock.Backends.Xhtml.Names (
ppName, ppDocName, ppLDocName, ppRdrName,
ppBinder, ppBinder',
ppModule, ppModuleRef,
linkId
) where
import Haddock.Backends.Xhtml.Utils
import Haddock.GhcUtils
import Haddock.Types
import Haddock.Utils
import Text.XHtml hiding ( name, title, p, quote )
import GHC
import Name
import RdrName
ppOccName :: OccName -> Html
ppOccName = toHtml . occNameString
ppRdrName :: RdrName -> Html
ppRdrName = ppOccName . rdrNameOcc
ppLDocName :: Located DocName -> Html
ppLDocName (L _ d) = ppDocName d
ppDocName :: DocName -> Html
ppDocName (Documented name mdl) =
linkIdOcc mdl (Just occName) << ppOccName occName
where occName = nameOccName name
ppDocName (Undocumented name) = toHtml (getOccString name)
ppName :: Name -> Html
ppName name = toHtml (getOccString name)
ppBinder :: Bool -> OccName -> Html
-- The Bool indicates whether we are generating the summary, in which case
-- the binder will be a link to the full definition.
ppBinder True n = linkedAnchor (nameAnchorId n) << ppBinder' n
ppBinder False n = namedAnchor (nameAnchorId n) ! [theclass "def"]
<< ppBinder' n
ppBinder' :: OccName -> Html
ppBinder' n
| isVarSym n = parens $ ppOccName n
| otherwise = ppOccName n
linkId :: Module -> Maybe Name -> Html -> Html
linkId mdl mbName = linkIdOcc mdl (fmap nameOccName mbName)
linkIdOcc :: Module -> Maybe OccName -> Html -> Html
linkIdOcc mdl mbName = anchor ! [href url]
where
url = case mbName of
Nothing -> moduleUrl mdl
Just name -> moduleNameUrl mdl name
ppModule :: Module -> Html
ppModule mdl = anchor ! [href (moduleUrl mdl)]
<< toHtml (moduleString mdl)
ppModuleRef :: Module -> String -> Html
ppModuleRef mdl ref = anchor ! [href (moduleUrl mdl ++ ref)]
<< toHtml (moduleString mdl)
-- NB: The ref parameter already includes the '#'.
-- This function is only called from markupModule expanding a
-- DocModule, which doesn't ever seem to be used.
| nominolo/haddock2 | src/Haddock/Backends/Xhtml/Names.hs | bsd-2-clause | 2,518 | 0 | 11 | 546 | 613 | 324 | 289 | 47 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import qualified Bench.Zodiac.TSRP.MAC
import qualified Bench.Zodiac.TSRP.Symmetric
import Criterion.Main
import Criterion.Types
import P
import System.IO
import Test.Zodiac.TSRP.Arbitrary ()
zodiacBench :: [Benchmark] -> IO ()
zodiacBench = defaultMainWith cfg
where
cfg = defaultConfig {
reportFile = Just "dist/build/zodiac-bench.html"
, csvFile = Just "dist/build/zodiac-bench.csv"
}
main :: IO ()
main = zodiacBench $ join [
Bench.Zodiac.TSRP.MAC.benchmarks
, Bench.Zodiac.TSRP.Symmetric.benchmarks
]
| ambiata/zodiac | zodiac-tsrp/bench/bench.hs | bsd-3-clause | 775 | 0 | 9 | 186 | 142 | 88 | 54 | 21 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE ViewPatterns #-}
import qualified Data.ByteString.Char8 as C
import System.IO
import System.Environment
dict = [ ("million", 10^6)
, ("thousand", 10^3)
, ("hundred", 10^2)
, ("ninety", 90)
, ("eighty", 80)
, ("seventy", 70)
, ("sixty", 60)
, ("fifty", 50)
, ("forty", 40)
, ("thirty", 30)
, ("twenty", 20)
, ("nineteen", 19)
, ("eighteen", 18)
, ("seventeen", 17)
, ("sixteen", 16)
, ("fifteen", 15)
, ("fourteen", 14)
, ("thirteen", 13)
, ("twelve", 12)
, ("eleven", 11)
, ("ten", 10)
, ("nine", 9)
, ("eight", 8)
, ("seven", 7)
, ("six", 6)
, ("five", 5)
, ("four", 4)
, ("three", 3)
, ("two", 2)
, ("one", 1)
, ("zero", 0)
] :: [ (C.ByteString, Int) ]
negative = "negative" :: C.ByteString
isTimes x = x `elem` [10^2, 10^3, 10^6]
notTimes = not . isTimes
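-- Fold one scale word into the running total. Only ever reached with d of
-- 100, 1000 or 10^6 (callers guard with isTimes): hundreds rescale the part
-- of the total below 1000, thousands the part below a million, and millions
-- the whole total.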
accum r d
| d == 10^2 = (r`div`1000)*1000 + (r `mod` 1000)*d
| d == 10^3 = (r`div`10^6)*10^6 + (r `mod` 10^6)*d
| d == 10^6 = r*d
parseNumber :: C.ByteString -> Maybe Int
parseNumber (C.words -> []) = Nothing
parseNumber (C.words -> (w:ws))
| w == negative = fmap (\x -> -x) (go 0 ws)
| w /= negative = go 0 (w:ws)
where go r [] = Just r
go r (x:xs) = lookup x dict >>= \d -> if
| isTimes d -> go (accum r d) xs
| notTimes d -> go (r + d) xs
parseNumber' = maybe 0 id . parseNumber
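-- Examples (added for illustration):
-- parseNumber' "one hundred twenty three thousand" == 123000
-- parseNumber' "negative two hundred five" == -205
-- Input that fails to parse is mapped to 0 by the 'maybe 0 id' default.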
main = fmap head getArgs >>= \fname ->
withFile fname ReadMode mainloop
where mainloop h = C.hGetContents h >>= mapM_ (print . parseNumber') . C.lines
| wangbj/excises | string2Number.hs | bsd-3-clause | 1,841 | 0 | 15 | 624 | 828 | 469 | 359 | 60 | 3 |
{-# OPTIONS -Wall #-}
module Lambda where
-- Compiled programs: lambda terms whose variables are de Bruijn indices.
data Term val
= TermVar Int
| TermLam (Term val)
| TermApp (Term val) (Term val)
| TermLit val
| TermPrm (val -> val -> val) (Term val) (Term val)
isClosed :: Term val -> Int -> Bool
isClosed (TermVar idx) lev = idx < lev
isClosed (TermLam sub) lev = isClosed sub (lev + 1)
isClosed (TermApp sub1 sub2) lev
= isClosed sub1 lev && isClosed sub2 lev
isClosed (TermLit _) _ = True
isClosed (TermPrm _ sub1 sub2) lev
= isClosed sub1 lev && isClosed sub2 lev
-- Runtime values: either a plain value or a closure pairing a term with its environment.
data Value val
= Value val
| Closure (Term val) [Value val]
execute :: Term val -> [Value val] -> Value val
execute (TermVar idx) env
= if idx < length env
then env !! idx
else error "invalid variable"
execute (TermLam term) env
= Closure term env
execute (TermApp fun arg) env
= case execute fun env of
Closure t e -> execute t (execute arg env : e)
Value _ -> error "not applicable"
execute (TermLit val) _ = Value val
execute (TermPrm fun arg1 arg2) env
= let
a1 = execute arg1 env
a2 = execute arg2 env
in case (a1, a2) of
(Value v1, Value v2) -> Value (fun v1 v2)
_ -> error "invalid value"
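-- Example (added for illustration): the program (\x -> \y -> x + y) 2 3
-- written with de Bruijn indices; index 1 refers to the outer binder and
-- index 0 to the inner one.
exampleTerm :: Term Int
exampleTerm =
  TermApp
    (TermApp (TermLam (TermLam (TermPrm (+) (TermVar 1) (TermVar 0)))) (TermLit 2))
    (TermLit 3)
-- Evaluating it in the empty environment yields (Value 5).
exampleResult :: Value Int
exampleResult = execute exampleTerm []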
| mmaroti/tensor.hs | src/Lambda.hs | bsd-3-clause | 1,167 | 20 | 12 | 259 | 543 | 272 | 271 | 38 | 4 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
module HipChat.Auth.Types where
--------------------------------------------------------------------------------
-- |
-- Module: HipChat.Auth.Types
--
-- Types involved in authentication and authorization, e.g. OAuth token requests
--
--------------------------------------------------------------------------------
import Control.Lens hiding ((.=))
import Control.Lens.AsText (AsText)
import qualified Control.Lens.AsText as AsText
import Data.Aeson (FromJSON (parseJSON),
ToJSON (toJSON), Value (String),
object, withObject, (.:), (.:?),
(.=))
import Data.String (IsString (fromString))
import Data.Text (Text)
import qualified Data.Text as T
import GHC.Generics (Generic)
import Web.FormUrlEncoded (FromForm (fromForm),
ToForm (toForm), parseUnique)
import Web.HttpApiData (FromHttpApiData (parseUrlPiece),
ToHttpApiData (toUrlPiece),
parseQueryParam, toQueryParam)
--------------------------------------------------------------------------------
-- APIScope
--------------------------------------------------------------------------------
data APIScope
= AdminGroup -- ^ Perform group administrative tasks
| AdminRoom -- ^ Perform room administrative tasks
| ManageRooms -- ^ Create, update, and remove rooms
| SendMessage -- ^ Send private one-on-one messages
| SendNotification -- ^ Send room notifications
| ViewGroup -- ^ View users, rooms, and other group information
| ViewMessages -- ^ View messages from chat rooms and private chats you have access to
| ViewRoom -- ^ View room information and participants, but not history
deriving (Show, Read, Eq, Generic)
instance AsText APIScope where
enc = \case
AdminGroup -> "admin_group"
AdminRoom -> "admin_room"
ManageRooms -> "manage_rooms"
SendMessage -> "send_message"
SendNotification -> "send_notification"
ViewGroup -> "view_group"
ViewMessages -> "view_messages"
ViewRoom -> "view_room"
dec = \case
"admin_group" -> Just AdminGroup
"admin_room" -> Just AdminRoom
"manage_rooms" -> Just ManageRooms
"send_message" -> Just SendMessage
"send_notification" -> Just SendNotification
"view_group" -> Just ViewGroup
"view_messages" -> Just ViewMessages
"view_room" -> Just ViewRoom
_ -> Nothing
instance ToHttpApiData APIScope where
toUrlPiece = AsText.toUrlPiece
instance FromHttpApiData APIScope where
parseUrlPiece = AsText.parseUrlPiece
instance ToJSON APIScope where
toJSON = AsText.toJSON
instance FromJSON APIScope where
parseJSON = AsText.parseJSON
instance IsString APIScope where
fromString = AsText.fromString
--------------------------------------------------------------------------------
-- GrantType
--------------------------------------------------------------------------------
data GrantType
= AuthorizationCode
| RefreshToken
| Password
| ClientCredentials
| Personal
| RoomNotification
| Internal -- ^ allowed, but not documented
deriving (Eq, Show, Read, Generic)
instance AsText GrantType where
enc = \case
AuthorizationCode -> "authorization_code"
RefreshToken -> "refresh_token"
Password -> "password"
ClientCredentials -> "client_credentials"
Personal -> "personal"
RoomNotification -> "room_notification"
Internal -> "internal"
dec = \case
"authorization_code" -> Just AuthorizationCode
"refresh_token" -> Just RefreshToken
"password" -> Just Password
"client_credentials" -> Just ClientCredentials
"personal" -> Just Personal
"room_notification" -> Just RoomNotification
"internal" -> Just Internal
_ -> Nothing
instance ToHttpApiData GrantType where
toUrlPiece = AsText.toUrlPiece
instance FromHttpApiData GrantType where
parseUrlPiece = AsText.parseUrlPiece
instance ToJSON GrantType where
toJSON = AsText.toJSON
instance FromJSON GrantType where
parseJSON = AsText.parseJSON
instance IsString GrantType where
fromString = AsText.fromString
--------------------------------------------------------------------------------
-- Token request
--------------------------------------------------------------------------------
data TokenReq = TokenReq
{ tokenReqGrantType :: GrantType -- ^ The type of grant request.
, tokenReqScope :: [APIScope] -- ^ A space-delimited list of scopes that is requested.
, tokenReqUsername :: Maybe Text -- ^ The user name to generate a token on behalf of. Only valid in the 'password' and 'client_credentials' grants.
, tokenReqUserId :: Maybe Text -- int?
-- ^ The id of the user the token is acting on behalf of. Only valid in the 'authorization_code' and 'refresh_token' grants.
, tokenReqCode :: Maybe Text -- ^ The authorization code to exchange for an access token. Only valid in the 'authorization_code' grant.
, tokenReqClientName :: Maybe Text -- ^ The name of the public oauth client retrieving a token for. Only valid in the 'authorization_code' and 'refresh_token' grants.
, tokenReqRedirectUrl :: Maybe Text -- ^ The URL that was used to generate an authorization code, and it must match that value. Only valid in the 'authorization_code' grant.
, tokenReqPassword :: Maybe Text -- ^ The user's password to use for authentication when creating a token. Only valid in the 'password' grant.
, tokenReqGroupId :: Maybe Text -- ^ The name of the group to which the related user belongs. Only valid in the 'authorization_code' and 'refresh_token' grants.
, tokenReqRefreshToken :: Maybe Text -- ^ The refresh token to use to generate a new access token. Only valid in the 'refresh_token' grant.
} deriving (Eq, Show, Read, Generic)
makeLensesWith camelCaseFields ''TokenReq
tokenReq :: GrantType -> TokenReq
tokenReq gt = TokenReq gt [] Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
addOnTokenReq :: [APIScope] -> TokenReq
addOnTokenReq scopes = tokenReq ClientCredentials & scope .~ scopes
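-- Example (added for illustration): a client-credentials token request for an
-- add-on that only needs to post room notifications and read room metadata.
exampleAddOnTokenReq :: TokenReq
exampleAddOnTokenReq = addOnTokenReq [SendNotification, ViewRoom]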
instance ToForm TokenReq where
toForm req =
[ ("grant_type", toQueryParam (req ^. grantType))
, ("scope", T.unwords $ toQueryParam <$> req ^. scope) -- TODO nonempty?
, foldMap (("username",) . toQueryParam) (req ^. username)
, foldMap (("user_id",) . toQueryParam) (req ^. userId)
, foldMap (("code",) . toQueryParam) (req ^. code)
, foldMap (("client_name",) . toQueryParam) (req ^. clientName)
, foldMap (("redirect_url",) . toQueryParam) (req ^. redirectUrl)
, foldMap (("password",) . toQueryParam) (req ^. password)
, foldMap (("group_id",) . toQueryParam) (req ^. groupId)
, foldMap (("refresh_token",) . toQueryParam) (req ^. refreshToken)
]
instance FromForm TokenReq where
fromForm f = TokenReq
<$> parseUnique "grant_type" f
<*> (parseUnique "scope" f >>= traverse parseQueryParam . T.words)
<*> parseUnique "username" f
<*> parseUnique "user_id" f
<*> parseUnique "code" f
<*> parseUnique "client_name" f
<*> parseUnique "redirect_url" f
<*> parseUnique "password" f
<*> parseUnique "group_id" f
<*> parseUnique "refresh_token" f
--------------------------------------------------------------------------------
-- Token response
--------------------------------------------------------------------------------
data TokenResp = TokenResp
{ tokenRespAccessToken :: Text -- ^ The generated access token to use to authenticate future requests.
, tokenRespExpiresIn :: Int -- ^ The number of seconds this token will be valid for.
, tokenRespGroupId :: Int -- ^ The HipChat group ID this token belongs to
, tokenRespGroupName :: Text -- ^ The HipChat group name this token belongs to.
, tokenRespTokenType :: Text -- ^ The type of token returned. Always 'bearer'.
, tokenRespScope :: [APIScope] -- ^ A space-delimited list of scopes that this token is allowed to use.
, tokenRespRefreshToken :: Maybe Text -- ^ The generated refresh token to use when requesting new access tokens.
} deriving (Eq, Show, Read, Generic)
makeLensesWith camelCaseFields ''TokenResp
instance ToJSON TokenResp where
toJSON t = object
[ "access_token" .= (t ^. accessToken)
, "expires_in" .= (t ^. expiresIn)
, "group_id" .= (t ^. groupId)
, "group_name" .= (t ^. groupName)
, "token_type" .= (t ^. tokenType)
, "token_scope" .= T.unwords (toQueryParam <$> t ^. scope)
, "refresh_token" .= (t ^. refreshToken)
]
instance FromJSON TokenResp where
parseJSON = withObject "object" $ \o -> TokenResp
<$> o .: "access_token"
<*> o .: "expires_in"
<*> o .: "group_id"
<*> o .: "group_name"
<*> o .: "token_type"
<*> (o .: "scope" >>= traverse (parseJSON . String) . T.words)
<*> o .:? "refresh_token"
| mjhopkins/hipchat | src/HipChat/Auth/Types.hs | bsd-3-clause | 9,956 | 0 | 20 | 2,532 | 1,660 | 934 | 726 | 172 | 1 |
module ImplicitRefsCont.Parser
( expression
, program
, parseProgram
) where
import Control.Monad (void)
import Data.Maybe (fromMaybe)
import ImplicitRefsCont.Data
import Text.Megaparsec hiding (ParseError)
import Text.Megaparsec.Expr
import qualified Text.Megaparsec.Lexer as L
import Text.Megaparsec.String
parseProgram :: String -> Try Program
parseProgram input = case runParser program "Program Parser" input of
Left err -> Left $ ParseError err
Right p -> Right p
spaceConsumer :: Parser ()
spaceConsumer = L.space (void spaceChar) lineCmnt blockCmnt
where lineCmnt = L.skipLineComment "//"
blockCmnt = L.skipBlockComment "/*" "*/"
symbol = L.symbol spaceConsumer
parens = between (symbol "(") (symbol ")")
minus = symbol "-"
equal = symbol "="
comma = symbol ","
longArrow = symbol "==>"
semiColon = symbol ";"
lexeme :: Parser a -> Parser a
lexeme = L.lexeme spaceConsumer
keyWord :: String -> Parser ()
keyWord w = string w *> notFollowedBy alphaNumChar *> spaceConsumer
reservedWords :: [String]
reservedWords =
[ "let", "in", "if", "then", "else", "zero?", "minus"
, "equal?", "greater?", "less?", "proc", "letrec", "begin", "set"
]
binOpsMap :: [(String, BinOp)]
binOpsMap =
[ ("+", Add), ("-", Sub), ("*", Mul), ("/", Div), ("equal?", Eq)
, ("greater?", Gt), ("less?", Le) ]
binOp :: Parser BinOp
binOp = do
opStr <- foldl1 (<|>) (fmap (try . symbol . fst) binOpsMap)
return $ fromMaybe
(error ("Unknown operator '" `mappend` opStr `mappend` "'"))
(lookup opStr binOpsMap)
unaryOpsMap :: [(String, UnaryOp)]
unaryOpsMap =
[ ("minus", Minus), ("zero?", IsZero) ]
unaryOp :: Parser UnaryOp
unaryOp = do
opStr <- foldl1 (<|>) (fmap (try . symbol . fst) unaryOpsMap)
return $ fromMaybe
(error ("Unknown operator '" `mappend` opStr `mappend` "'"))
(lookup opStr unaryOpsMap)
-- | Identifier ::= String (without reserved words)
identifier :: Parser String
identifier = lexeme (p >>= check)
where
p = (:) <$> letterChar <*> many alphaNumChar
check x = if x `elem` reservedWords
then fail $
concat ["keyword ", show x, " cannot be an identifier"]
else return x
integer :: Parser Integer
integer = lexeme L.integer
signedInteger :: Parser Integer
signedInteger = L.signed spaceConsumer integer
pairOf :: Parser a -> Parser b -> Parser (a, b)
pairOf pa pb = parens $ do
a <- pa
comma
b <- pb
return (a, b)
-- expressionPair ::= (Expression, Expression)
expressionPair :: Parser (Expression, Expression)
expressionPair = pairOf expression expression
-- | ConstExpr ::= Number
constExpr :: Parser Expression
constExpr = ConstExpr . ExprNum <$> signedInteger
-- | BinOpExpr ::= BinOp (Expression, Expression)
binOpExpr :: Parser Expression
binOpExpr = do
op <- binOp
exprPair <- expressionPair
return $ uncurry (BinOpExpr op) exprPair
-- | UnaryOpExpr ::= UnaryOp (Expression)
unaryOpExpr :: Parser Expression
unaryOpExpr = do
op <- unaryOp
expr <- parens expression
return $ UnaryOpExpr op expr
-- | IfExpr ::= if Expression then Expression else Expression
ifExpr :: Parser Expression
ifExpr = do
keyWord "if"
ifE <- expression
keyWord "then"
thenE <- expression
keyWord "else"
elseE <- expression
return $ IfExpr ifE thenE elseE
-- | VarExpr ::= Identifier
varExpr :: Parser Expression
varExpr = VarExpr <$> identifier
-- | LetExpr ::= let {Identifier = Expression}* in Expression
letExpr :: Parser Expression
letExpr = letFamilyExpr "let" LetExpr
letFamilyExpr :: String
-> ([(String, Expression)] -> Expression -> Expression)
-> Parser Expression
letFamilyExpr letType builder = do
keyWord letType
bindings <- many binding
keyWord "in"
body <- expression
return $ builder bindings body
where
binding = try assignment
-- | LetrecExpr ::= letrec {Identifier (Identifier) = Expression} in Expression
letRecExpr :: Parser Expression
letRecExpr = do
keyWord "letrec"
procBindings <- many procBinding
keyWord "in"
recBody <- expression
return $ LetRecExpr procBindings recBody
where
procBinding = try $ do
procName <- identifier
params <- parens $ many identifier
equal
procBody <- expression
return (procName, params, procBody)
-- | ManyExprs ::= <empty>
-- ::= Many1Exprs
manyExprs :: Parser [Expression]
manyExprs = sepBy expression comma
-- | Many1Exprs ::= Expression
-- ::= Expression , Many1Exprs
many1Exprs :: Parser [Expression]
many1Exprs = sepBy1 expression comma
-- | ProcExpr ::= proc ({Identifier}*) Expression
procExpr :: Parser Expression
procExpr = do
keyWord "proc"
params <- parens $ many identifier
body <- expression
return $ ProcExpr params body
-- | CallExpr ::= (Expression {Expression}*)
callExpr :: Parser Expression
callExpr = parens $ do
rator <- expression
rands <- many expression
return $ CallExpr rator rands
-- | BeginExpr ::= begin BeginBody end
--
-- BeginBody ::= Expression BeginBodyTail
--
-- BeginBodyTail ::= <empty>
-- ::= ; Expression BeginBodyTail
beginExpr :: Parser Expression
beginExpr = do
keyWord "begin"
exprs <- sepBy1 (try expression) semiColon
keyWord "end"
return $ BeginExpr exprs
assignment :: Parser (String, Expression)
assignment = do
name <- identifier
equal
expr <- expression
return (name, expr)
-- | AssignExpr ::= set Identifier = Expression
assignExpr :: Parser Expression
assignExpr = do
keyWord "set"
assign <- assignment
return $ uncurry AssignExpr assign
-- | Expression ::= ConstExpr
-- ::= BinOpExpr
-- ::= UnaryOpExpr
-- ::= IfExpr
-- ::= VarExpr
-- ::= LetExpr
-- ::= LetRecExpr
-- ::= ProcExpr
-- ::= CallExpr
-- ::= BeginExpr
-- ::= AssignExpr
expression :: Parser Expression
expression = foldl1 (<|>) (fmap try expressionList)
where
expressionList =
[ constExpr
, binOpExpr
, unaryOpExpr
, ifExpr
, varExpr
, letExpr
, letRecExpr
, procExpr
, callExpr
, beginExpr
, assignExpr
]
program :: Parser Program
program = do
spaceConsumer
expr <- expression
eof
return $ Prog expr
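-- | Small usage example (added for illustration, not part of the original
-- module): parse a LET-language program that subtracts one from a bound
-- variable.
exampleParse :: Try Program
exampleParse = parseProgram "let x = 1 in -(x, 1)"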
| li-zhirui/EoplLangs | src/ImplicitRefsCont/Parser.hs | bsd-3-clause | 6,396 | 0 | 13 | 1,534 | 1,721 | 899 | 822 | 172 | 2 |
module FormulaTest where
import Test.QuickCheck
import Formula
-- QuickCheck Stuff
instance Arbitrary Fmla where
  arbitrary = scale (min 4) $ sized fmlaGen'
fmlaGen :: Gen (Fmla)
fmlaGen = fmlaGen' 4
fmlaGen' :: Int -> Gen (Fmla)
fmlaGen' 0 = propGen
fmlaGen' n = oneof $ fmlaGenerators (n - 1)
where
fmlaGenerators :: Int -> [Gen (Fmla)]
fmlaGenerators n = [ (negaGen n) , (conjGen n) , (disjGen n) , (implGen n) ]
propGen :: Gen (Fmla)
propGen = elements $ map (\x -> Prop x) allProps
negaGen :: Int -> Gen (Fmla)
negaGen n = Neg <$> fmlaGen' n
conjGen :: Int -> Gen (Fmla)
conjGen n = Conjunction <$> fmlaGen' n <*> fmlaGen' n
disjGen :: Int -> Gen (Fmla)
disjGen n = Disjunction <$> fmlaGen' n <*> fmlaGen' n
implGen :: Int -> Gen (Fmla)
implGen n = Implication <$> fmlaGen' n <*> fmlaGen' n | jpotecki/LogicTableauChecker | test/FormulaTest.hs | bsd-3-clause | 843 | 0 | 11 | 180 | 361 | 188 | 173 | 22 | 1 |
module Phenotype
( Phenotype (Phenotype)
, phenotypeToVector
, randomPhenotypeFraction
, randomPhenotypeChange
, randomPhenotypeChangeWithOneNonzero
, zeroPhenotype
, fitness) where
import Phenotype.Internal | satai/FrozenBeagle | Simulation/Lib/src/Phenotype.hs | bsd-3-clause | 236 | 0 | 5 | 48 | 36 | 24 | 12 | 11 | 0 |
module CC.Object where
import CC.Language
--
-- * Some Simple Object Languages
--
-- ** Empty/unit values
data None e = None
deriving Eq
instance Obj None where
mapCC _ _ = None
foldCC _ b = const b
showObj _ = "_"
-- ** Atomic values
data One a e = One a
deriving Eq
instance Show a => Obj (One a) where
mapCC _ (One a) = One a
foldCC _ b = const b
showObj (One a) = show a
-- ** Lists
data List a e =
Nil
| Cons a e
deriving Eq
instance Show a => Obj (List a) where
mapCC f (Cons a e) = Cons a (f e)
mapCC _ Nil = Nil
foldCC _ b Nil = b
foldCC f b (Cons _ e) = f e b
showObj Nil = "[]"
showObj (Cons a e) = show a ++ ":" ++ show e
-- ** Binary trees
data Tree a e =
Leaf a
| Node a e e
deriving Eq
instance Show a => Obj (Tree a) where
mapCC f (Node a l r) = Node a (f l) (f r)
mapCC _ (Leaf a) = Leaf a
foldCC _ b (Leaf _) = b
foldCC f b (Node _ l r) = f r (f l b)
showObj (Leaf a) = show a
showObj (Node a l r) = unwords ["(Node", show a, show l, show r ++ ")"]
--
-- * Plain Types
--
type None' = Plain None
type One' a = Plain (One a)
type List' a = Plain (List a)
type Tree' a = Plain (Tree a)
-- ** Smart constructors
none :: None'
none = P None
one :: a -> One' a
one = P . One
nil :: List' a
nil = P Nil
cons :: a -> List' a -> List' a
cons a l = P (Cons a l)
fromList :: [a] -> List' a
fromList = foldr cons nil
toList :: List' a -> [a]
toList (P Nil) = []
toList (P (Cons a e)) = a : toList e
leaf :: a -> Tree' a
leaf = P . Leaf
node :: a -> Tree' a -> Tree' a -> Tree' a
node a l r = P (Node a l r)
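-- Small examples (added for illustration): plain values built with the smart
-- constructors above.
exampleList :: List' Int
exampleList = fromList [1, 2, 3]
exampleTree :: Tree' Char
exampleTree = node 'a' (leaf 'b') (leaf 'c')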
| walkie/CC-Minimal | src/CC/Object.hs | bsd-3-clause | 1,676 | 0 | 9 | 548 | 832 | 427 | 405 | 57 | 1 |
module Main where
import Control.Concurrent
import Control.Concurrent.MVar
import Control.Concurrent.STM.TChan (tryPeekTChan)
import Control.Exception
import Control.Monad
import Control.Monad.Fix (fix)
import Control.Monad.STM (atomically)
import Data.Maybe
import GHC.Int (Int64)
import Network.Socket hiding (shutdown)
import System.Environment (getArgs)
import System.IO
import System.IO.Error
import System.Posix.Signals as Sig
import System.Log.Handler.Simple
import System.Log.Logger
import Data.Aeson (encode)
import Graphics.Vty
import qualified Data.Text as T
import qualified Graphics.Vty.Picture as V
import Ebitor.Edit
import Ebitor.Events.JSON (eventToString)
import Ebitor.Rope (Rope)
import Ebitor.Server
import Ebitor.Utils
import Ebitor.Window (Window(..), Orientation(..))
import qualified Ebitor.Rope as R
import qualified Ebitor.Rope.Cursor as R
import qualified Ebitor.Window as W
data App = App
{ term :: Vty
, serverSocket :: Socket
, isRunning :: IO Bool
, quit :: IO ()
}
type Window_ = Window WindowContent
loggerName = "Ebitor.Vty"
setUpLogger :: IO ()
setUpLogger = do
updateGlobalLogger rootLoggerName removeHandler
f <- fileHandler "logs/vty.log" DEBUG
updateGlobalLogger loggerName (setHandlers [f] . setLevel DEBUG)
setCursor :: Vty -> Window_ -> IO ()
setCursor vty w = do
let out = outputIface vty
focused = W.getFocusedWindow w
rect = fromJust $ W.cwRect focused
WTruncatedEditor e = W.cwContent focused
R.Cursor (ln, col) = snd $ tPosition e
fstLn = tFirstLine e - 1
setCursorPos out (col - 1 + W.rectX rect) (ln - 1 - fstLn + W.rectY rect)
showCursor out
imageForWindow :: Window_ -> Image
imageForWindow w = imageForWindow' w
where
imageForWindow' :: Window_ -> Image
imageForWindow' (LayoutWindow o wins _ _) =
let cat = if o == Horizontal then vertCat else horizCat
in cat $ map imageForWindow' wins
imageForWindow' w =
let r = case cwContent w of
WTruncatedEditor e -> tRope e
WRope r -> r
img = vertCat $ map imageForLine $ R.lines r
rect = fromJust $ cwRect w
resizeWidth' w = if w >= 0 then resizeWidth w else id
resizeHeight' h = if h >= 0 then resizeHeight h else id
in resizeWidth' (W.rectWidth rect) $ resizeHeight' (W.rectHeight rect) img
replaceTabs :: T.Text -> T.Text
replaceTabs = T.replace "\t" (T.replicate 8 " ")
imageForLine :: Rope -> Image
imageForLine = resizeHeight 1 . text' defAttr . replaceTabs . R.unpackText
handleResponse :: Response -> App -> IO ()
handleResponse (Screen w) app = do
bounds <- displayBounds $ outputIface vty
update vty $ picForImage $ imageForWindow w
setCursor vty w
where
vty = term app
handleResponse Disconnected app = quit app
handleResponse InvalidCommand app = do
sendCommand (serverSocket app) $ Echo $ ErrorMessage "Invalid command"
return ()
processEvent :: App -> IO ()
processEvent app = do
let sock = serverSocket app
e <- nextEvent $ term app
case e of
EvKey _ _ -> sendCommand sock $ SendKeys [e]
EvResize w h -> sendCommand sock $ UpdateDisplaySize (w, h)
_ -> return 0
return ()
getVty :: IO Vty
getVty = do
cfg <- standardIOConfig
vty <- mkVty cfg
setCursorPos (outputIface vty) 0 0
return vty
getSocket :: IO Socket
getSocket = do
sock <- socket AF_INET Stream 0
setSocketOption sock ReuseAddr 1
connect sock defaultSockAddr
infoM loggerName "Connected to server"
return sock
responseLoop :: App -> IO ()
responseLoop app = whileRunning app $ do
infoM loggerName "Waiting for response..."
resp <- receiveResponse (serverSocket app)
debugM loggerName ("Response: " ++ show resp)
when (isJust resp) $ handleResponse (fromJust resp) app
return ()
handleArgs :: App -> IO ()
handleArgs app = do
args <- getArgs
case args of
[] -> return ()
[fname] -> sendCommand' $ EditFile fname
_ -> sendCommand' $ Echo (ErrorMessage "Invalid command-line arguments")
where
sendCommand' cmd = do
sendCommand (serverSocket app) cmd
return ()
whileRunning :: App -> IO () -> IO ()
whileRunning app f = fix $ \loop -> do
running <- isRunning app
when running (f >> loop)
main = do
setUpLogger
programStatus <- newEmptyMVar
vty <- getVty
-- Not working. Why? Why indeed.
_ <- installHandler sigTSTP Sig.Default Nothing
sock <- catch getSocket $ \e -> do
if isDoesNotExistError e then do
infoM loggerName "Running own server..."
runServerThread defaultSockAddr
getSocket
else
error "Error connecting to server"
let app = App
{ term = vty
, serverSocket = sock
, isRunning = isEmptyMVar programStatus
, quit = putMVar programStatus ()
}
forkIO $ responseLoop app
-- Set initial display size
displayBounds (outputIface vty) >>= sendCommand sock . UpdateDisplaySize
handleArgs app
-- Thread to process user events
forkIO $ whileRunning app $ processEvent app
-- Wait until we quit the program
_ <- takeMVar programStatus
shutdown vty
close sock
| benekastah/ebitor | app/Main.hs | bsd-3-clause | 5,399 | 0 | 15 | 1,405 | 1,705 | 847 | 858 | 147 | 6 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE PackageImports #-}
-- |
-- Module : Crypto.Hash.Internal
-- License : BSD-style
-- Maintainer : Vincent Hanquez <[email protected]>
-- Stability : experimental
-- Portability : unknown
module Crypto.Hash.Internal
( unsafeDoIO
, digestToByteString
, digestToByteStringWitness
) where
import System.IO.Unsafe
import Data.ByteArray (convert)
import "cryptonite" Crypto.Hash
import Data.ByteString (ByteString)
-- | Perform IO for hashes that do allocation and FFI.
-- unsafeDupablePerformIO is used when possible, as the
-- computation is pure and the output is directly linked
-- to the input. We also do not modify anything after it has
-- been returned to the user.
unsafeDoIO :: IO a -> a
#if __GLASGOW_HASKELL__ > 704
unsafeDoIO = unsafeDupablePerformIO
#else
unsafeDoIO = unsafePerformIO
#endif
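-- | Convert a 'Digest' to a strict 'ByteString'
-- (a thin wrapper over 'convert' from "Data.ByteArray").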
digestToByteString :: HashAlgorithm h => Digest h -> ByteString
digestToByteString = convert
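-- | Like 'digestToByteString', but takes an extra (unused) hash-algorithm
-- value as a witness to help fix the digest's type at call sites.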
digestToByteStringWitness :: HashAlgorithm h => h -> Digest h -> ByteString
digestToByteStringWitness _ = convert
| vincenthz/hs-cryptohash | Crypto/Hash/Internal.hs | bsd-3-clause | 1,072 | 0 | 8 | 180 | 138 | 84 | 54 | 16 | 1 |
-- | PowerPC branch analysis.
module Language.PowerPC.Branching
( branching
) where
import Control.Monad
import Data.Bits
import Data.List
import Data.Word
import System.IO
import Language.PowerPC.Opcode
branching :: [Word32] -> Word64 -> [Word64] -> IO ()
branching program base starts = do
    -- NOTE: the body of this IO action is missing from this extract; only the
    -- successor computation 'next' below survives, so nothing is done here yet.
    return ()
  where
    -- Possible successor addresses of the instruction word 'instr' at 'addr':
    -- unconditional branch (opcode 18), conditional branch (16), branch to the
    -- link register (19/16) and to the count register (19/528); anything else
    -- falls through to addr + 4.
    next :: (Word64, Word32) -> [Word64]
    next (addr, instr) = case opcode instr of
        (18, 0)   -> if aa then [li] else [addr + li]
        (16, 0)   -> (if branchAlways then [] else [addr + 4]) ++ (if aa then [bd] else [addr + bd])
        (19, 16)  -> if branchAlways then [] else [addr + 4]
        (19, 528) -> if branchAlways then [] else [addr + 4]
        _         -> [addr + 4]
      where
        ufield :: Int -> Int -> Integer
        ufield h l = shiftR (fromIntegral instr) (31 - l) .&. setBits 0 [0 .. l - h]
        sfield :: Int -> Int -> Integer
        sfield h l = ufield h l .|. (if testBit instr (31 - h) then complement $ setBits 0 [0 .. l - h] else 0)
        -- The AA bit selects absolute vs. relative branch targets.
        aa = testBit instr 1
        -- Immediates are sign-extended as Integers and narrowed to Word64;
        -- two's-complement wrap-around gives the correct relative target.
        li = fromIntegral $ clearBits (sfield 6 31) [1, 0]
        bd = fromIntegral $ clearBits (sfield 16 31) [1, 0]
        bo = ufield 6 10
        branchAlways = testBit bo 4 && testBit bo 2
| tomahawkins/powerpc | Language/PowerPC/Branching.hs | bsd-3-clause | 1,084 | 1 | 14 | 256 | 507 | 280 | 227 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Configuration.OSC where
import Lens.Simple ( (^.)
, makeLenses
)
import Data.Yaml ( (.!=)
, (.:?)
, FromJSON(..)
)
import qualified Data.Yaml as Y
data ImprovizOSCConfig = ImprovizOSCConfig
{ _enabled :: Bool
, _port :: Int
}
deriving Show
makeLenses ''ImprovizOSCConfig
defaultOSCConfig :: ImprovizOSCConfig
defaultOSCConfig = ImprovizOSCConfig { _enabled = False, _port = 5510 }
instance FromJSON ImprovizOSCConfig where
parseJSON (Y.Object v) =
ImprovizOSCConfig
<$> v
.:? "enabled"
.!= (defaultOSCConfig ^. enabled)
<*> v
.:? "port"
.!= (defaultOSCConfig ^. port)
parseJSON _ = fail "Expected Object for OSCConfig value"
| rumblesan/improviz | src/Configuration/OSC.hs | bsd-3-clause | 1,046 | 0 | 11 | 478 | 179 | 106 | 73 | 25 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Client.CLFX where
-- Client Graphics Effects
import Control.Lens ((.=), ix, use, (^.), preuse, zoom, (&), (.~), (+~), (-~), (%=))
import Control.Monad (unless, when, liftM)
import Data.Bits ((.&.), complement)
import Data.Char (ord)
import Data.IORef (IORef, newIORef, modifyIORef', writeIORef, readIORef)
import Linear (V3(..), _x, _y, _z, norm, normalize)
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import qualified Data.Vector as V
import qualified Data.Vector.Unboxed as UV
import Game.CVarT
import Game.EntityStateT
import Client.ClientStateT
import Client.ClientStaticT
import Client.CEntityT
import Client.CDLightT
import Client.CLightStyleT
import Client.CParticleT
import Types
import QuakeState
import CVarVariables
import qualified Constants
import {-# SOURCE #-} qualified Client.CLTEnt as CLTEnt
import {-# SOURCE #-} qualified Client.V as ClientV
import qualified Game.Monsters.MFlash as MFlash
import {-# SOURCE #-} qualified QCommon.Com as Com
import qualified QCommon.MSG as MSG
import qualified Sound.S as S
import qualified Util.Lib as Lib
import qualified Util.Math3D as Math3D
instantParticle :: Float
instantParticle = -10000.0
particleGravity :: Float
particleGravity = 40
colorTable :: UV.Vector Int
colorTable = UV.fromList [ 2 * 8, 13 * 8, 21 * 8, 18 * 8 ]
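-- Expire dynamic lights: any light whose die time has passed has its radius
-- zeroed, so the renderer skips it.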
runDLights :: Quake ()
runDLights = do
time <- use $ globals.gCl.csTime
dLights <- use $ clientGlobals.cgDLights
io $ runDLight dLights (fromIntegral time) 0 Constants.maxDLights
where runDLight :: V.Vector (IORef CDLightT) -> Float -> Int -> Int -> IO ()
runDLight dLights time idx maxIdx
| idx >= maxIdx = return ()
| otherwise = do
let dlRef = dLights V.! idx
dl <- readIORef dlRef
              if | dl^.cdlRadius == 0 -> runDLight dLights time (idx + 1) maxIdx
                 | dl^.cdlDie < time -> do
                     -- expire this light, then keep walking the rest of the list
                     modifyIORef' dlRef (\v -> v { _cdlRadius = 0 })
                     runDLight dLights time (idx + 1) maxIdx
                 | otherwise -> runDLight dLights time (idx + 1) maxIdx
-- TODO: original quake2 code does have something else
-- here (jake2 is missing a part of this function)
runLightStyles :: Quake ()
runLightStyles = do
time <- use $ globals.gCl.csTime
lastOfs <- use $ clientGlobals.cgLastOfs
let ofs = time `div` 100
unless (ofs == lastOfs) $ do
clientGlobals.cgLastOfs .= ofs
lightStyles <- use $ clientGlobals.cgLightStyle
runLightStyle lightStyles ofs 0 (V.length lightStyles)
where runLightStyle :: V.Vector CLightStyleT -> Int -> Int -> Int -> Quake ()
runLightStyle lightStyles ofs idx maxIdx
| idx >= maxIdx = return ()
| otherwise = do
let ls = lightStyles V.! idx
if | ls^.clsLength == 0 -> do
clientGlobals.cgLightStyle.ix idx.clsValue .= V3 1 1 1
| ls^.clsLength == 1 -> do
let v = (ls^.clsMap) UV.! 0
clientGlobals.cgLightStyle.ix idx.clsValue .= V3 v v v
| otherwise -> do
let v = (ls^.clsMap) UV.! (ofs `mod` (ls^.clsLength))
clientGlobals.cgLightStyle.ix idx.clsValue .= V3 v v v
runLightStyle lightStyles ofs (idx + 1) maxIdx
clearEffects :: Quake ()
clearEffects = do
clearParticles
clearDLights
clearLightStyles
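-- Reset the particle pool: chain every particle to its successor to rebuild
-- the free list, terminate the chain, and drop the active list.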
clearParticles :: Quake ()
clearParticles = do
particles <- use $ clientGlobals.cgParticles
clientGlobals.cgFreeParticles .= Just (particles V.! 0)
clientGlobals.cgActiveParticles .= Nothing
io $ do
        let len = V.length particles
        setParticleChain particles 0 (len - 1)
        modifyIORef' (particles V.! (len - 1)) (\v -> v { _cpNext = Nothing })
where setParticleChain :: V.Vector (IORef CParticleT) -> Int -> Int -> IO ()
setParticleChain particles idx maxIdx
| idx >= maxIdx = return ()
| otherwise = do
modifyIORef' (particles V.! idx) (\v -> v { _cpNext = Just $ particles V.! (idx + 1) })
setParticleChain particles (idx + 1) maxIdx
clearDLights :: Quake ()
clearDLights = do
dlights <- io $ V.replicateM Constants.maxDLights (newIORef newCDLightT)
clientGlobals.cgDLights .= dlights
clearLightStyles :: Quake ()
clearLightStyles = do
clientGlobals.cgLightStyle .= V.replicate Constants.maxLightStyles newCLightStyleT
clientGlobals.cgLastOfs .= -1
-- Int is reference to globals.cl.csConfigStrings
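-- Each character 'a'..'z' of the config string encodes a brightness level,
-- with 'a' = 0.0 and 'm' = 1.0 (the character value divided by 'm' - 'a').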
setLightStyle :: Int -> Quake ()
setLightStyle csIdx = do
Just str <- preuse $ globals.gCl.csConfigStrings.ix (csIdx + Constants.csLights)
let len = B.length str
when (len >= Constants.maxQPath) $
Com.comError Constants.errDrop $ "svc_lightstyle length=" `B.append` BC.pack (show len) -- IMPROVE?
let d :: Float = fromIntegral (ord 'm' - ord 'a') -- so we do not recalculate it every time
lsMap = UV.unfoldr buildLightStyle (str, d, 0)
zoom (clientGlobals.cgLightStyle.ix csIdx) $ do
clsLength .= len
clsMap .= lsMap -- TODO: make sure we never want to access something beyond length
where buildLightStyle :: (B.ByteString, Float, Int) -> Maybe (Float, (B.ByteString, Float, Int))
buildLightStyle (str, d, idx)
| idx >= B.length str = Nothing
| otherwise =
let a :: Float = fromIntegral $ ord (str `BC.index` idx) - ord 'a'
in Just (a / d, (str, d, idx + 1))
teleporterParticles :: EntityStateT -> Quake ()
teleporterParticles ent = do
freeParticles <- use $ clientGlobals.cgFreeParticles
addTeleporterParticles freeParticles 0 8
where addTeleporterParticles :: Maybe (IORef CParticleT) -> Int -> Int -> Quake ()
addTeleporterParticles Nothing _ _ = return ()
addTeleporterParticles (Just pRef) idx maxIdx
| idx >= maxIdx = return ()
| otherwise = do
activeParticles <- use $ clientGlobals.cgActiveParticles
p <- io $ readIORef pRef
clientGlobals.cgFreeParticles .= (p^.cpNext)
clientGlobals.cgActiveParticles .= Just pRef
time <- use $ globals.gCl.csTime
o1 <- Lib.rand
o2 <- Lib.rand
o3 <- Lib.rand
v1 <- Lib.crandom
v2 <- Lib.crandom
v3 <- Lib.rand
io $ modifyIORef' pRef (\v -> v { _cpNext = activeParticles
, _cpTime = fromIntegral time
, _cpColor = 0xDB
, _cpOrg = V3 ((ent^.esOrigin._x) - 16 + fromIntegral (o1 .&. 31)) ((ent^.esOrigin._y) - 16 + fromIntegral (o2 .&. 31)) ((ent^.esOrigin._z) - 8 + fromIntegral (o3 .&. 7))
, _cpVel = V3 (v1 * 14) (v2 * 14) (80 + fromIntegral (v3 .&. 3))
, _cpAccel = V3 0 0 (- particleGravity)
, _cpAlpha = 1.0
, _cpAlphaVel = -0.5
})
addTeleporterParticles (p^.cpNext) (idx + 1) maxIdx
bigTeleportParticles :: V3 Float -> Quake ()
bigTeleportParticles org = do
freeParticles <- use $ clientGlobals.cgFreeParticles
addBigTeleportParticles freeParticles 0 4096
where addBigTeleportParticles :: Maybe (IORef CParticleT) -> Int -> Int -> Quake ()
addBigTeleportParticles Nothing _ _ = return ()
addBigTeleportParticles (Just pRef) idx maxIdx
| idx >= maxIdx = return ()
| otherwise = do
activeParticles <- use $ clientGlobals.cgActiveParticles
p <- io $ readIORef pRef
clientGlobals.cgFreeParticles .= (p^.cpNext)
clientGlobals.cgActiveParticles .= Just pRef
time <- use $ globals.gCl.csTime
color <- Lib.rand >>= \r -> return $ fromIntegral (colorTable UV.! (fromIntegral r .&. 3))
angle <- Lib.rand >>= \r -> return $ pi * 2 * fromIntegral (r .&. 1023) / 1023.0
dist <- Lib.rand >>= \r -> return $ fromIntegral (r .&. 31)
o <- Lib.rand
v1 <- Lib.rand
v2 <- Lib.rand
v3 <- Lib.rand
f <- Lib.randomF
io $ modifyIORef' pRef (\v -> v { _cpNext = activeParticles
, _cpTime = fromIntegral time
, _cpColor = color
, _cpOrg = V3 ((org^._x) + (cos angle) * dist) ((org^._y) + (sin angle) * dist) ((org^._z) + 8 + fromIntegral (o `mod` 90))
, _cpVel = V3 ((cos angle) * (70 + fromIntegral (v1 .&. 63))) ((sin angle) * (70 + fromIntegral (v2 .&. 63))) ((-100) + fromIntegral (v3 .&. 31))
, _cpAccel = V3 ((-100) * (cos angle)) ((-100) * (sin angle)) (particleGravity * 4)
, _cpAlpha = 1.0
, _cpAlphaVel = (-0.3) / (0.5 + f * 0.3)
})
addBigTeleportParticles (p^.cpNext) (idx + 1) maxIdx
{-
- ============== CL_EntityEvent ==============
-
- An entity has just been parsed that has an event value
-
 - the female events are there for backwards compatibility
-}
entityEvent :: EntityStateT -> Quake ()
entityEvent entityState = do
if | (entityState^.esEvent) == Constants.evItemRespawn -> do
sfx <- S.registerSound "items/respawn1.wav"
S.startSound Nothing (Ref (entityState^.esNumber)) Constants.chanWeapon sfx 1 Constants.attnIdle 0
itemRespawnParticles (entityState^.esOrigin)
| (entityState^.esEvent) == Constants.evPlayerTeleport -> do
sfx <- S.registerSound "misc/tele1.wav"
S.startSound Nothing (Ref (entityState^.esNumber)) Constants.chanWeapon sfx 1 Constants.attnIdle 0
teleportParticles (entityState^.esOrigin)
| (entityState^.esEvent) == Constants.evFootstep -> do
footstepsValue <- liftM (^.cvValue) clFootstepsCVar
when (footstepsValue /= 0) $ do
r <- Lib.rand
let idx = fromIntegral (r .&. 3)
Just sfx <- preuse $ clTEntGlobals.clteSfxFootsteps.ix idx
S.startSound Nothing (Ref (entityState^.esNumber)) Constants.chanBody sfx 1 Constants.attnNorm 0
| (entityState^.esEvent) == Constants.evFallShort -> do
sfx <- S.registerSound "player/land1.wav"
S.startSound Nothing (Ref (entityState^.esNumber)) Constants.chanAuto sfx 1 Constants.attnNorm 0
| (entityState^.esEvent) == Constants.evFall -> do
sfx <- S.registerSound "*fall2.wav"
S.startSound Nothing (Ref (entityState^.esNumber)) Constants.chanAuto sfx 1 Constants.attnNorm 0
| (entityState^.esEvent) == Constants.evFallFar -> do
sfx <- S.registerSound "*fall1.wav"
S.startSound Nothing (Ref (entityState^.esNumber)) Constants.chanAuto sfx 1 Constants.attnNorm 0
| otherwise ->
return () -- TODO: expected?
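-- Walk the active particle list: particles that have faded out are returned
-- to the free list, the rest are advanced along their velocity/acceleration
-- and handed to the renderer with their current color and alpha.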
addParticles :: Quake ()
addParticles = do
activeParticles <- use $ clientGlobals.cgActiveParticles
cl' <- use $ globals.gCl
addParticle cl' activeParticles Nothing Nothing 0
where addParticle :: ClientStateT -> Maybe (IORef CParticleT) -> Maybe (IORef CParticleT) -> Maybe (IORef CParticleT) -> Float -> Quake ()
addParticle _ Nothing active _ _ =
clientGlobals.cgActiveParticles .= active
addParticle cl' (Just pRef) activeRef tailRef time = do
p <- io $ readIORef pRef
let next = p^.cpNext
(done, time', alpha) <- if (p^.cpAlphaVel) /= instantParticle
then do
let time' = (fromIntegral (cl'^.csTime) - (p^.cpTime)) * 0.001
alpha = (p^.cpAlpha) + time' * (p^.cpAlphaVel)
if alpha <= 0 -- faded out
then do
freeParticles <- use $ clientGlobals.cgFreeParticles
io $ modifyIORef' pRef (\v -> v { _cpNext = freeParticles })
clientGlobals.cgFreeParticles .= Just pRef
return (True, time', alpha)
else
return (False, time', alpha)
else
return (False, time, p^.cpAlpha)
if done
then
addParticle cl' next activeRef tailRef time'
else do
io $ modifyIORef' pRef (\v -> v { _cpNext = Nothing })
(activeRef', tailRef') <- case tailRef of
Nothing ->
return (Just pRef, Just pRef)
Just ref -> do
io $ modifyIORef' ref (\v -> v { _cpNext = Just pRef })
return (activeRef, Just pRef)
let alpha' = if alpha > 1 then 1 else alpha
color = truncate (p^.cpColor) :: Int
time2 = time' * time'
org = (p^.cpOrg) + fmap (* time') (p^.cpVel) + fmap (* time2) (p^.cpAccel)
ClientV.addParticle org color alpha'
when ((p^.cpAlphaVel) == instantParticle) $
io $ modifyIORef' pRef (\v -> v { _cpAlpha = 0
, _cpAlphaVel = 0
})
addParticle cl' next activeRef' tailRef' time'
addDLights :: Quake ()
addDLights = do
-- TODO: currently simplified version... need to update it to reflect
-- jake2 version correctly
dlights <- use $ clientGlobals.cgDLights
addDLight dlights 0 Constants.maxDLights
where addDLight :: V.Vector (IORef CDLightT) -> Int -> Int -> Quake ()
addDLight dlights idx maxIdx
| idx >= maxIdx = return ()
| otherwise = do
let dlRef = dlights V.! idx
dl <- io $ readIORef dlRef
if (dl^.cdlRadius) == 0
then
addDLight dlights (idx + 1) maxIdx
else do
ClientV.addLight (dl^.cdlOrigin) (dl^.cdlRadius) (dl^.cdlColor._x) (dl^.cdlColor._y) (dl^.cdlColor._z)
addDLight dlights (idx + 1) maxIdx
addLightStyles :: Quake ()
addLightStyles = do
lightStyles <- use $ clientGlobals.cgLightStyle
addLightStyle lightStyles 0 Constants.maxLightStyles
where addLightStyle :: V.Vector CLightStyleT -> Int -> Int -> Quake ()
addLightStyle lightStyles idx maxIdx
| idx >= maxIdx = return ()
| otherwise = do
let ls = lightStyles V.! idx
ClientV.addLightStyle idx (ls^.clsValue._x) (ls^.clsValue._y) (ls^.clsValue._z)
addLightStyle lightStyles (idx + 1) maxIdx
itemRespawnParticles :: V3 Float -> Quake ()
itemRespawnParticles org = do
freeParticles <- use $ clientGlobals.cgFreeParticles
addItemRespawnParticles freeParticles 0 64
where addItemRespawnParticles :: Maybe (IORef CParticleT) -> Int -> Int -> Quake ()
addItemRespawnParticles Nothing _ _ = return ()
addItemRespawnParticles (Just pRef) idx maxIdx
| idx >= maxIdx = return ()
| otherwise = do
activeParticles <- use $ clientGlobals.cgActiveParticles
p <- io $ readIORef pRef
clientGlobals.cgFreeParticles .= (p^.cpNext)
clientGlobals.cgActiveParticles .= Just pRef
time <- use $ globals.gCl.csTime
color <- Lib.rand >>= \r -> return (r .&. 3)
o1 <- Lib.crandom
o2 <- Lib.crandom
o3 <- Lib.crandom
v1 <- Lib.crandom
v2 <- Lib.crandom
v3 <- Lib.crandom
f <- Lib.randomF
io $ modifyIORef' pRef (\v -> v { _cpNext = activeParticles
, _cpTime = fromIntegral time
, _cpColor = 0xD4 + fromIntegral color -- green
, _cpOrg = V3 ((org^._x) + o1 * 8) ((org^._y) + o2 * 8) ((org^._z) + o3 * 8)
, _cpVel = V3 (v1 * 8) (v2 * 8) (v3 * 8)
, _cpAccel = V3 0 0 ((-0.2) * particleGravity)
, _cpAlpha = 1.0
, _cpAlphaVel = (-1.0) / (1.0 + f * 0.3)
})
addItemRespawnParticles (p^.cpNext) (idx + 1) maxIdx
teleportParticles :: V3 Float -> Quake ()
teleportParticles org = do
freeParticles <- use $ clientGlobals.cgFreeParticles
addTeleportParticles freeParticles (-16) (-16) (-16)
where addTeleportParticles :: Maybe (IORef CParticleT) -> Int -> Int -> Int -> Quake ()
addTeleportParticles Nothing _ _ _ = return ()
addTeleportParticles (Just pRef) i j k
| i > 16 = return ()
| j > 16 = addTeleportParticles (Just pRef) (i + 4) (-16) (-16)
| k > 32 = addTeleportParticles (Just pRef) i (j + 4) (-16)
| otherwise = do
activeParticles <- use $ clientGlobals.cgActiveParticles
p <- io $ readIORef pRef
clientGlobals.cgFreeParticles .= (p^.cpNext)
clientGlobals.cgActiveParticles .= Just pRef
time <- use $ globals.gCl.csTime
color <- Lib.rand >>= \r -> return $ fromIntegral (r .&. 7)
av <- Lib.rand
o1 <- Lib.rand
o2 <- Lib.rand
o3 <- Lib.rand
let dir = normalize (fmap fromIntegral (V3 (j * 8) (i * 8) (k * 8)))
vel <- Lib.rand >>= \r -> return (50 + fromIntegral (r .&. 63))
io $ modifyIORef' pRef (\v -> v { _cpNext = activeParticles
, _cpTime = fromIntegral time
, _cpColor = 7 + color
, _cpAlpha = 1.0
, _cpAlphaVel = (-1.0) / (0.3 + fromIntegral (av .&. 7) * 0.02)
, _cpOrg = V3 ((org^._x) + fromIntegral i + fromIntegral (o1 .&. 3)) ((org^._y) + fromIntegral j + fromIntegral (o2 .&. 3)) ((org^._z) + fromIntegral k + fromIntegral (o3 .&. 3))
, _cpVel = fmap (* vel) dir
, _cpAccel = V3 0 0 (- particleGravity)
})
addTeleportParticles (p^.cpNext) i j (k + 4)
{-
- =============== CL_ParticleEffect ===============
-
- Wall impact puffs
-}
particleEffect :: V3 Float -> V3 Float -> Int -> Int -> Quake ()
particleEffect org dir color count = do
freeParticles <- use $ clientGlobals.cgFreeParticles
addEffects freeParticles 0
where addEffects :: Maybe (IORef CParticleT) -> Int -> Quake ()
addEffects Nothing _ = return ()
addEffects (Just pRef) idx
| idx >= count = return ()
| otherwise = do
p <- io $ readIORef pRef
clientGlobals.cgFreeParticles .= (p^.cpNext)
activeParticles <- use $ clientGlobals.cgActiveParticles
clientGlobals.cgActiveParticles .= Just pRef
pTime <- use $ globals.gCl.csTime
r <- Lib.rand
let pColor = color + fromIntegral (r .&. 7)
d <- liftM (fromIntegral . (.&. 31)) Lib.rand
o1 <- Lib.rand
v1 <- Lib.crandom
o2 <- Lib.rand
v2 <- Lib.crandom
o3 <- Lib.rand
v3 <- Lib.crandom
let oRand = V3 o1 o2 o3
vRand = V3 v1 v2 v3
pOrg = org + fmap (fromIntegral . (subtract 4) . (.&. 7)) oRand + fmap (* d) dir
pVel = fmap (* 20) vRand
pAccel = V3 0 0 (- particleGravity)
pAlpha = 1.0
r' <- Lib.randomF
let pAlphaVel = -1 / (0.5 + r' * 0.3)
io $ writeIORef pRef CParticleT { _cpNext = activeParticles
, _cpTime = fromIntegral pTime
, _cpColor = fromIntegral pColor
, _cpOrg = pOrg
, _cpVel = pVel
, _cpAccel = pAccel
, _cpAlpha = pAlpha
, _cpAlphaVel = pAlphaVel
}
addEffects (p^.cpNext) (idx + 1)
particleEffect2 :: V3 Float -> V3 Float -> Int -> Int -> Quake ()
particleEffect2 org dir color count = do
freeParticles <- use $ clientGlobals.cgFreeParticles
addParticleEffect2 freeParticles 0
where addParticleEffect2 :: Maybe (IORef CParticleT) -> Int -> Quake ()
addParticleEffect2 Nothing _ = return ()
addParticleEffect2 (Just pRef) idx
| idx >= count = return ()
| otherwise = do
p <- io $ readIORef pRef
activeParticles <- use $ clientGlobals.cgActiveParticles
clientGlobals.cgFreeParticles .= (p^.cpNext)
clientGlobals.cgActiveParticles .= Just pRef
time <- use $ globals.gCl.csTime
d <- Lib.rand >>= \r -> return (fromIntegral (r .&. 7))
o1 <- Lib.rand
o2 <- Lib.rand
o3 <- Lib.rand
v1 <- Lib.crandom
v2 <- Lib.crandom
v3 <- Lib.crandom
f <- Lib.randomF
io $ modifyIORef' pRef (\v -> v { _cpNext = activeParticles
, _cpTime = fromIntegral time
, _cpColor = fromIntegral color
, _cpOrg = org + fmap (fromIntegral . (subtract 4) . (.&. 7)) (V3 o1 o2 o3) + fmap (* d) dir
, _cpVel = fmap (* 20) (V3 v1 v2 v3)
, _cpAccel = V3 0 0 (- particleGravity)
, _cpAlpha = 1.0
, _cpAlphaVel = (-1.0) / (0.5 + f * 0.3)
})
addParticleEffect2 (p^.cpNext) (idx + 1)
particleEffect3 :: V3 Float -> V3 Float -> Int -> Int -> Quake ()
particleEffect3 org dir color count = do
freeParticles <- use $ clientGlobals.cgFreeParticles
addParticleEffect3 freeParticles 0
where addParticleEffect3 :: Maybe (IORef CParticleT) -> Int -> Quake ()
addParticleEffect3 Nothing _ = return ()
addParticleEffect3 (Just pRef) idx
| idx >= count = return ()
| otherwise = do
p <- io $ readIORef pRef
activeParticles <- use $ clientGlobals.cgActiveParticles
clientGlobals.cgFreeParticles .= (p^.cpNext)
clientGlobals.cgActiveParticles .= Just pRef
time <- use $ globals.gCl.csTime
d <- Lib.rand >>= \r -> return (fromIntegral (r .&. 7))
o1 <- Lib.rand
o2 <- Lib.rand
o3 <- Lib.rand
v1 <- Lib.crandom
v2 <- Lib.crandom
v3 <- Lib.crandom
f <- Lib.randomF
io $ modifyIORef' pRef (\v -> v { _cpNext = activeParticles
, _cpTime = fromIntegral time
, _cpColor = fromIntegral color
, _cpOrg = org + fmap (fromIntegral . (subtract 4) . (.&. 7)) (V3 o1 o2 o3) + fmap (* d) dir
, _cpVel = fmap (* 20) (V3 v1 v2 v3)
, _cpAccel = V3 0 0 particleGravity
, _cpAlpha = 1.0
, _cpAlphaVel = (-1.0) / (0.5 + f * 0.3)
})
addParticleEffect3 (p^.cpNext) (idx + 1)
explosionParticles :: V3 Float -> Quake ()
explosionParticles org = do
freeParticles <- use $ clientGlobals.cgFreeParticles
addEffects freeParticles 0 256
where addEffects :: Maybe (IORef CParticleT) -> Int -> Int -> Quake ()
addEffects Nothing _ _ = return ()
addEffects (Just pRef) idx maxIdx
| idx >= maxIdx = return ()
| otherwise = do
p <- io $ readIORef pRef
clientGlobals.cgFreeParticles .= (p^.cpNext)
activeParticles <- use $ clientGlobals.cgActiveParticles
clientGlobals.cgActiveParticles .= Just pRef
pTime <- use $ globals.gCl.csTime
r <- Lib.rand
let pColor = 0xE0 + fromIntegral (r .&. 7)
o1 <- Lib.rand
v1 <- Lib.rand
o2 <- Lib.rand
v2 <- Lib.rand
o3 <- Lib.rand
v3 <- Lib.rand
let oRand = V3 o1 o2 o3
vRand = V3 v1 v2 v3
pOrg = org + fmap (fromIntegral . (subtract 16) . (`mod` 32)) oRand
pVel = fmap (fromIntegral . (subtract 192) . (`mod` 384)) vRand
pAccel = V3 0 0 (- particleGravity)
pAlpha = 1.0
r' <- Lib.randomF
let pAlphaVel = -0.8 / (0.5 + r' * 0.3)
io $ writeIORef pRef CParticleT { _cpNext = activeParticles
, _cpTime = fromIntegral pTime
, _cpColor = fromIntegral pColor
, _cpOrg = pOrg
, _cpVel = pVel
, _cpAccel = pAccel
, _cpAlpha = pAlpha
, _cpAlphaVel = pAlphaVel
}
addEffects (p^.cpNext) (idx + 1) maxIdx
{-
- =============== CL_BlasterParticles ===============
-
- Wall impact puffs
-}
blasterParticles :: V3 Float -> V3 Float -> Quake ()
blasterParticles org dir = do
drawParticles 0 40
where drawParticles :: Int -> Int -> Quake ()
drawParticles idx maxIdx
| idx >= maxIdx = return ()
| otherwise = do
freeParticles <- use $ clientGlobals.cgFreeParticles
case freeParticles of
Nothing ->
return ()
Just pRef -> do
p <- io $ readIORef pRef
activeParticles <- use $ clientGlobals.cgActiveParticles
clientGlobals.cgFreeParticles .= (p^.cpNext)
io $ modifyIORef' pRef (\v -> v { _cpNext = activeParticles })
clientGlobals.cgActiveParticles .= Just pRef
time <- use $ globals.gCl.csTime
r <- Lib.rand
d <- liftM (fromIntegral . (.&. 15)) Lib.rand
o1 <- Lib.rand
o2 <- Lib.rand
o3 <- Lib.rand
v1 <- Lib.crandom
v2 <- Lib.crandom
v3 <- Lib.crandom
f <- Lib.randomF
io $ modifyIORef' pRef (\v -> v { _cpTime = fromIntegral time
, _cpColor = 0xE0 + fromIntegral (r .&. 7)
, _cpOrg = org + (V3 (fromIntegral $ (o1 .&. 7) - 4) (fromIntegral $ (o2 .&. 7) - 4) (fromIntegral $ (o3 .&. 7) - 4)) + fmap (* d) dir
, _cpVel = fmap (* 30) dir + fmap (* 40) (V3 v1 v2 v3)
, _cpAccel = V3 0 0 (- particleGravity)
, _cpAlpha = 1.0
, _cpAlphaVel = (-1.0) / (0.5 + f * 0.3)
})
drawParticles (idx + 1) maxIdx
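-- Railgun trail: a spiral of blue particles winding around the shot
-- direction, plus a scattered core of particles along the beam itself.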
railTrail :: V3 Float -> V3 Float -> Quake ()
railTrail start end = do
let move = start
vec = end - start
len = norm vec
vec' = normalize vec
(right, up) = Math3D.makeNormalVectors vec'
freeParticles <- use $ clientGlobals.cgFreeParticles
addRailTrail freeParticles vec' right up move 0 len
let vec'' = fmap (* 0.75) vec'
freeParticles' <- use $ clientGlobals.cgFreeParticles
addRailTrail2 freeParticles' vec'' len move
where addRailTrail :: Maybe (IORef CParticleT) -> V3 Float -> V3 Float -> V3 Float -> V3 Float -> Float -> Float -> Quake ()
addRailTrail Nothing _ _ _ _ _ _ = return ()
addRailTrail (Just pRef) vec right up move idx maxIdx
| idx >= maxIdx = return ()
| otherwise = do
p <- io $ readIORef pRef
clientGlobals.cgFreeParticles .= (p^.cpNext)
activeParticles <- use $ clientGlobals.cgActiveParticles
clientGlobals.cgActiveParticles .= Just pRef
let d = 0.1 * idx
c = cos d
s = sin d
dir = fmap (* c) right + fmap (* s) up
time <- use $ globals.gCl.csTime
f <- Lib.randomF
r <- Lib.rand
io $ modifyIORef' pRef (\v -> v { _cpNext = activeParticles
, _cpTime = fromIntegral time
, _cpAccel = V3 0 0 0
, _cpAlpha = 1.0
, _cpAlphaVel = (-1.0) / (1.0 + f * 0.2)
, _cpColor = 0x74 + fromIntegral (r .&. 7)
, _cpOrg = move + fmap (* 3) dir
, _cpVel = fmap (* 6) dir
})
addRailTrail (p^.cpNext) vec right up (move + vec) (idx + 1) maxIdx
addRailTrail2 :: Maybe (IORef CParticleT) -> V3 Float -> Float -> V3 Float -> Quake ()
addRailTrail2 Nothing _ _ _ = return ()
addRailTrail2 (Just pRef) vec len move
| len <= 0 = return ()
| otherwise = do
p <- io $ readIORef pRef
clientGlobals.cgFreeParticles .= (p^.cpNext)
activeParticles <- use $ clientGlobals.cgActiveParticles
clientGlobals.cgActiveParticles .= Just pRef
time <- use $ globals.gCl.csTime
f <- Lib.randomF
r <- Lib.rand
o1 <- Lib.crandom
o2 <- Lib.crandom
o3 <- Lib.crandom
v1 <- Lib.crandom
v2 <- Lib.crandom
v3 <- Lib.crandom
io $ modifyIORef' pRef (\v -> v { _cpNext = activeParticles
, _cpTime = fromIntegral time
, _cpAccel = V3 0 0 0
, _cpAlpha = 1.0
, _cpAlphaVel = (-1.0) / (0.5 + f * 0.2)
, _cpColor = fromIntegral (r .&. 15)
, _cpOrg = move + fmap (* 3) (V3 o1 o2 o3)
, _cpVel = fmap (* 3) (V3 v1 v2 v3)
})
addRailTrail2 (p^.cpNext) vec (len - 0.75) (move + vec)
bfgExplosionParticles :: V3 Float -> Quake ()
bfgExplosionParticles org = do
freeParticles <- use $ clientGlobals.cgFreeParticles
addBfgExplosionParticles freeParticles 0 256
where addBfgExplosionParticles :: Maybe (IORef CParticleT) -> Int -> Int -> Quake ()
addBfgExplosionParticles Nothing _ _ = return ()
addBfgExplosionParticles (Just pRef) idx maxIdx
| idx >= maxIdx = return ()
| otherwise = do
p <- io $ readIORef pRef
clientGlobals.cgFreeParticles .= (p^.cpNext)
activeParticles <- use $ clientGlobals.cgActiveParticles
clientGlobals.cgActiveParticles .= Just pRef
time <- use $ globals.gCl.csTime
r <- Lib.rand
o1 <- Lib.rand
o2 <- Lib.rand
o3 <- Lib.rand
v1 <- Lib.rand
v2 <- Lib.rand
v3 <- Lib.rand
f <- Lib.randomF
io $ modifyIORef' pRef (\v -> v { _cpNext = activeParticles
, _cpTime = fromIntegral time
, _cpColor = 0xD0 + fromIntegral (r .&. 7)
, _cpOrg = org + fmap (\v -> fromIntegral ((v `mod` 32) - 16)) (V3 o1 o2 o3)
, _cpVel = fmap (\v -> fromIntegral ((v `mod` 384) - 192)) (V3 v1 v2 v3)
, _cpAccel = V3 0 0 (- particleGravity)
, _cpAlpha = 1.0
, _cpAlphaVel = (-0.8) / (0.5 + f * 0.3)
})
addBfgExplosionParticles (p^.cpNext) (idx + 1) maxIdx
bubbleTrail :: V3 Float -> V3 Float -> Quake ()
bubbleTrail start end = do
let move = start
vec = end - start
len = norm vec
vec' = fmap (* 32) (normalize vec)
freeParticles <- use $ clientGlobals.cgFreeParticles
addBubbleTrail freeParticles vec' move 0 len
where addBubbleTrail :: Maybe (IORef CParticleT) -> V3 Float -> V3 Float -> Float -> Float -> Quake ()
addBubbleTrail Nothing _ _ _ _ = return ()
addBubbleTrail (Just pRef) vec move idx maxIdx
| idx >= maxIdx = return ()
| otherwise = do
p <- io $ readIORef pRef
clientGlobals.cgFreeParticles .= (p^.cpNext)
activeParticles <- use $ clientGlobals.cgActiveParticles
clientGlobals.cgActiveParticles .= Just pRef
time <- use $ globals.gCl.csTime
r <- Lib.rand
f <- Lib.randomF
o1 <- Lib.crandom
o2 <- Lib.crandom
o3 <- Lib.crandom
v1 <- Lib.crandom
v2 <- Lib.crandom
v3 <- Lib.crandom
io $ modifyIORef' pRef (\v -> v { _cpNext = activeParticles
, _cpTime = fromIntegral time
, _cpAccel = V3 0 0 0
, _cpAlpha = 1.0
, _cpAlphaVel = (-1.0) / (1.0 + f * 0.2)
, _cpColor = 4 + fromIntegral (r .&. 7)
, _cpOrg = move + fmap (* 2) (V3 o1 o2 o3)
, _cpVel = (fmap (* 5) (V3 v1 v2 v3)) & _z +~ 6
})
addBubbleTrail (p^.cpNext) vec (move + vec) (idx + 32) maxIdx
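-- Parse a player muzzle-flash message: allocate a dynamic light slightly in
-- front of the shooter and play the weapon's firing sound (quieter and with a
-- smaller flash when the weapon is silenced).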
parseMuzzleFlash :: Quake ()
parseMuzzleFlash = do
i <- MSG.readShort (globals.gNetMessage)
when (i < 1 || i >= Constants.maxEdicts) $
Com.comError Constants.errDrop "CL_ParseMuzzleFlash: bad entity"
w <- MSG.readByte (globals.gNetMessage)
let silenced = w .&. Constants.mzSilenced
weapon = w .&. (complement Constants.mzSilenced)
Just pl <- preuse $ globals.gClEntities.ix i
dlRef <- allocDLight i
r <- Lib.rand
time <- use $ globals.gCl.csTime
let (Just fv, Just rv, _) = Math3D.angleVectors (pl^.ceCurrent.esAngles) True True False
origin = (pl^.ceCurrent.esOrigin)
+ fmap (* 18) fv
+ fmap (* 16) rv
radius = if silenced /= 0
then 100 + fromIntegral (r .&. 31)
else 200 + fromIntegral (r .&. 31)
volume = if silenced /= 0
then 0.2
else 1
io $ modifyIORef' dlRef (\v -> v { _cdlOrigin = origin
, _cdlRadius = radius
, _cdlMinLight = 32
, _cdlDie = fromIntegral time
})
if | weapon == Constants.mzBlaster -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
soundIdx <- S.registerSound "weapons/blastf1a.wav"
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx volume Constants.attnNorm 0
| weapon == Constants.mzBlueHyperblaster -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 0 0 1})
soundIdx <- S.registerSound "weapons/hyprbf1a.wav"
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx volume Constants.attnNorm 0
| weapon == Constants.mzHyperblaster -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0})
soundIdx <- S.registerSound "weapons/hyprbf1a.wav"
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx volume Constants.attnNorm 0
| weapon == Constants.mzMachinegun -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
r <- Lib.rand
let soundName = "weapons/machgf" `B.append` BC.pack (show ((r `mod` 5) + 1)) `B.append` "b.wav" -- IMPROVE ?
soundIdx <- S.registerSound soundName
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx volume Constants.attnNorm 0
| weapon == Constants.mzShotgun -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
soundIdx <- S.registerSound "weapons/shotgf1b.wav"
soundIdx' <- S.registerSound "weapons/shotgr1b.wav"
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx volume Constants.attnNorm 0
S.startSound Nothing (Ref i) Constants.chanAuto soundIdx' volume Constants.attnNorm 0.1
| weapon == Constants.mzSShotgun -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
soundIdx <- S.registerSound "weapons/sshotf1b.wav"
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx volume Constants.attnNorm 0
| weapon == Constants.mzChaingun1 -> do
r <- Lib.rand
io $ modifyIORef' dlRef (\v -> v { _cdlRadius = 200 + fromIntegral (r .&. 31)
, _cdlColor = V3 1 0.25 0
})
r' <- Lib.rand
let soundName = "weapons/machgf" `B.append` BC.pack (show ((r' `mod` 5) + 1)) `B.append` "b.wav" -- IMPROVE?
soundIdx <- S.registerSound soundName
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx volume Constants.attnNorm 0
| weapon == Constants.mzChaingun2 -> do
r <- Lib.rand
time <- use $ globals.gCl.csTime
io $ modifyIORef' dlRef (\v -> v { _cdlRadius = 225 + fromIntegral (r .&. 31)
, _cdlColor = V3 1 0.5 0
, _cdlDie = fromIntegral time + 0.1 -- long delay
})
r' <- Lib.rand
let soundName = "weapons/machgf" `B.append` BC.pack (show ((r' `mod` 5) + 1)) `B.append` "b.wav" -- IMPROVE?
soundIdx <- S.registerSound soundName
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx volume Constants.attnNorm 0
r'' <- Lib.rand
let soundName' = "weapons/machgf" `B.append` BC.pack (show ((r'' `mod` 5) + 1)) `B.append` "b.wav" -- IMPROVE?
soundIdx' <- S.registerSound soundName'
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx' volume Constants.attnNorm 0.05
| weapon == Constants.mzChaingun3 -> do
r <- Lib.rand
time <- use $ globals.gCl.csTime
io $ modifyIORef' dlRef (\v -> v { _cdlRadius = 250 + fromIntegral (r .&. 31)
, _cdlColor = V3 1 1 0
, _cdlDie = fromIntegral time + 0.1 -- long delay
})
r' <- Lib.rand
let soundName = "weapons/machgf" `B.append` BC.pack (show ((r' `mod` 5) + 1)) `B.append` "b.wav" -- IMPROVE?
soundIdx <- S.registerSound soundName
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx volume Constants.attnNorm 0
r'' <- Lib.rand
let soundName' = "weapons/machgf" `B.append` BC.pack (show ((r'' `mod` 5) + 1)) `B.append` "b.wav" -- IMPROVE?
soundIdx' <- S.registerSound soundName'
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx' volume Constants.attnNorm 0.033
r''' <- Lib.rand
let soundName'' = "weapons/machgf" `B.append` BC.pack (show ((r''' `mod` 5) + 1)) `B.append` "b.wav" -- IMPROVE?
soundIdx'' <- S.registerSound soundName''
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx'' volume Constants.attnNorm 0.066
| weapon == Constants.mzRailgun -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 0.5 0.5 1 })
soundIdx <- S.registerSound "weapons/railgf1a.wav"
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx volume Constants.attnNorm 0
| weapon == Constants.mzRocket -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 0.5 0.2 })
soundIdx <- S.registerSound "weapons/rocklf1a.wav"
soundIdx' <- S.registerSound "weapons/rocklr1b.wav"
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx volume Constants.attnNorm 0
S.startSound Nothing (Ref i) Constants.chanAuto soundIdx' volume Constants.attnNorm 0.1
| weapon == Constants.mzGrenade -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 0.5 0 })
soundIdx <- S.registerSound "weapons/grenlf1a.wav"
soundIdx' <- S.registerSound "weapons/grenlr1b.wav"
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx volume Constants.attnNorm 0
S.startSound Nothing (Ref i) Constants.chanAuto soundIdx' volume Constants.attnNorm 0.1
| weapon == Constants.mzBFG -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 0 1 0 })
soundIdx <- S.registerSound "weapons/bfg__f1y.wav"
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx volume Constants.attnNorm 0
| weapon == Constants.mzLogin -> do
time <- use $ globals.gCl.csTime
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 0 1 0
, _cdlDie = fromIntegral time + 1.0
})
soundIdx <- S.registerSound "weapons/grenlf1a.wav"
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
logoutEffect (pl^.ceCurrent.esOrigin) weapon
| weapon == Constants.mzLogout -> do
time <- use $ globals.gCl.csTime
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 0 0
, _cdlDie = fromIntegral time + 1.0
})
soundIdx <- S.registerSound "weapons/grenlf1a.wav"
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
logoutEffect (pl^.ceCurrent.esOrigin) weapon
| weapon == Constants.mzRespawn -> do
time <- use $ globals.gCl.csTime
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0
, _cdlDie = fromIntegral time + 1.0
})
soundIdx <- S.registerSound "weapons/grenlf1a.wav"
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
logoutEffect (pl^.ceCurrent.esOrigin) weapon
| weapon == Constants.mzPhalanx -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 0.5 0.5 })
soundIdx <- S.registerSound "weapons/plasshot.wav"
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx volume Constants.attnNorm 0
| weapon == Constants.mzIonRipper -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 0.5 0.5 })
soundIdx <- S.registerSound "weapons/rippfire.wav"
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx volume Constants.attnNorm 0
| weapon == Constants.mzEtfRifle -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 0.9 0.7 0 })
soundIdx <- S.registerSound "weapons/nail1.wav"
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx volume Constants.attnNorm 0
| weapon == Constants.mzShotgun2 -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
soundIdx <- S.registerSound "weapons/shotg2.wav"
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx volume Constants.attnNorm 0
| weapon == Constants.mzHeatBeam -> do
time <- use $ globals.gCl.csTime
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0
, _cdlDie = fromIntegral time + 100
})
| weapon == Constants.mzBlaster2 -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 0 1 0 })
-- FIXME: different sound for blaster2 ??
soundIdx <- S.registerSound "weapons/blastf1a.wav"
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx volume Constants.attnNorm 0
| weapon == Constants.mzTracker -> do
-- negative flashes handled the same in gl/soft until CL_AddDLights
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 (-1) (-1) (-1) })
soundIdx <- S.registerSound "weapons/disint2.wav"
S.startSound Nothing (Ref i) Constants.chanWeapon soundIdx volume Constants.attnNorm 0
| weapon == Constants.mzNuke1 -> do
time <- use $ globals.gCl.csTime
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 0 0
, _cdlDie = fromIntegral time + 100
})
| weapon == Constants.mzNuke2 -> do
time <- use $ globals.gCl.csTime
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0
, _cdlDie = fromIntegral time + 100
})
| weapon == Constants.mzNuke4 -> do
time <- use $ globals.gCl.csTime
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 0 0 1
, _cdlDie = fromIntegral time + 100
})
| weapon == Constants.mzNuke8 -> do
time <- use $ globals.gCl.csTime
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 0 1 1
, _cdlDie = fromIntegral time + 100
})
| otherwise -> return () -- TODO: some error should be thrown?
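-- Parse a monster muzzle-flash message: the flash origin is the entity origin
-- offset by the per-flash entry in 'MFlash.monsterFlashOffset', then the
-- matching light color, particles and sound are emitted.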
parseMuzzleFlash2 :: Quake ()
parseMuzzleFlash2 = do
ent <- MSG.readShort (globals.gNetMessage)
when (ent < 1 || ent >= Constants.maxEdicts) $
Com.comError Constants.errDrop "CL_ParseMuzzleFlash2: bad entity"
flashNumber <- MSG.readByte (globals.gNetMessage)
-- locate the origin
Just cent <- preuse $ globals.gClEntities.ix ent
let (Just forward, Just right, _) = Math3D.angleVectors (cent^.ceCurrent.esAngles) True True False
a = (cent^.ceCurrent.esOrigin._x) + (forward^._x) * ((MFlash.monsterFlashOffset V.! flashNumber)^._x) + (right^._x) * ((MFlash.monsterFlashOffset V.! flashNumber)^._y)
b = (cent^.ceCurrent.esOrigin._y) + (forward^._y) * ((MFlash.monsterFlashOffset V.! flashNumber)^._x) + (right^._y) * ((MFlash.monsterFlashOffset V.! flashNumber)^._y)
c = (cent^.ceCurrent.esOrigin._z) + (forward^._z) * ((MFlash.monsterFlashOffset V.! flashNumber)^._x) + (right^._z) * ((MFlash.monsterFlashOffset V.! flashNumber)^._y) + ((MFlash.monsterFlashOffset V.! flashNumber)^._z)
origin = V3 a b c
dlRef <- allocDLight ent
r <- Lib.rand
time <- use $ globals.gCl.csTime
io $ modifyIORef' dlRef (\v -> v { _cdlOrigin = origin
, _cdlRadius = 200 + fromIntegral (r .&. 31)
, _cdlMinLight = 32
, _cdlDie = fromIntegral time
})
if | flashNumber `elem` [ Constants.mz2InfantryMachinegun1
, Constants.mz2InfantryMachinegun2
, Constants.mz2InfantryMachinegun3
, Constants.mz2InfantryMachinegun4
, Constants.mz2InfantryMachinegun5
, Constants.mz2InfantryMachinegun6
, Constants.mz2InfantryMachinegun7
, Constants.mz2InfantryMachinegun8
, Constants.mz2InfantryMachinegun9
, Constants.mz2InfantryMachinegun10
, Constants.mz2InfantryMachinegun11
, Constants.mz2InfantryMachinegun12
, Constants.mz2InfantryMachinegun13
] -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
v3o <- use $ globals.gVec3Origin
particleEffect origin v3o 0 40
CLTEnt.smokeAndFlash origin
soundIdx <- S.registerSound "infantry/infatck1.wav"
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
| flashNumber `elem` [ Constants.mz2SoldierMachinegun1
, Constants.mz2SoldierMachinegun2
, Constants.mz2SoldierMachinegun3
, Constants.mz2SoldierMachinegun4
, Constants.mz2SoldierMachinegun5
, Constants.mz2SoldierMachinegun6
, Constants.mz2SoldierMachinegun7
, Constants.mz2SoldierMachinegun8
] -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
v3o <- use $ globals.gVec3Origin
particleEffect origin v3o 0 40
CLTEnt.smokeAndFlash origin
soundIdx <- S.registerSound "soldier/solatck3.wav"
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
| flashNumber `elem` [ Constants.mz2GunnerMachinegun1
, Constants.mz2GunnerMachinegun2
, Constants.mz2GunnerMachinegun3
, Constants.mz2GunnerMachinegun4
, Constants.mz2GunnerMachinegun5
, Constants.mz2GunnerMachinegun6
, Constants.mz2GunnerMachinegun7
, Constants.mz2GunnerMachinegun8
] -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
v3o <- use $ globals.gVec3Origin
particleEffect origin v3o 0 40
CLTEnt.smokeAndFlash origin
soundIdx <- S.registerSound "gunner/gunatck2.wav"
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
| flashNumber `elem` [ Constants.mz2ActorMachinegun1
, Constants.mz2SupertankMachinegun1
, Constants.mz2SupertankMachinegun2
, Constants.mz2SupertankMachinegun3
, Constants.mz2SupertankMachinegun4
, Constants.mz2SupertankMachinegun5
, Constants.mz2SupertankMachinegun6
, Constants.mz2TurretMachinegun
] -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
v3o <- use $ globals.gVec3Origin
particleEffect origin v3o 0 40
CLTEnt.smokeAndFlash origin
soundIdx <- S.registerSound "infantry/infatck1.wav"
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
| flashNumber `elem` [ Constants.mz2Boss2MachinegunL1
, Constants.mz2Boss2MachinegunL2
, Constants.mz2Boss2MachinegunL3
, Constants.mz2Boss2MachinegunL4
, Constants.mz2Boss2MachinegunL5
, Constants.mz2CarrierMachinegunL1
, Constants.mz2CarrierMachinegunL2
] -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
v3o <- use $ globals.gVec3Origin
particleEffect origin v3o 0 40
CLTEnt.smokeAndFlash origin
soundIdx <- S.registerSound "infantry/infatck1.wav"
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNone 0
| flashNumber `elem` [ Constants.mz2SoldierBlaster1
, Constants.mz2SoldierBlaster2
, Constants.mz2SoldierBlaster3
, Constants.mz2SoldierBlaster4
, Constants.mz2SoldierBlaster5
, Constants.mz2SoldierBlaster6
, Constants.mz2SoldierBlaster7
, Constants.mz2SoldierBlaster8
, Constants.mz2TurretBlaster
] -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
soundIdx <- S.registerSound "soldier/solatck2.wav"
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
| flashNumber `elem` [ Constants.mz2FlyerBlaster1
, Constants.mz2FlyerBlaster2
] -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
soundIdx <- S.registerSound "flyer/flyatck3.wav"
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
| flashNumber == Constants.mz2MedicBlaster1 -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
soundIdx <- S.registerSound "medic/medatck1.wav"
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
| flashNumber == Constants.mz2HoverBlaster1 -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
soundIdx <- S.registerSound "hover/hovatck1.wav"
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
| flashNumber == Constants.mz2FloatBlaster1 -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
soundIdx <- S.registerSound "floater/fltatck1.wav"
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
| flashNumber `elem` [ Constants.mz2SoldierShotgun1
, Constants.mz2SoldierShotgun2
, Constants.mz2SoldierShotgun3
, Constants.mz2SoldierShotgun4
, Constants.mz2SoldierShotgun5
, Constants.mz2SoldierShotgun6
, Constants.mz2SoldierShotgun7
, Constants.mz2SoldierShotgun8
] -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
CLTEnt.smokeAndFlash origin
soundIdx <- S.registerSound "soldier/solatck1.wav"
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
| flashNumber `elem` [ Constants.mz2TankBlaster1
, Constants.mz2TankBlaster2
, Constants.mz2TankBlaster3
] -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
soundIdx <- S.registerSound "tank/tnkatck3.wav"
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
| flashNumber `elem` [ Constants.mz2TankMachinegun1
, Constants.mz2TankMachinegun2
, Constants.mz2TankMachinegun3
, Constants.mz2TankMachinegun4
, Constants.mz2TankMachinegun5
, Constants.mz2TankMachinegun6
, Constants.mz2TankMachinegun7
, Constants.mz2TankMachinegun8
, Constants.mz2TankMachinegun9
, Constants.mz2TankMachinegun10
, Constants.mz2TankMachinegun11
, Constants.mz2TankMachinegun12
, Constants.mz2TankMachinegun13
, Constants.mz2TankMachinegun14
, Constants.mz2TankMachinegun15
, Constants.mz2TankMachinegun16
, Constants.mz2TankMachinegun17
, Constants.mz2TankMachinegun18
, Constants.mz2TankMachinegun19
] -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
v3o <- use $ globals.gVec3Origin
particleEffect origin v3o 0 40
CLTEnt.smokeAndFlash origin
r <- Lib.rand
let soundName = "tank/tnkatk2" `B.append` B.singleton (97 + fromIntegral (r `mod` 5)) `B.append` ".wav"
soundIdx <- S.registerSound soundName
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
| flashNumber `elem` [ Constants.mz2ChickRocket1
, Constants.mz2TurretRocket
] -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 0.5 0.2 })
soundIdx <- S.registerSound "chick/chkatck2.wav"
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
| flashNumber `elem` [ Constants.mz2TankRocket1
, Constants.mz2TankRocket2
, Constants.mz2TankRocket3
] -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 0.5 0.2 })
soundIdx <- S.registerSound "tank/tnkatck1.wav"
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
| flashNumber `elem` [ Constants.mz2SupertankRocket1
, Constants.mz2SupertankRocket2
, Constants.mz2SupertankRocket3
, Constants.mz2Boss2Rocket1
, Constants.mz2Boss2Rocket2
, Constants.mz2Boss2Rocket3
, Constants.mz2Boss2Rocket4
, Constants.mz2CarrierRocket1
] -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 0.5 0.2 })
soundIdx <- S.registerSound "tank/rocket.wav"
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
| flashNumber `elem` [ Constants.mz2GunnerGrenade1
, Constants.mz2GunnerGrenade2
, Constants.mz2GunnerGrenade3
, Constants.mz2GunnerGrenade4
] -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 0.5 0 })
soundIdx <- S.registerSound "gunner/gunatck3.wav"
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
| flashNumber `elem` [ Constants.mz2GladiatorRailgun1
, Constants.mz2CarrierRailgun
, Constants.mz2WidowRail
] ->
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 0.5 0.5 1.0 })
| flashNumber == Constants.mz2MakronBfg ->
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 0.5 1 0.5 })
| flashNumber `elem` [ Constants.mz2MakronBlaster1
, Constants.mz2MakronBlaster2
, Constants.mz2MakronBlaster3
, Constants.mz2MakronBlaster4
, Constants.mz2MakronBlaster5
, Constants.mz2MakronBlaster6
, Constants.mz2MakronBlaster7
, Constants.mz2MakronBlaster8
, Constants.mz2MakronBlaster9
, Constants.mz2MakronBlaster10
, Constants.mz2MakronBlaster11
, Constants.mz2MakronBlaster12
, Constants.mz2MakronBlaster13
, Constants.mz2MakronBlaster14
, Constants.mz2MakronBlaster15
, Constants.mz2MakronBlaster16
, Constants.mz2MakronBlaster17
] -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
soundIdx <- S.registerSound "makron/blaster.wav"
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
| flashNumber `elem` [ Constants.mz2JorgMachinegunL1
, Constants.mz2JorgMachinegunL2
, Constants.mz2JorgMachinegunL3
, Constants.mz2JorgMachinegunL4
, Constants.mz2JorgMachinegunL5
, Constants.mz2JorgMachinegunL6
] -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
v3o <- use $ globals.gVec3Origin
particleEffect origin v3o 0 40
CLTEnt.smokeAndFlash origin
soundIdx <- S.registerSound "boss3/xfire.wav"
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
| flashNumber `elem` [ Constants.mz2JorgMachinegunR1
, Constants.mz2JorgMachinegunR2
, Constants.mz2JorgMachinegunR3
, Constants.mz2JorgMachinegunR4
, Constants.mz2JorgMachinegunR5
, Constants.mz2JorgMachinegunR6
] -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
v3o <- use $ globals.gVec3Origin
particleEffect origin v3o 0 40
CLTEnt.smokeAndFlash origin
| flashNumber == Constants.mz2JorgBfg1 ->
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 0.5 1 0.5 })
| flashNumber `elem` [ Constants.mz2Boss2MachinegunR1
, Constants.mz2Boss2MachinegunR2
, Constants.mz2Boss2MachinegunR3
, Constants.mz2Boss2MachinegunR4
, Constants.mz2Boss2MachinegunR5
, Constants.mz2CarrierMachinegunR1
, Constants.mz2CarrierMachinegunR2
] -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0 })
v3o <- use $ globals.gVec3Origin
particleEffect origin v3o 0 40
CLTEnt.smokeAndFlash origin
| flashNumber `elem` [ Constants.mz2StalkerBlaster
, Constants.mz2DaedalusBlaster
, Constants.mz2MedicBlaster2
, Constants.mz2WidowBlaster
, Constants.mz2WidowBlasterSweep1
, Constants.mz2WidowBlasterSweep2
, Constants.mz2WidowBlasterSweep3
, Constants.mz2WidowBlasterSweep4
, Constants.mz2WidowBlasterSweep5
, Constants.mz2WidowBlasterSweep6
, Constants.mz2WidowBlasterSweep7
, Constants.mz2WidowBlasterSweep8
, Constants.mz2WidowBlasterSweep9
, Constants.mz2WidowBlaster100
, Constants.mz2WidowBlaster90
, Constants.mz2WidowBlaster80
, Constants.mz2WidowBlaster70
, Constants.mz2WidowBlaster60
, Constants.mz2WidowBlaster50
, Constants.mz2WidowBlaster40
, Constants.mz2WidowBlaster30
, Constants.mz2WidowBlaster20
, Constants.mz2WidowBlaster10
, Constants.mz2WidowBlaster0
, Constants.mz2WidowBlaster10L
, Constants.mz2WidowBlaster20L
, Constants.mz2WidowBlaster30L
, Constants.mz2WidowBlaster40L
, Constants.mz2WidowBlaster50L
, Constants.mz2WidowBlaster60L
, Constants.mz2WidowBlaster70L
, Constants.mz2WidowRun1
, Constants.mz2WidowRun2
, Constants.mz2WidowRun3
, Constants.mz2WidowRun4
, Constants.mz2WidowRun5
, Constants.mz2WidowRun6
, Constants.mz2WidowRun7
, Constants.mz2WidowRun8
] -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 0 1 0 })
soundIdx <- S.registerSound "tank/tnkatck3.wav"
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
| flashNumber == Constants.mz2WidowDisruptor -> do
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 (-1) (-1) (-1) })
soundIdx <- S.registerSound "weapons/disint2.wav"
S.startSound Nothing (Ref ent) Constants.chanWeapon soundIdx 1 Constants.attnNorm 0
| flashNumber `elem` [ Constants.mz2WidowPlasmaBeam
, Constants.mz2Widow2Beamer1
, Constants.mz2Widow2Beamer2
, Constants.mz2Widow2Beamer3
, Constants.mz2Widow2Beamer4
, Constants.mz2Widow2Beamer5
, Constants.mz2Widow2BeamSweep1
, Constants.mz2Widow2BeamSweep2
, Constants.mz2Widow2BeamSweep3
, Constants.mz2Widow2BeamSweep4
, Constants.mz2Widow2BeamSweep5
, Constants.mz2Widow2BeamSweep6
, Constants.mz2Widow2BeamSweep7
, Constants.mz2Widow2BeamSweep8
, Constants.mz2Widow2BeamSweep9
, Constants.mz2Widow2BeamSweep10
, Constants.mz2Widow2BeamSweep11
] -> do
r <- Lib.rand
time <- use $ globals.gCl.csTime
io $ modifyIORef' dlRef (\v -> v { _cdlColor = V3 1 1 0
, _cdlRadius = 300 + fromIntegral (r .&. 100)
, _cdlDie = fromIntegral time + 200
})
| otherwise -> do
-- TODO: throw some kind of error ??
return ()
allocDLight :: Int -> Quake (IORef CDLightT)
allocDLight key = do
dLights <- use $ clientGlobals.cgDLights
-- first look for an exact key match
exactMatch <- io $ findExactMatch dLights 0 Constants.maxDLights
case exactMatch of
Just em -> return em
Nothing -> do
-- then look for anything else
time <- use $ globals.gCl.csTime
anyMatch <- io $ findAnyDLight (fromIntegral time) dLights 0 Constants.maxDLights
case anyMatch of
Just am -> return am
Nothing -> do
io $ writeIORef (dLights V.! 0) newCDLightT { _cdlKey = key }
return (dLights V.! 0)
where findExactMatch :: V.Vector (IORef CDLightT) -> Int -> Int -> IO (Maybe (IORef CDLightT))
findExactMatch dLights idx maxIdx
| idx >= maxIdx = return Nothing
| otherwise = do
let dlRef = dLights V.! idx
dl <- readIORef dlRef
if (dl^.cdlKey) == key
then do
writeIORef dlRef newCDLightT { _cdlKey = key }
return (Just dlRef)
else
findExactMatch dLights (idx + 1) maxIdx
findAnyDLight :: Float -> V.Vector (IORef CDLightT) -> Int -> Int -> IO (Maybe (IORef CDLightT))
findAnyDLight time dLights idx maxIdx
| idx >= maxIdx = return Nothing
| otherwise = do
let dlRef = dLights V.! idx
dl <- readIORef dlRef
if (dl^.cdlDie) < time
then do
writeIORef dlRef newCDLightT { _cdlKey = key }
return (Just dlRef)
else
findAnyDLight time dLights (idx + 1) maxIdx
logoutEffect :: V3 Float -> Int -> Quake ()
logoutEffect org pType = do
freeParticles <- use $ clientGlobals.cgFreeParticles
addLogoutEffect freeParticles 0 500
where addLogoutEffect :: Maybe (IORef CParticleT) -> Int -> Int -> Quake ()
addLogoutEffect Nothing _ _ = return ()
addLogoutEffect (Just pRef) idx maxIdx
| idx >= maxIdx = return ()
| otherwise = do
p <- io $ readIORef pRef
clientGlobals.cgFreeParticles .= (p^.cpNext)
activeParticles <- use $ clientGlobals.cgActiveParticles
clientGlobals.cgActiveParticles .= Just pRef
time <- use $ globals.gCl.csTime
r <- Lib.rand
f <- Lib.randomF
o1 <- Lib.randomF
o2 <- Lib.randomF
o3 <- Lib.randomF
v1 <- Lib.crandom
v2 <- Lib.crandom
v3 <- Lib.crandom
let r' = fromIntegral (r .&. 7)
color = if | pType == Constants.mzLogin -> 0xD0 + r' -- green
| pType == Constants.mzLogout -> 0x40 + r' -- red
| otherwise -> 0xE0 + r' -- yellow
io $ modifyIORef' pRef (\v -> v { _cpNext = activeParticles
, _cpTime = fromIntegral time
, _cpColor = color
, _cpOrg = V3 ((org^._x) - 16 + o1 * 32) ((org^._y) - 16 + o2 * 32) ((org^._z) - 24 + o3 * 56)
, _cpVel = fmap (* 20) (V3 v1 v2 v3)
, _cpAccel = V3 0 0 (- particleGravity)
, _cpAlpha = 1.0
, _cpAlphaVel = (-1.0) / (1.0 + f * 0.3)
})
addLogoutEffect (p^.cpNext) (idx + 1) maxIdx
rocketTrail :: V3 Float -> V3 Float -> Int -> Quake ()
rocketTrail start end oldIdx = do
-- smoke
diminishingTrail start end oldIdx Constants.efRocket
-- fire
let move = start
vec = end - start
len = norm vec
vec' = normalize vec
freeParticles <- use $ clientGlobals.cgFreeParticles
addRocketTrail freeParticles vec' move len
where addRocketTrail :: Maybe (IORef CParticleT) -> V3 Float -> V3 Float -> Float -> Quake ()
addRocketTrail Nothing _ _ _ = return ()
addRocketTrail (Just pRef) vec move len
| len <= 0 = return ()
| otherwise = do
r <- Lib.rand
if r .&. 7 == 0
then do
p <- io $ readIORef pRef
clientGlobals.cgFreeParticles .= (p^.cpNext)
activeParticles <- use $ clientGlobals.cgActiveParticles
clientGlobals.cgActiveParticles .= Just pRef
time <- use $ globals.gCl.csTime
r <- Lib.rand
f <- Lib.randomF
o1 <- Lib.crandom
o2 <- Lib.crandom
o3 <- Lib.crandom
v1 <- Lib.crandom
v2 <- Lib.crandom
v3 <- Lib.crandom
io $ modifyIORef' pRef (\v -> v { _cpNext = activeParticles
, _cpAccel = V3 0 0 (- particleGravity)
, _cpTime = fromIntegral time
, _cpAlpha = 1.0
, _cpAlphaVel = (-1.0) / (1.0 + f * 0.2)
, _cpColor = 0xDC + fromIntegral (r .&. 3)
, _cpOrg = move + fmap (* 5) (V3 o1 o2 o3)
, _cpVel = fmap (* 20) (V3 v1 v2 v3)
})
addRocketTrail (p^.cpNext) vec (move + vec) (len - 1)
else
addRocketTrail (Just pRef) vec (move + vec) (len - 1)
blasterTrail :: V3 Float -> V3 Float -> Quake ()
blasterTrail start end = do
let move = start
vec = end - start
len = norm vec
vec' = fmap (* 5) (normalize vec)
trailParticles move vec' len
where trailParticles :: V3 Float -> V3 Float -> Float -> Quake ()
trailParticles move vec len
| len <= 0 = return ()
| otherwise = do
freeParticles <- use $ clientGlobals.cgFreeParticles
case freeParticles of
Nothing ->
return ()
Just pRef -> do
activeParticles <- use $ clientGlobals.cgActiveParticles
time <- use $ globals.gCl.csTime
p <- io $ readIORef pRef
clientGlobals.cgFreeParticles .= (p^.cpNext)
io $ modifyIORef' pRef (\v -> v { _cpNext = activeParticles })
clientGlobals.cgActiveParticles .= Just pRef
f <- Lib.randomF
o1 <- Lib.crandom
o2 <- Lib.crandom
o3 <- Lib.crandom
v1 <- Lib.crandom
v2 <- Lib.crandom
v3 <- Lib.crandom
io $ modifyIORef' pRef (\v -> v { _cpTime = fromIntegral time
, _cpAlpha = 1.0
, _cpAlphaVel = (-1.0) / (0.3 + f * 0.2)
, _cpColor = 0xE0
, _cpOrg = move + V3 o1 o2 o3
, _cpVel = V3 (5 * v1) (5 * v2) (5 * v3)
, _cpAccel = V3 0 0 0
})
trailParticles (move + vec) vec (len - 5)
-- TODO: oldIdx should be CEntityReference
diminishingTrail :: V3 Float -> V3 Float -> Int -> Int -> Quake ()
diminishingTrail start end oldIdx flags = do
let move = start
vec = end - start
len = norm vec
vec' = fmap (* 0.5) (normalize vec)
Just old <- preuse $ globals.gClEntities.ix oldIdx
let (orgScale, velScale) = if | (old^.ceTrailCount) > 900 -> (4, 15)
| (old^.ceTrailCount) > 800 -> (2, 10)
| otherwise -> (1, 5)
freeParticles <- use $ clientGlobals.cgFreeParticles
addDiminishingTrail freeParticles vec' move orgScale velScale len
where addDiminishingTrail :: Maybe (IORef CParticleT) -> V3 Float -> V3 Float -> Float -> Float -> Float -> Quake ()
addDiminishingTrail Nothing _ _ _ _ _ = return ()
addDiminishingTrail (Just pRef) vec move orgScale velScale len
| len <= 0 = return ()
| otherwise = do
r <- Lib.rand
Just old <- preuse $ globals.gClEntities.ix oldIdx
            -- drop fewer particles as it flies
if fromIntegral (r .&. 1023) < (old^.ceTrailCount)
then do
p <- io $ readIORef pRef
clientGlobals.cgFreeParticles .= (p^.cpNext)
activeParticles <- use $ clientGlobals.cgActiveParticles
clientGlobals.cgActiveParticles .= Just pRef
time <- use $ globals.gCl.csTime
f <- Lib.randomF
color' <- Lib.rand
o1 <- Lib.crandom
o2 <- Lib.crandom
o3 <- Lib.crandom
v1 <- Lib.crandom
v2 <- Lib.crandom
v3 <- Lib.crandom
if | flags .&. Constants.efGib /= 0 -> do
io $ modifyIORef' pRef (\v -> v { _cpNext = activeParticles
, _cpTime = fromIntegral time
, _cpAccel = V3 0 0 0
, _cpAlpha = 1.0
, _cpAlphaVel = (-1.0) / (1.0 + f * 0.4)
, _cpColor = 0xE8 + fromIntegral (color' .&. 7)
, _cpOrg = move + fmap (* orgScale) (V3 o1 o2 o3)
, _cpVel = (fmap (* velScale) (V3 v1 v2 v3)) & _z -~ particleGravity
})
| flags .&. Constants.efGreenGib /= 0 -> do
io $ modifyIORef' pRef (\v -> v { _cpNext = activeParticles
, _cpTime = fromIntegral time
, _cpAccel = V3 0 0 0
, _cpAlpha = 1.0
, _cpAlphaVel = (-1.0) / (1.0 + f * 0.4)
, _cpColor = 0xDB + fromIntegral (color' .&. 7)
, _cpOrg = move + fmap (* orgScale) (V3 o1 o2 o3)
, _cpVel = (fmap (* velScale) (V3 v1 v2 v3)) & _z -~ particleGravity
})
| otherwise -> do
io $ modifyIORef' pRef (\v -> v { _cpNext = activeParticles
, _cpTime = fromIntegral time
, _cpAccel = V3 0 0 20
, _cpAlpha = 1.0
, _cpAlphaVel = (-1.0) / (1.0 + f * 0.2)
, _cpColor = 4 + fromIntegral (color' .&. 7)
, _cpOrg = move + fmap (* orgScale) (V3 o1 o2 o3)
, _cpVel = fmap (* velScale) (V3 v1 v2 v3)
})
globals.gClEntities.ix oldIdx.ceTrailCount %= (\v -> if v - 5 < 100 then 100 else v - 5)
addDiminishingTrail (p^.cpNext) vec (move + vec) orgScale velScale (len - 0.5)
else do
globals.gClEntities.ix oldIdx.ceTrailCount %= (\v -> if v - 5 < 100 then 100 else v - 5)
addDiminishingTrail (Just pRef) vec (move + vec) orgScale velScale (len - 0.5)
-- TODO: entIdx should be CEntityReference
flyEffect :: Int -> V3 Float -> Quake ()
flyEffect entIdx origin = do
Just ent <- preuse $ globals.gClEntities.ix entIdx
time <- use $ globals.gCl.csTime
let (startTime, flyStopTime) = if (ent^.ceFlyStopTime) < time
then (time, time + 60000)
else ((ent^.ceFlyStopTime) - 60000, ent^.ceFlyStopTime)
n = time - startTime
count = if n < 20000
then (n * 162) `div` 20000
else let n' = flyStopTime - time
in if n' < 20000
then (n' * 162) `div` 20000
else 162
globals.gClEntities.ix entIdx.ceFlyStopTime .= flyStopTime
flyParticles origin count
flyParticles :: V3 Float -> Int -> Quake ()
flyParticles origin count = do
let count' = if count > Constants.numVertexNormals
then Constants.numVertexNormals
else count
Just avelocity <- preuse $ clientGlobals.cgAVelocities.ix 0
when ((avelocity^._x) == 0) $ do
aVelocities <- V.replicateM Constants.numVertexNormals genAVelocity
clientGlobals.cgAVelocities .= aVelocities
time <- use $ globals.gCl.csTime
let lTime = fromIntegral time / 1000
freeParticles <- use $ clientGlobals.cgFreeParticles
addFlyParticles freeParticles lTime 0 count'
where genAVelocity :: Quake (V3 Float)
genAVelocity = do
a1 <- Lib.rand
a2 <- Lib.rand
a3 <- Lib.rand
return (V3 (fromIntegral (a1 .&. 255) * 0.01) (fromIntegral (a2 .&. 255) * 0.01) (fromIntegral (a3 .&. 255) * 0.01))
addFlyParticles :: Maybe (IORef CParticleT) -> Float -> Int -> Int -> Quake ()
addFlyParticles Nothing _ _ _ = return ()
addFlyParticles (Just pRef) lTime idx maxIdx
| idx >= maxIdx = return ()
| otherwise = do
io (putStrLn "CLFX.flyParticles") >> undefined -- TODO
flagTrail :: V3 Float -> V3 Float -> Float -> Quake ()
flagTrail start end color = do
let move = start
vec = end - start
len = norm vec
vec' = fmap (* 5) (normalize vec)
freeParticles <- use $ clientGlobals.cgFreeParticles
addFlagTrail freeParticles vec' move len
where addFlagTrail :: Maybe (IORef CParticleT) -> V3 Float -> V3 Float -> Float -> Quake ()
addFlagTrail Nothing _ _ _ = return ()
addFlagTrail (Just pRef) vec move len
| len <= 0 = return ()
| otherwise = do
p <- io $ readIORef pRef
clientGlobals.cgFreeParticles .= (p^.cpNext)
activeParticles <- use $ clientGlobals.cgActiveParticles
clientGlobals.cgActiveParticles .= Just pRef
time <- use $ globals.gCl.csTime
f <- Lib.randomF
o1 <- Lib.crandom
o2 <- Lib.crandom
o3 <- Lib.crandom
v1 <- Lib.crandom
v2 <- Lib.crandom
v3 <- Lib.crandom
io $ modifyIORef' pRef (\v -> v { _cpNext = activeParticles
, _cpTime = fromIntegral time
, _cpAccel = V3 0 0 0
, _cpAlpha = 1.0
, _cpAlphaVel = (-1.0) / (0.8 + f * 0.2)
, _cpColor = color
, _cpOrg = move + fmap (* 16) (V3 o1 o2 o3)
, _cpVel = fmap (* 5) (V3 v1 v2 v3)
})
addFlagTrail (p^.cpNext) vec (move + vec) (len - 5)
bfgParticles :: EntityT -> Quake ()
bfgParticles _ = do
io (putStrLn "CLFX.bfgParticles") >> undefined -- TODO
trapParticles :: EntityT -> Quake ()
trapParticles _ = do
io (putStrLn "CLFX.trapParticles") >> undefined -- TODO
ionRipperTrail :: V3 Float -> V3 Float -> Quake ()
ionRipperTrail _ _ = do
io (putStrLn "CLFX.ionRipperTrail") >> undefined -- TODO
| ksaveljev/hake-2 | src/Client/CLFX.hs | bsd-3-clause | 86,658 | 0 | 29 | 36,032 | 23,631 | 11,877 | 11,754 | -1 | -1 |
module Data.Povray.Colour where
import Data.Povray.Base
import Data.Povray.Types
data Colour = RGB Vect
| RGBF Vect4
| RGBT Vect4
| RGBFT Vect5
-- TODO : better polymorphism?
cmul :: Double -> Colour -> Colour
cmul x (RGB v) = RGB (fmap (*x) v)
cmul x (RGBF v) = RGBF (fmap (*x) v)
cmul x (RGBT v) = RGBT (fmap (*x) v)
cmul x (RGBFT v) = RGBFT (fmap (*x) v)
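-- A small usage sketch for 'cmul'; it assumes the 'V' constructor and a
-- 'Functor' instance for 'Vect', as used elsewhere in this package:
--
-- > toPov (cmul 0.5 (RGB (V 1 0.4 0.3)))   -- ~ "colour rgb <0.5, 0.2, 0.15>"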
instance Povray Colour where
toPov (RGB v) = "colour rgb " `mappend` toPov v
toPov (RGBF v) = "colour rgbf " `mappend` toPov v
toPov (RGBT v) = "colour rgbt " `mappend` toPov v
toPov (RGBFT v) = "colour rgbft " `mappend` toPov v
-- |
-- >>> toPov (RGB (V 1 0.4 0.3))
-- colour rgb <1, 0.4, 0.3>
| lesguillemets/hspov_proto | src/Data/Povray/Colour.hs | bsd-3-clause | 714 | 0 | 8 | 190 | 284 | 154 | 130 | 17 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | Records support in Accelerate expressions.
module Frames.Accelerate.Rec
( LiftedElem
, inCoreA
, rlens
, rget
, rput
, rp
) where
import Control.Monad.Primitive
import Data.Array.Accelerate
import Data.Array.Accelerate.Array.Sugar
import Data.Array.Accelerate.Smart
import Data.Array.Accelerate.Tuple
import Data.Array.Accelerate.Type
import Data.Typeable
import Data.Vinyl ( HList )
import Data.Vinyl.TypeLevel ( RIndex )
import Frames hiding ( rlens, rget, rput )
import Frames.InCore
import Frames.Accelerate.HList
import GHC.TypeLits
import Language.Haskell.TH.Quote
import Pipes ( Producer )
import qualified Pipes.Prelude as P
type instance EltRepr (s :-> a) = ((), EltRepr' a)
type instance EltRepr' (s :-> a) = EltRepr' a
instance ( Elt a
, Typeable s
, KnownSymbol s
) => Elt (s :-> a) where
eltType (Col a) = PairTuple UnitTuple (eltType' a)
fromElt (Col a) = ((), fromElt' a)
toElt ((), a) = Col $ toElt' a
eltType' (Col a) = eltType' a
fromElt' (Col a) = fromElt' a
toElt' a = Col $ toElt' a
instance IsTuple (s :-> a) where
type TupleRepr (s :-> a) = ((), a)
fromTuple (Col a) = ((), a)
toTuple ((), a) = Col a
instance ( Elt (Plain a)
, Lift Exp a
, Typeable s
, KnownSymbol s
) => Lift Exp (s :-> a) where
type Plain (s :-> a) = s :-> Plain a
lift (Col a) = Exp $ Tuple (NilTup `SnocTup` lift a)
instance ( Elt a
, KnownSymbol s
, Typeable s
) => Unlift Exp (s :-> Exp a) where
unlift e = let a = Exp $ ZeroTupIdx `Prj` e
in Col a
type LiftedElem r rs = ( RElem (Exp r) (LiftRow rs) (RIndex (Exp r) (LiftRow rs))
, HList rs ~ Plain (HList (LiftRow rs))
, Unlift Exp (HList (LiftRow rs)) )
-- | Lens to a record field
rlens :: ( Functor f
, Elt a
, LiftedElem (s :-> a) rs
, Typeable s
, KnownSymbol s
)
=> sing (Exp (s :-> a))
-> (Exp a -> f (Exp a))
-> Exp (HList rs)
-> f (Exp (HList rs))
rlens k = hlens k . clens
where
clens f = fmap (lift . Col) . f . getCol . unlift
-- | Getter for a record field.
rget :: ( forall f. Functor f => (a -> f a) -> Exp (HList rs) -> f (Exp (HList rs)) )
-> Exp (HList rs) -> a
rget = hget
-- | Setter for a record field.
rput :: ( forall f. Functor f => (a -> f a) -> Exp (HList rs) -> f (Exp (HList rs)) )
-> a -> Exp (HList rs) -> Exp (HList rs)
rput = hput
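-- A usage sketch for the accessors above; the field name is hypothetical and
-- assumes a row type @rs@ containing @"age" :-> Int@ (DataKinds) and a
-- 'Data.Proxy.Proxy' to pick the field:
--
-- > ageL = rlens (Proxy :: Proxy (Exp ("age" :-> Int)))
-- > bumpAge row = rput ageL (rget ageL row + 1) row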
rp :: QuasiQuoter
rp = hp
-- | Stream a finite sequence of rows into an Accelerate array for further manipulation.
-- TODO: pull the vectors out of the VectorMs and build an Accelerate array from them.
inCoreA :: forall m rs . ( Applicative m, PrimMonad m, RecVec rs )
=> Producer (Record rs) m () -> m (Vector (Record rs))
inCoreA xs = do
mvs <- allocRec (Proxy :: Proxy rs)
let feed (!i, !sz, !mvs') row
| i == sz = growRec (Proxy::Proxy rs) mvs'
>>= flip feed row . (i, sz*2,)
| otherwise = do
writeRec (Proxy::Proxy rs) i mvs' row
return (i+1, sz, mvs')
fin (n,_,mvs') = do
vs <- freezeRec (Proxy::Proxy rs) n mvs'
-- return . (n,) $ produceRec (Proxy::Proxy rs) vs
return $ error "inCoreA: not implemented"
P.foldM feed (return (0,initialCapacity,mvs)) fin xs
| schernichkin/frames-accelerate | src/Frames/Accelerate/Rec.hs | bsd-3-clause | 3,987 | 0 | 15 | 1,229 | 1,379 | 734 | 645 | 99 | 1 |
module Data.ByteString.SuperBuffer.Pure
( SuperBuffer, withBuffer, appendBuffer, appendBufferT, size )
where
import Control.Concurrent.MVar
import Control.Exception
import Data.Bits
import Data.IORef
import Data.Word
import Foreign.Marshal.Alloc
import Foreign.Marshal.Utils
import Foreign.Ptr
import qualified Data.ByteString as BS
import qualified Data.ByteString.Unsafe as BS
-- | The buffer data structure.
data SuperBuffer
= SuperBuffer
{ sb_buffer :: {-# UNPACK #-}!(IORef (Ptr Word8))
, sb_currentSize :: {-# UNPACK #-}!(IORef Int)
, sb_maxSize :: {-# UNPACK #-}!(IORef Int)
, sb_lock :: {-# UNPACK #-}!(MVar ())
}
-- | Allocate a new buffer with a given initial size. The perfect starting point
-- depends on the expected total size and the average size for a single chunk
-- written with 'appendBuffer'. You can always start with 1024 and optimize from
-- there with benchmarks. Note that the SuperBuffer is no longer valid after
-- this function returns, so do NOT pass it to another thread without waiting
-- for that thread to finish inside the action.
withBuffer :: Int -> (SuperBuffer -> IO ()) -> IO BS.ByteString
withBuffer sz action =
do ptr <- mallocBytes sz
ptrRef <- newIORef ptr
go ptrRef `onException` freeOnException ptrRef
where
freeOnException ref =
do ptr <- readIORef ref
free ptr
go ptrRef =
do sizeRef <- newIORef 0
maxSizeRef <- newIORef sz
lock <- newEmptyMVar
let sb = SuperBuffer ptrRef sizeRef maxSizeRef lock
action sb
readBuffer sb
{-# INLINE withBuffer #-}
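-- A usage sketch (assumes the @OverloadedStrings@ extension for the
-- 'BS.ByteString' literals):
--
-- > buildGreeting :: IO BS.ByteString
-- > buildGreeting =
-- >   withBuffer 1024 $ \sb ->
-- >     mapM_ (appendBuffer sb) ["Hello", ", ", "world!"]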
-- | Write a bytestring to the buffer and grow the buffer if needed. Note that only
-- one thread at any given time may call this function. Use 'appendBufferT' when
-- accessing 'SuperBuffer' from multiple threads.
appendBuffer :: SuperBuffer -> BS.ByteString -> IO ()
appendBuffer sb bs
| BS.null bs = pure ()
| otherwise =
BS.unsafeUseAsCStringLen bs $ \(cstr, len) ->
do currentSize <- readIORef (sb_currentSize sb)
maxSize <- readIORef (sb_maxSize sb)
let nextSize = currentSize + len
writePtr <-
if nextSize > maxSize
then do let maxSize' = nextSize + unsafeShiftR nextSize 1
writeIORef (sb_maxSize sb) maxSize'
buff <- readIORef (sb_buffer sb)
buff' <- reallocBytes buff maxSize'
writeIORef (sb_buffer sb) buff'
pure buff'
else readIORef (sb_buffer sb)
let copyTarget = writePtr `plusPtr` currentSize
copyBytes copyTarget cstr len
writeIORef (sb_currentSize sb) (currentSize + len)
{-# INLINE appendBuffer #-}
-- | Write a bytestring to the buffer and grow the buffer if needed. This function
-- can be used across different threads, but is slower than 'appendBuffer'.
appendBufferT :: SuperBuffer -> BS.ByteString -> IO ()
appendBufferT sb bs =
bracket_ (putMVar (sb_lock sb) ()) (takeMVar (sb_lock sb)) $
appendBuffer sb bs
{-# INLINE appendBufferT #-}
-- | Read the final buffer contents. This must only be called once.
readBuffer :: SuperBuffer -> IO BS.ByteString
readBuffer sb =
do (buff, currentSize, maxSize) <-
(,,)
<$> readIORef (sb_buffer sb)
<*> readIORef (sb_currentSize sb)
<*> readIORef (sb_maxSize sb)
finalPtr <-
if currentSize < maxSize
then reallocBytes buff currentSize
else pure buff
BS.unsafePackCStringFinalizer finalPtr currentSize (free finalPtr)
{-# INLINE readBuffer #-}
-- | Get current (filled) size of the buffer
size :: SuperBuffer -> IO Int
size sb = readIORef $ sb_currentSize sb
{-# INLINE size #-}
| agrafix/superbuffer | src/Data/ByteString/SuperBuffer/Pure.hs | bsd-3-clause | 3,910 | 0 | 18 | 1,084 | 842 | 426 | 416 | 76 | 2 |
{-# LANGUAGE MultiWayIf #-}
module Lang.LF.Internal.Print where
import Control.Monad.Identity
import qualified Data.Set as Set
import qualified Data.Map.Strict as Map
import Text.PrettyPrint.ANSI.Leijen hiding ((<$>))
import Lang.LF.Internal.Model
import Lang.LF.Internal.Weak
displayLF :: (LFModel f m, ?hyps::Hyps f γ, ?soln :: LFSoln f)
=> f γ s -> m String
displayLF x = show <$> ppLF TopPrec WeakRefl x
prettySignature :: forall f m. LFModel f m => m Doc
prettySignature = withCurrentSolution (go =<< getSignature)
where go [] = return empty
go ((a ::. k) : xs) = do
let adoc = pretty a
kdoc <- inEmptyCtx $ ppLF TopPrec WeakRefl (runIdentity k)
xsdoc <- go xs
let x = hang 4 (group (adoc <+> text "::" <> line <> kdoc))
return (x <$$> xsdoc)
go ((c :. t) : xs) = do
let cdoc = pretty c
tdoc <- inEmptyCtx $ ppLF TopPrec WeakRefl (runIdentity t)
xsdoc <- go xs
let x = hang 4 (group (cdoc <+> text ":" <> line <> tdoc))
return (x <$$> xsdoc)
prettyRecord :: Doc -> Doc -> Doc -> [(Doc, Doc)] -> Doc
prettyRecord begin end sep flds =
let flds' = go flds in
align $ group (begin <+> flds' <> line <> end)
where go [] = softline
go [x] = go1 x
go (x:xs) =
let x' = go1 x in
let xs' = go xs in
x' <> linebreak <> text "," <+> xs'
go1 (nm,x) =
hang 2 (group (text "$" <> nm <+> sep <> softline <> x))
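-- For illustration: a single-field record such as
-- @prettyRecord lbrace rbrace (text ":=") [(text "x", text "1")]@ renders,
-- when it fits on one line, roughly as @{ $x := 1 }@.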
prettyValue :: LFModel f m
=> (a -> Doc)
-> LFVal f m a
-> Doc
prettyValue ppBase v =
case v of
ValLam _ ->
text "<<fun>>"
ValRecord xs -> do
let xs' = [ (pretty f, prettyValue ppBase x)
| (f,x) <- Map.toList xs
]
prettyRecord lbrace rbrace (text ":=") xs'
ValRow xs ->
encloseSep (text "⟨") (text "⟩") comma $ map pretty $ Set.toList xs
ValBase x ->
ppBase x
prettyLF
:: (LFModel f m, ?hyps::Hyps f γ', ?soln :: LFSoln f)
=> Prec
-> Weakening γ γ'
-> f γ s
-> m Doc
prettyLF prec w x =
case unfoldLF x of
Weak w' x -> ppLF prec (weakCompose w w') x
Type -> return $ text "Type"
KPi nm a k
| freeVar B k -> do
let nm' = freshName nm
adoc <- group <$> ppLF BinderPrec w a
kdoc <- extendCtx nm QPi (weaken w a) $ ppLF TopPrec (WeakSkip w) k
return $ (if prec /= TopPrec then parens else id) $
(text "Π" <> text nm' <+> colon <+> adoc <+> comma <> softline <> kdoc)
| otherwise -> do
adoc <- ppLF BinderPrec w a
kdoc <- extendCtx nm QPi (weaken w a) $ ppLF TopPrec (WeakSkip w) k
return $ (if prec /= TopPrec then parens else id) $
align (adoc <+> text "⇒" <> line <> kdoc)
AType x -> group . (linebreak <>) . hang 2 <$> (ppLF prec w x)
TyPi nm a1 a2
| freeVar B a2 -> do
let nm' = freshName nm
a1doc <- group <$> ppLF BinderPrec w a1
a2doc <- extendCtx nm QPi (weaken w a1) $ ppLF TopPrec (WeakSkip w) a2
return $ (if prec /= TopPrec then parens else id) $
(text "Π" <> text nm' <+> colon <+> a1doc <> comma <> softline <> a2doc)
| otherwise -> do
a1doc <- group <$> ppLF BinderPrec w a1
a2doc <- extendCtx nm QPi (weaken w a1) $ ppLF TopPrec (WeakSkip w) a2
return $! (if prec /= TopPrec then parens else id) $
align (a1doc <+> text "⇒" <> line <> a2doc)
TyRow (PosFieldSet fldSet) -> return $
text "row⊆" <>
encloseSep lbrace rbrace comma (map pretty $ Set.toList fldSet)
TyRow (NegFieldSet fldSet)
| Set.null fldSet -> return $ text "row"
| otherwise -> return $
text "row#" <>
encloseSep lbrace rbrace comma (map pretty $ Set.toList fldSet)
TyRecord row -> ppLF RecordPrec w row
TyConst x -> return $ pretty x
TyApp p a -> do
pdoc <- ppLF AppLPrec w p
adoc <- ppLF AppRPrec w a
return $! group $ (if prec == AppRPrec then parens else id) $
(pdoc <> line <> adoc)
ATerm x
| prec == TopPrec ->
group . (linebreak <>) . hang 2 <$> (ppLF prec w x)
| otherwise -> hang 2 <$> ppLF prec w x
Lam nm a m -> do
let nm' = freshName nm
adoc <- ppLF BinderPrec w a
mdoc <- extendCtx nm QLam (weaken w a) $ ppLF TopPrec (WeakSkip w) m
return $! (if prec /= TopPrec then parens else id) $
(text "λ" <> text nm' <+> colon <+> adoc <> comma <> softline <> mdoc)
Record flds -> do
flds' <- sequence [ ppLF TopPrec w x >>= \x' -> return (pretty f, x')
| (f,x) <- Map.toList flds
]
return $ prettyRecord lbrace rbrace (text ":=") flds'
RecordModify r delSet insMap -> do
headDoc <- if Set.null delSet then do
ppLF TopPrec w r
else do
rdoc <- ppLF AppLPrec w r
let delSet' = encloseSep lbrace rbrace comma $
map pretty $ Set.elems delSet
return $ rdoc <> text "\\" <> delSet'
insList <- sequence [ ppLF TopPrec w x >>= \x' -> return (pretty f, x')
| (f,x) <- Map.toList insMap
]
if null insList then
return $ lbrace <+> headDoc <+> rbrace
else do
let tailDoc = prettyRecord (text "|") rbrace (text "↦") insList
return $ group $ align $ lbrace <+> headDoc <> softline <> tailDoc
Row flds -> do
let (begin,end) = if prec == RecordPrec then
(lbrace,rbrace)
else
(text "⟨", text "⟩")
flds' <- sequence [ ppLF TopPrec w x >>= \x' -> return (pretty f, x')
| (f,x) <- Map.toList flds
]
return $ prettyRecord begin end colon flds'
RowModify r delSet insMap -> do
let (begin,end) = if prec == RecordPrec then
(lbrace,rbrace)
else
(text "⟨", text "⟩")
headDoc <- if Set.null delSet then do
ppLF TopPrec w r
else do
rdoc <- ppLF AppLPrec w r
let delSet' = encloseSep lbrace rbrace comma $
map pretty $ Set.elems delSet
return $ rdoc <> text "\\" <> delSet'
insList <- sequence [ ppLF TopPrec w x >>= \x' -> return (pretty f, x')
| (f,x) <- Map.toList insMap
]
if null insList then
return $ begin <+> headDoc <+> end
else do
let tailDoc = prettyRecord (text "|") end (text "↦") insList
return $ group $ align $ begin <+> headDoc <> softline <> tailDoc
Const x -> return $ pretty x
App m1 m2 -> do
m1doc <- ppLF AppLPrec w m1
m2doc <- ppLF AppRPrec w m2
return $! group $ (if prec == AppRPrec then parens else id) $
(m1doc <> line <> m2doc)
Var ->
let (nm,_,_) = lookupCtx (weakenVar w B)
in return $ text nm
UVar u -> return (text "#" <> pretty u)
Project x fld -> do
xdoc <- ppLF AppLPrec w x
return $ xdoc <> text "->$" <> pretty fld
Unify r1 r2 -> do
r1doc <- ppLF TopPrec w r1
r2doc <- ppLF TopPrec w r2
return $ group (r1doc <+> text "=" <> line <> r2doc)
And [] -> return $ text "⊤"
And cs -> do
cs' <- mapM (ppLF TopPrec w) cs
return $ align $ cat $ punctuate (text " ∧ ") cs'
Forall nm a c -> do
let nm' = freshName nm
adoc <- ppLF BinderPrec w a
cdoc <- extendCtx nm QForall (weaken w a) $ ppLF TopPrec (WeakSkip w) c
return $ (if prec /= TopPrec then parens else id) $
text "∀" <> text nm' <+> colon <+> adoc <> comma <> hang 2 (softline <> cdoc)
Exists nm a c -> do
let nm' = freshName nm
adoc <- ppLF BinderPrec w a
cdoc <- extendCtx nm QExists (weaken w a) $ ppLF TopPrec (WeakSkip w) c
return $ (if prec /= TopPrec then parens else id) $
text "∃" <> text nm' <+> colon <+> adoc <> comma <> hang 2 (softline <> cdoc)
Sigma nm a g -> do
let nm' = freshName nm
adoc <- ppLF BinderPrec w a
gdoc <- extendCtx nm QSigma (weaken w a) $ ppLF TopPrec (WeakSkip w) g
return $ (if prec /= TopPrec then parens else id) $
text "Σ" <> text nm' <+> colon <+> adoc <> comma <> hang 2 (softline <> gdoc)
Fail -> do
return $ text "⊥"
Goal m c -> do
mdoc <- ppLF TopPrec w m
cdoc <- ppLF TopPrec w c
return $ group $
text "{" <+> mdoc <+> text "|" <> nest 2 (softline <> cdoc <+> text "}")
| robdockins/canonical-lf | src/Lang/LF/Internal/Print.hs | bsd-3-clause | 9,013 | 0 | 19 | 3,363 | 3,599 | 1,726 | 1,873 | -1 | -1 |
{-# LANGUAGE CPP #-}
-- | Utility methods to automatically generate and keep track of a mapping
-- between node labels and 'Node's.
module Data.Graph.Inductive.NodeMap(
-- * Functional Construction
NodeMap,
-- ** Map Construction
new, fromGraph, mkNode, mkNode_, mkNodes, mkNodes_, mkEdge, mkEdges,
-- ** Graph Construction
-- | These functions mirror the construction and destruction functions in
-- 'Data.Graph.Inductive.Graph', but use the given 'NodeMap' to look up
-- the appropriate 'Node's. Note that the 'insMapNode' family of functions
-- will create new nodes as needed, but the other functions will not.
insMapNode, insMapNode_, insMapEdge, delMapNode, delMapEdge, insMapNodes,
insMapNodes_, insMapEdges, delMapNodes, delMapEdges, mkMapGraph,
-- * Monadic Construction
NodeMapM,
-- | The following mirror the functional construction functions, but handle passing
-- 'NodeMap's and 'Graph's behind the scenes.
-- ** Map Construction
run, run_, mkNodeM, mkNodesM, mkEdgeM, mkEdgesM,
-- ** Graph Construction
insMapNodeM, insMapEdgeM, delMapNodeM, delMapEdgeM, insMapNodesM,
insMapEdgesM, delMapNodesM, delMapEdgesM
) where
import Control.Monad.Trans.State
import Data.Graph.Inductive.Graph
import Prelude hiding (map)
import qualified Prelude as P (map)
import Data.Map (Map)
import qualified Data.Map as M
#if MIN_VERSION_containers (0,4,2)
import Control.DeepSeq (NFData (..))
#endif
data NodeMap a =
NodeMap { map :: Map a Node,
key :: Int }
deriving (Eq, Show, Read)
#if MIN_VERSION_containers (0,4,2)
instance (NFData a) => NFData (NodeMap a) where
rnf (NodeMap mp k) = rnf mp `seq` rnf k
#endif
-- | Create a new, empty mapping.
new :: NodeMap a
new = NodeMap { map = M.empty, key = 0 }
-- LNode = (Node, a)
-- | Generate a mapping containing the nodes in the given graph.
fromGraph :: (Ord a, Graph g) => g a b -> NodeMap a
fromGraph g =
let ns = labNodes g
aux (n, a) (m', k') = (M.insert a n m', max n k')
(m, k) = foldr aux (M.empty, 0) ns
in NodeMap { map = m, key = k+1 }
-- | Generate a labelled node from the given label. Will return the same node
-- for the same label.
mkNode :: (Ord a) => NodeMap a -> a -> (LNode a, NodeMap a)
mkNode m@(NodeMap mp k) a =
case M.lookup a mp of
Just i -> ((i, a), m)
Nothing ->
let m' = NodeMap { map = M.insert a k mp, key = k+1 }
in ((k, a), m')
-- | Generate a labelled node and throw away the modified 'NodeMap'.
mkNode_ :: (Ord a) => NodeMap a -> a -> LNode a
mkNode_ m a = fst $ mkNode m a
-- | Generate a 'LEdge' from the node labels.
mkEdge :: (Ord a) => NodeMap a -> (a, a, b) -> Maybe (LEdge b)
mkEdge (NodeMap m _) (a1, a2, b) =
do n1 <- M.lookup a1 m
n2 <- M.lookup a2 m
return (n1, n2, b)
-- | Generates a list of 'LEdge's.
mkEdges :: (Ord a) => NodeMap a -> [(a, a, b)] -> Maybe [LEdge b]
mkEdges m = mapM (mkEdge m)
-- | Construct a list of nodes.
mkNodes :: (Ord a) => NodeMap a -> [a] -> ([LNode a], NodeMap a)
mkNodes = map' mkNode
map' :: (a -> b -> (c, a)) -> a -> [b] -> ([c], a)
map' _ a [] = ([], a)
map' f a (b:bs) =
let (c, a') = f a b
(cs, a'') = map' f a' bs
in (c:cs, a'')
-- | Construct a list of nodes and throw away the modified 'NodeMap'.
mkNodes_ :: (Ord a) => NodeMap a -> [a] -> [LNode a]
mkNodes_ m as = fst $ mkNodes m as
insMapNode :: (Ord a, DynGraph g) => NodeMap a -> a -> g a b -> (g a b, NodeMap a, LNode a)
insMapNode m a g =
let (n, m') = mkNode m a
in (insNode n g, m', n)
insMapNode_ :: (Ord a, DynGraph g) => NodeMap a -> a -> g a b -> g a b
insMapNode_ m a g =
let (g', _, _) = insMapNode m a g
in g'
insMapEdge :: (Ord a, DynGraph g) => NodeMap a -> (a, a, b) -> g a b -> g a b
insMapEdge m e g =
let (Just e') = mkEdge m e
in insEdge e' g
delMapNode :: (Ord a, DynGraph g) => NodeMap a -> a -> g a b -> g a b
delMapNode m a g =
let (n, _) = mkNode_ m a
in delNode n g
delMapEdge :: (Ord a, DynGraph g) => NodeMap a -> (a, a) -> g a b -> g a b
delMapEdge m (n1, n2) g =
let Just (n1', n2', _) = mkEdge m (n1, n2, ())
in delEdge (n1', n2') g
insMapNodes :: (Ord a, DynGraph g) => NodeMap a -> [a] -> g a b -> (g a b, NodeMap a, [LNode a])
insMapNodes m as g =
let (ns, m') = mkNodes m as
in (insNodes ns g, m', ns)
insMapNodes_ :: (Ord a, DynGraph g) => NodeMap a -> [a] -> g a b -> g a b
insMapNodes_ m as g =
let (g', _, _) = insMapNodes m as g
in g'
insMapEdges :: (Ord a, DynGraph g) => NodeMap a -> [(a, a, b)] -> g a b -> g a b
insMapEdges m es g =
let Just es' = mkEdges m es
in insEdges es' g
delMapNodes :: (Ord a, DynGraph g) => NodeMap a -> [a] -> g a b -> g a b
delMapNodes m as g =
let ns = P.map fst $ mkNodes_ m as
in delNodes ns g
delMapEdges :: (Ord a, DynGraph g) => NodeMap a -> [(a, a)] -> g a b -> g a b
delMapEdges m ns g =
let Just ns' = mkEdges m $ P.map (\(a, b) -> (a, b, ())) ns
ns'' = P.map (\(a, b, _) -> (a, b)) ns'
in delEdges ns'' g
mkMapGraph :: (Ord a, DynGraph g) => [a] -> [(a, a, b)] -> (g a b, NodeMap a)
mkMapGraph ns es =
let (ns', m') = mkNodes new ns
Just es' = mkEdges m' es
in (mkGraph ns' es', m')
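-- A usage sketch; the labels are illustrative and @Gr@ is the PatriciaTree
-- graph type from fgl:
--
-- > (g, nm) = mkMapGraph ["a", "b", "c"]
-- >                      [("a", "b", ()), ("b", "c", ())] :: (Gr String (), NodeMap String)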
-- | Graph construction monad; handles passing both the 'NodeMap' and the
-- 'Graph'.
type NodeMapM a b g r = State (NodeMap a, g a b) r
-- | Run a construction; return the value of the computation, the modified
-- 'NodeMap', and the modified 'Graph'.
run :: (DynGraph g, Ord a) => g a b -> NodeMapM a b g r -> (r, (NodeMap a, g a b))
run g m = runState m (fromGraph g, g)
-- | Run a construction and only return the 'Graph'.
run_ :: (DynGraph g, Ord a) => g a b -> NodeMapM a b g r -> g a b
run_ g m = snd . snd $ run g m
{- not used
liftN1 :: (Ord a, DynGraph g) => (NodeMap a -> (c, NodeMap a)) -> NodeMapM a b g c
liftN1 f =
do (m, g) <- get
let (r, m') = f m
put (m', g)
return r
liftN1' :: (Ord a, DynGraph g) => (NodeMap a -> c) -> NodeMapM a b g c
liftN1' f =
do (m, g) <- get
return $ f m
-}
liftN2 :: (NodeMap a -> c -> (d, NodeMap a)) -> c -> NodeMapM a b g d
liftN2 f c =
do (m, g) <- get
let (r, m') = f m c
put (m', g)
return r
liftN2' :: (NodeMap a -> c -> d) -> c -> NodeMapM a b g d
liftN2' f c =
do (m, _) <- get
return $ f m c
{- not used
liftN3 :: (Ord a, DynGraph g) => (NodeMap a -> c -> d -> (e, NodeMap a)) -> c -> d -> NodeMapM a b g e
liftN3 f c d =
do (m, g) <- get
let (r, m') = f m c d
put (m', g)
return r
liftN3' :: (Ord a, DynGraph g) => (NodeMap a -> c -> d -> e) -> c -> d -> NodeMapM a b g e
liftN3' f c d =
do (m, g) <- get
return $ f m c d
-}
liftM1 :: (NodeMap a -> c -> g a b -> g a b) -> c -> NodeMapM a b g ()
liftM1 f c =
do (m, g) <- get
let g' = f m c g
put (m, g')
liftM1' :: (NodeMap a -> c -> g a b -> (g a b, NodeMap a, d)) -> c -> NodeMapM a b g d
liftM1' f c =
do (m, g) <- get
let (g', m', r) = f m c g
put (m', g')
return r
-- | Monadic node construction.
mkNodeM :: (Ord a) => a -> NodeMapM a b g (LNode a)
mkNodeM = liftN2 mkNode
mkNodesM :: (Ord a) => [a] -> NodeMapM a b g [LNode a]
mkNodesM = liftN2 mkNodes
mkEdgeM :: (Ord a) => (a, a, b) -> NodeMapM a b g (Maybe (LEdge b))
mkEdgeM = liftN2' mkEdge
mkEdgesM :: (Ord a) => [(a, a, b)] -> NodeMapM a b g (Maybe [LEdge b])
mkEdgesM = liftN2' mkEdges
insMapNodeM :: (Ord a, DynGraph g) => a -> NodeMapM a b g (LNode a)
insMapNodeM = liftM1' insMapNode
insMapEdgeM :: (Ord a, DynGraph g) => (a, a, b) -> NodeMapM a b g ()
insMapEdgeM = liftM1 insMapEdge
delMapNodeM :: (Ord a, DynGraph g) => a -> NodeMapM a b g ()
delMapNodeM = liftM1 delMapNode
delMapEdgeM :: (Ord a, DynGraph g) => (a, a) -> NodeMapM a b g ()
delMapEdgeM = liftM1 delMapEdge
insMapNodesM :: (Ord a, DynGraph g) => [a] -> NodeMapM a b g [LNode a]
insMapNodesM = liftM1' insMapNodes
insMapEdgesM :: (Ord a, DynGraph g) => [(a, a, b)] -> NodeMapM a b g ()
insMapEdgesM = liftM1 insMapEdges
delMapNodesM :: (Ord a, DynGraph g) => [a] -> NodeMapM a b g ()
delMapNodesM = liftM1 delMapNodes
delMapEdgesM :: (Ord a, DynGraph g) => [(a, a)] -> NodeMapM a b g ()
delMapEdgesM = liftM1 delMapEdges
| antalsz/hs-to-coq | examples/graph/graph/Data/Graph/Inductive/NodeMap.hs | mit | 8,417 | 0 | 15 | 2,308 | 3,437 | 1,818 | 1,619 | 150 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ExtendedDefaultRules #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
module Main where
import Control.Applicative ((<$>), (<*>))
import Control.Monad
import Shelly hiding (FilePath)
import Data.Char
import Data.List (nub)
import Data.Text (Text)
import Data.Monoid
import qualified Data.Text as T
import Data.Yaml
default (T.Text)
data RegressionTest = RegressionTest
{ name :: Text
, cabal :: Bool
, flags :: [Text]
, aptPPA :: [Text]
, aptPackages :: [Text]
, cabalBuildTools :: [Text]
, specialSetup :: [Text]
, extraPath :: [Text]
, extraSOPath :: [Text]
, extraIncludeDirs :: [Text]
, extraLibDirs :: [Text]
, onTravis :: Bool
, runTests :: Bool
} deriving (Eq, Show)
instance FromJSON RegressionTest where
parseJSON (Object v) = RegressionTest <$> v .: "name"
<*> v .:? "cabal" .!= True
<*> v .:? "flags" .!= []
<*> v .:? "apt-ppa" .!= []
<*> v .:? "apt-packages" .!= []
<*> v .:? "cabal-build-tools" .!= []
<*> v .:? "special-setup" .!= []
<*> v .:? "extra-path" .!= []
<*> v .:? "extra-so-path" .!= []
<*> v .:? "extra-include-dirs" .!= []
<*> v .:? "extra-lib-dirs" .!= []
<*> v .:? "on-travis" .!= True
<*> v .:? "run-tests" .!= False
parseJSON _ = mzero
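-- A sketch of one entry in tests/regression-suite.yaml as read by the
-- instance above (the package name and values are illustrative; omitted keys
-- fall back to the defaults given in the parser):
--
-- > - name: gtk
-- >   flags: ["have-quartz-gtk"]
-- >   apt-packages: ["libgtk-3-dev"]
-- >   run-tests: false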
data Code = TestOK
| DepsFailed
| ConfFailed
| BuildFailed
| TestsFailed
deriving Eq
instance Show Code where
show TestOK = "OK"
show DepsFailed = "dependencies"
show ConfFailed = "configuration"
show BuildFailed = "build"
show TestsFailed = "tests"
makeCode :: (Int, Int, Int, Int) -> Code
makeCode (0, 0, 0, 0) = TestOK
makeCode (0, 0, 0, _) = TestsFailed
makeCode (0, 0, _, _) = BuildFailed
makeCode (0, _, _, _) = ConfFailed
makeCode (_, _, _, _) = DepsFailed
readTests :: FilePath -> IO [RegressionTest]
readTests fp = maybe [] id <$> decodeFile fp
checkApt :: Sh ()
checkApt = do
apt <- which "apt-get"
case apt of
Nothing -> errorExit "Can't find apt-get. Are you sure this is Ubuntu?"
_ -> return ()
main :: IO ()
main = shelly $ do
travis <- maybe False (const True) <$> get_env "TRAVIS"
enabled <- maybe False (const True) <$> get_env "C2HS_REGRESSION_SUITE"
when (not (travis || enabled)) $ do
echo "REGRESSION SUITE IS DISABLED"
exit 0
when travis checkApt
let travisCheck t = case travis of
False -> True
True -> onTravis t
tests <- liftIO $ filter travisCheck <$>
readTests "tests/regression-suite.yaml"
let ppas = nub $ concatMap aptPPA tests
pkgs = nub $ concatMap aptPackages tests
buildTools = nub $ concatMap cabalBuildTools tests
specials = concatMap specialSetup tests
extraPaths = concatMap extraPath tests
extraSOPaths = concatMap extraSOPath tests
when (not travis) $
      echo "ASSUMING THAT ALL NECESSARY LIBRARIES ARE ALREADY INSTALLED!\n"
home <- fromText <$> get_env_text "HOME"
appendToPath $ home </> ".cabal/bin"
when travis $ do
when (not (null ppas)) $ do
echo "SETTING UP APT PPAS\n"
forM_ ppas $ \ppa -> run_ "sudo" $ ["apt-add-repository", "ppa:" <> ppa]
run_ "sudo" $ ["apt-get", "update"]
echo "\n"
when (not (null pkgs)) $ do
echo "INSTALLING APT PACKAGES\n"
run_ "sudo" $ ["apt-get", "install", "-y"] ++ pkgs
echo "\n"
when (not (null specials)) $ do
echo "SPECIAL INSTALL STEPS\n"
forM_ specials $ \s -> let (c:as) = escapedWords s in
run_ (fromText c) as
echo "\n"
when (not (null extraPaths)) $ do
echo "ADDING PATHS\n"
forM_ extraPaths $ \p -> do
echo p
appendToPath $ fromText p
echo "\n"
when (not (null extraSOPaths)) $ do
echo "ADDING SHARED LIBRARY PATHS\n"
forM_ extraSOPaths $ \p -> do
echo p
appendToSOPath p
echo "\n"
codes <- forM (filter cabal tests) $ \t -> do
let n = name t
tst = runTests t
infs = concatMap (\f -> ["-f", f]) $ flags t
extralibs = map (\f -> "--extra-lib-dirs=" <> f) $
extraLibDirs t
extraincs = map (\f -> "--extra-include-dirs=" <> f) $
extraIncludeDirs t
mefs <- get_env $ "C2HS_REGRESSION_FLAGS_" <> n
let fs = if tst then ["--enable-tests"] else [] ++ case mefs of
Nothing -> infs
Just efs -> infs ++ concatMap (\f -> ["-f", f]) (T.splitOn "," efs)
echo $ "\nREGRESSION TEST: " <> n <> "\n"
errExit False $ do
unpack <- run "cabal" ["unpack", n]
let d = T.drop (T.length "Unpacking to ") $ T.init $ last $ T.lines unpack
chdir (fromText d) $ do
run_ "cabal" $ ["sandbox", "init"]
run_ "cabal" $ ["install", "--only-dep", "-v"] ++ fs
dep <- lastExitCode
run_ "cabal" $ ["configure"] ++ extraincs ++ extralibs ++ fs
conf <- lastExitCode
run_ "cabal" $ ["build"]
build <- lastExitCode
test <-
if tst then do
run_ "cabal" ["test"]
lastExitCode
else return 0
return $ makeCode (dep, conf, build, test)
if all (== TestOK) codes
then exit 0
else do
echo "\n\nSOME TESTS FAILED\n"
let failed = filter (\(c, _) -> c /= TestOK) $ zip codes (filter cabal tests)
forM_ failed $ \(c, t) -> echo $ "FAILED: " <> name t <>
" (" <> T.pack (show c) <> ")"
exit 1
escapedWords :: Text -> [Text]
escapedWords = map (T.pack . reverse) . escWords False "" . T.unpack
where escWords :: Bool -> String -> String -> [String]
-- End of string: just return the accumulator if there is one.
escWords _ acc "" = case acc of
"" -> []
_ -> [acc]
-- Not escaping.
escWords False acc (c:cs)
| isSpace c = acc : escWords False "" cs
| c == '\'' = case acc of
"" -> escWords True "" cs
_ -> acc : escWords True "" cs
| otherwise = escWords False (c:acc) cs
-- Escaping.
escWords True acc (c:cs)
| c == '\'' = acc : escWords False "" cs
| otherwise = escWords True (c:acc) cs
appendToSOPath :: Text -> Sh ()
appendToSOPath tp = do
pe <- get_env_text "LD_LIBRARY_PATH"
setenv "LD_LIBRARY_PATH" $ pe <> ":" <> tp
| ian-ross/c2hs-macos-test | c2hs-0.26.1/tests/regression-suite.hs | mit | 7,056 | 0 | 43 | 2,548 | 2,271 | 1,138 | 1,133 | 177 | 6 |
-- -*- mode: haskell -*-
{-# LANGUAGE TemplateHaskell #-}
-- $Id$
module Graph.Bi.Config where
import Autolib.ToDoc
import Autolib.Reader
import Data.Typeable
data Config = Config
{ nodes :: Int -- ^ number of nodes
, edges :: Int -- ^ number of edges
    , teil :: Int -- ^ size of the proof relative to the number of nodes
}
deriving ( Typeable )
$(derives [makeReader, makeToDoc] [''Config])
rc :: Config
rc = Config { nodes = 12
, edges = 40
, teil = 3
}
| florianpilz/autotool | src/Graph/Bi/Config.hs | gpl-2.0 | 507 | 6 | 9 | 134 | 121 | 75 | 46 | 15 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.Redshift.DescribeClusters
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns properties of provisioned clusters including general cluster
-- properties, cluster database properties, maintenance and backup
-- properties, and security and access properties. This operation supports
-- pagination. For more information about managing clusters, go to
-- <http://docs.aws.amazon.com/redshift/latest/mgmt/working-with-clusters.html Amazon Redshift Clusters>
-- in the /Amazon Redshift Cluster Management Guide/ .
--
-- If you specify both tag keys and tag values in the same request, Amazon
-- Redshift returns all clusters that match any combination of the
-- specified keys and values. For example, if you have 'owner' and
-- 'environment' for tag keys, and 'admin' and 'test' for tag values, all
-- clusters that have any combination of those values are returned.
--
-- If both tag keys and values are omitted from the request, clusters are
-- returned regardless of whether they have tag keys or values associated
-- with them.
--
-- /See:/ <http://docs.aws.amazon.com/redshift/latest/APIReference/API_DescribeClusters.html AWS API Reference> for DescribeClusters.
--
-- This operation returns paginated results.
module Network.AWS.Redshift.DescribeClusters
(
-- * Creating a Request
describeClusters
, DescribeClusters
-- * Request Lenses
, dcTagValues
, dcTagKeys
, dcClusterIdentifier
, dcMarker
, dcMaxRecords
-- * Destructuring the Response
, describeClustersResponse
, DescribeClustersResponse
-- * Response Lenses
, dcrsMarker
, dcrsClusters
, dcrsResponseStatus
) where
import Network.AWS.Pager
import Network.AWS.Prelude
import Network.AWS.Redshift.Types
import Network.AWS.Redshift.Types.Product
import Network.AWS.Request
import Network.AWS.Response
-- |
--
-- /See:/ 'describeClusters' smart constructor.
data DescribeClusters = DescribeClusters'
{ _dcTagValues :: !(Maybe [Text])
, _dcTagKeys :: !(Maybe [Text])
, _dcClusterIdentifier :: !(Maybe Text)
, _dcMarker :: !(Maybe Text)
, _dcMaxRecords :: !(Maybe Int)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeClusters' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dcTagValues'
--
-- * 'dcTagKeys'
--
-- * 'dcClusterIdentifier'
--
-- * 'dcMarker'
--
-- * 'dcMaxRecords'
describeClusters
:: DescribeClusters
describeClusters =
DescribeClusters'
{ _dcTagValues = Nothing
, _dcTagKeys = Nothing
, _dcClusterIdentifier = Nothing
, _dcMarker = Nothing
, _dcMaxRecords = Nothing
}
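-- For example, a request for at most 50 clusters carrying the 'environment'
-- tag key could be built with lens operators such as '&', '?~' and '.~'
-- (e.g. from Control.Lens); the values below are illustrative:
--
-- > describeClusters & dcMaxRecords ?~ 50
-- >                  & dcTagKeys    .~ ["environment"]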
-- | A tag value or values for which you want to return all matching clusters
-- that are associated with the specified tag value or values. For example,
-- suppose that you have clusters that are tagged with values called
-- 'admin' and 'test'. If you specify both of these tag values in the
-- request, Amazon Redshift returns a response with the clusters that have
-- either or both of these tag values associated with them.
dcTagValues :: Lens' DescribeClusters [Text]
dcTagValues = lens _dcTagValues (\ s a -> s{_dcTagValues = a}) . _Default . _Coerce;
-- | A tag key or keys for which you want to return all matching clusters
-- that are associated with the specified key or keys. For example, suppose
-- that you have clusters that are tagged with keys called 'owner' and
-- 'environment'. If you specify both of these tag keys in the request,
-- Amazon Redshift returns a response with the clusters that have either or
-- both of these tag keys associated with them.
dcTagKeys :: Lens' DescribeClusters [Text]
dcTagKeys = lens _dcTagKeys (\ s a -> s{_dcTagKeys = a}) . _Default . _Coerce;
-- | The unique identifier of a cluster whose properties you are requesting.
-- This parameter is case sensitive.
--
-- The default is that all clusters defined for an account are returned.
dcClusterIdentifier :: Lens' DescribeClusters (Maybe Text)
dcClusterIdentifier = lens _dcClusterIdentifier (\ s a -> s{_dcClusterIdentifier = a});
-- | An optional parameter that specifies the starting point to return a set
-- of response records. When the results of a DescribeClusters request
-- exceed the value specified in 'MaxRecords', AWS returns a value in the
-- 'Marker' field of the response. You can retrieve the next set of
-- response records by providing the returned marker value in the 'Marker'
-- parameter and retrying the request.
--
-- Constraints: You can specify either the __ClusterIdentifier__ parameter
-- or the __Marker__ parameter, but not both.
dcMarker :: Lens' DescribeClusters (Maybe Text)
dcMarker = lens _dcMarker (\ s a -> s{_dcMarker = a});
-- | The maximum number of response records to return in each call. If the
-- number of remaining response records exceeds the specified 'MaxRecords'
-- value, a value is returned in a 'marker' field of the response. You can
-- retrieve the next set of records by retrying the command with the
-- returned marker value.
--
-- Default: '100'
--
-- Constraints: minimum 20, maximum 100.
dcMaxRecords :: Lens' DescribeClusters (Maybe Int)
dcMaxRecords = lens _dcMaxRecords (\ s a -> s{_dcMaxRecords = a});
instance AWSPager DescribeClusters where
page rq rs
| stop (rs ^. dcrsMarker) = Nothing
| stop (rs ^. dcrsClusters) = Nothing
| otherwise =
Just $ rq & dcMarker .~ rs ^. dcrsMarker
instance AWSRequest DescribeClusters where
type Rs DescribeClusters = DescribeClustersResponse
request = postQuery redshift
response
= receiveXMLWrapper "DescribeClustersResult"
(\ s h x ->
DescribeClustersResponse' <$>
(x .@? "Marker") <*>
(x .@? "Clusters" .!@ mempty >>=
may (parseXMLList "Cluster"))
<*> (pure (fromEnum s)))
instance ToHeaders DescribeClusters where
toHeaders = const mempty
instance ToPath DescribeClusters where
toPath = const "/"
instance ToQuery DescribeClusters where
toQuery DescribeClusters'{..}
= mconcat
["Action" =: ("DescribeClusters" :: ByteString),
"Version" =: ("2012-12-01" :: ByteString),
"TagValues" =:
toQuery (toQueryList "TagValue" <$> _dcTagValues),
"TagKeys" =:
toQuery (toQueryList "TagKey" <$> _dcTagKeys),
"ClusterIdentifier" =: _dcClusterIdentifier,
"Marker" =: _dcMarker, "MaxRecords" =: _dcMaxRecords]
-- | Contains the output from the DescribeClusters action.
--
-- /See:/ 'describeClustersResponse' smart constructor.
data DescribeClustersResponse = DescribeClustersResponse'
{ _dcrsMarker :: !(Maybe Text)
, _dcrsClusters :: !(Maybe [Cluster])
, _dcrsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeClustersResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dcrsMarker'
--
-- * 'dcrsClusters'
--
-- * 'dcrsResponseStatus'
describeClustersResponse
:: Int -- ^ 'dcrsResponseStatus'
-> DescribeClustersResponse
describeClustersResponse pResponseStatus_ =
DescribeClustersResponse'
{ _dcrsMarker = Nothing
, _dcrsClusters = Nothing
, _dcrsResponseStatus = pResponseStatus_
}
-- | A value that indicates the starting point for the next set of response
-- records in a subsequent request. If a value is returned in a response,
-- you can retrieve the next set of records by providing this returned
-- marker value in the 'Marker' parameter and retrying the command. If the
-- 'Marker' field is empty, all response records have been retrieved for
-- the request.
dcrsMarker :: Lens' DescribeClustersResponse (Maybe Text)
dcrsMarker = lens _dcrsMarker (\ s a -> s{_dcrsMarker = a});
-- | A list of Cluster objects, where each object describes one cluster.
dcrsClusters :: Lens' DescribeClustersResponse [Cluster]
dcrsClusters = lens _dcrsClusters (\ s a -> s{_dcrsClusters = a}) . _Default . _Coerce;
-- | The response status code.
dcrsResponseStatus :: Lens' DescribeClustersResponse Int
dcrsResponseStatus = lens _dcrsResponseStatus (\ s a -> s{_dcrsResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-redshift/gen/Network/AWS/Redshift/DescribeClusters.hs | mpl-2.0 | 9,145 | 0 | 15 | 1,919 | 1,148 | 688 | 460 | 121 | 1 |
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Main
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Main (main) where
import Test.Tasty
import Test.AWS.CognitoSync
import Test.AWS.CognitoSync.Internal
main :: IO ()
main = defaultMain $ testGroup "CognitoSync"
[ testGroup "tests" tests
, testGroup "fixtures" fixtures
]
| fmapfmapfmap/amazonka | amazonka-cognito-sync/test/Main.hs | mpl-2.0 | 546 | 0 | 8 | 103 | 76 | 47 | 29 | 9 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : HplAssets.Cloud.PrettyPrinter.DeploymentFile
-- Copyright : (c) authors and contributors 2015
-- License : LGPL
--
-- Stability : provisional
-- Portability : portable
--
-- A bash script pretty printer for the cloud model.
--
-----------------------------------------------------------------------------
module HplAssets.Cloud.PrettyPrinter.DeploymentFile (exportCloudDeployment)
where
import System.IO
import System.Directory
import System.FilePath
import Control.Exception
import Control.Monad
import Text.PrettyPrint.HughesPJ
import BasicTypes
import HplAssets.Cloud.Types
exportCloudDeployment :: FilePath -> CloudModel -> IO()
exportCloudDeployment path cloudModel = do
createDirectoryIfMissing True path
let cs = clouds cloudModel
mapM_ (writeOne path) cs
writeOne :: FilePath -> Cloud -> IO()
writeOne path cloud = writeCloudDeploymentFile (path </> n) cloud where
n = name cloud
writeCloudDeploymentFile :: FilePath -> Cloud -> IO ()
writeCloudDeploymentFile file cloud =
  withFile file WriteMode $ \h ->
    mapM_ (writeCloudInstances h) (instances cloud)
writeCloudInstances :: Handle -> Instance -> IO ()
writeCloudInstances h inst =
  hPutStrLn h (instanceToString inst)
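-- | Render one instance as a single deployment command line, e.g. (all
-- values illustrative):
--
-- > deploy-instance --instance-id i-1 --image-id ami-1 --security-group sg-1 --key-name key-1 --zone a --region eu-west-1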
instanceToString :: Instance -> String
instanceToString ins =
  "deploy-instance " ++
  "--instance-id " ++ instanceId ins ++
  " --image-id " ++ imageId ins ++
  " --security-group " ++ securityGroup ins ++
  " --key-name " ++ accessKey ins ++
  " --zone " ++ (zone $ location ins) ++
  " --region " ++ (region $ location ins) | alessandroleite/hephaestus-pl | src/meta-hephaestus/HplAssets/Cloud/PrettyPrinter/DeploymentFile.hs | lgpl-3.0 | 1,778 | 0 | 16 | 358 | 383 | 197 | 186 | 34 | 1 |
{-# LANGUAGE OverloadedStrings, DeriveGeneric #-}
-----------------------------------------------------------------------------
-- |
-- Module : Cloud.Parsers.Yaml
-- Copyright : (c) authors and contributors, 2015
-- License : LGPL
--
-- Stability : provisional
-- Portability : portable
--
-- A YAML parser for our cloud model.
--
-----------------------------------------------------------------------------
module HplAssets.Cloud.Parsers.Yaml (parseCloudModel)
where
import qualified Data.ByteString.Char8 as BS
import qualified Data.Yaml as Y
import Data.Aeson
import Control.Applicative
import Control.Monad
import BasicTypes
import HplAssets.Cloud.Types
-- Instances to convert our type from JSON
instance FromJSON CloudModel where
parseJSON (Object m) =
CloudModel <$> m .: "clouds"
parseJSON _ = mzero
instance FromJSON Cloud where
parseJSON (Object m) =
Cloud <$> m .: "cloud-id"
<*> m .: "name"
<*> m .: "instances"
instance FromJSON Instance where
parseJSON (Object m) =
Instance <$> m .: "instance-id"
<*> m .: "instance-name"
<*> m .: "location"
<*> m .: "security-group"
<*> m .: "image-id"
<*> m .: "access-key"
parseJSON _ = mzero
--instance FromJSON Location where
-- parseJSON (Object m) =
-- Location <$> m .: "region"
-- <*> m .: "zone"
-- parseJSON _ = mzero
-- Deriving the instance generically is an elegant way to parse the type from
-- JSON/YAML, but it can only be used if the attribute names need no
-- customization. Note that the type to be parsed must have a Generic instance.
instance FromJSON Location
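-- A sketch of the YAML layout these instances accept (identifiers are
-- illustrative; 'Location' relies on the generic field names "region" and
-- "zone"):
--
-- > clouds:
-- >   - cloud-id: c1
-- >     name: my-cloud
-- >     instances:
-- >       - instance-id: i1
-- >         instance-name: web-1
-- >         location: { region: us-east-1, zone: a }
-- >         security-group: default
-- >         image-id: ami-123
-- >         access-key: my-key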
yamlFile :: FilePath
yamlFile = "cloud-model.yaml"
parseCloudModel yamlFile =
do
content <- BS.readFile yamlFile
let parsedModel = Y.decode content :: Maybe CloudModel
case parsedModel of
Nothing -> return $ Fail "Could not parse the YAML cloud model. Please check the input file."
-- (Just (CloudModel clouds)) -> return $ Success clouds -- only the clouds
Just c -> return $ BasicTypes.Success c
| alessandroleite/hephaestus-pl | src/meta-hephaestus/HplAssets/Cloud/Parsers/Yaml.hs | lgpl-3.0 | 2,162 | 0 | 17 | 508 | 334 | 185 | 149 | 37 | 2 |
{- |
Module : Camfort.Specification.Stencils.Parser.Types
Description : Defines the representation of stencil specifications resulting from parsing.
Copyright : (c) 2017, Dominic Orchard, Andrew Rice, Mistral Contrastin, Matthew Danish
License : Apache-2.0
Maintainer : [email protected]
Stability : experimental
-}
{-# LANGUAGE DeriveDataTypeable #-}
module Camfort.Specification.Stencils.Parser.Types
( Specification(..)
, Region(..)
, SpecInner(..)
, reqRegions
) where
import Data.Data (Data, Typeable)
import Data.List (nub, sort)
import Camfort.Specification.Stencils.Model
(Approximation(..), Multiplicity(..))
import qualified Camfort.Specification.Stencils.Syntax as Syn
data Specification
= RegionDec String Region
| SpecDec SpecInner [String]
deriving (Show, Eq, Typeable, Data)
-- | Regions that are referenced in a specification.
reqRegions :: Specification -> [Syn.Variable]
reqRegions spec = nub . sort $
case spec of
RegionDec _ r -> reqRegions' r
SpecDec (SpecInner x _) _ ->
case x of
Once a -> reqRegionsApprox a
Mult a -> reqRegionsApprox a
where
reqRegionsApprox (Exact r) = reqRegions' r
reqRegionsApprox (Bound l u) =
let maybeReqRegions = maybe [] reqRegions'
in maybeReqRegions l ++ maybeReqRegions u
reqRegions' :: Region -> [Syn.Variable]
reqRegions' RegionConst{} = []
reqRegions' (Or r1 r2) = reqRegions' r1 ++ reqRegions' r2
reqRegions' (And r1 r2) = reqRegions' r1 ++ reqRegions' r2
reqRegions' (Var v) = [v]
data Region
= RegionConst Syn.Region
| Or Region Region
| And Region Region
| Var String
deriving (Show, Eq, Ord, Typeable, Data)
data SpecInner = SpecInner
(Multiplicity (Approximation Region)) -- main specification content
Syn.IsStencil -- a bool: stencil or access
deriving (Show, Eq, Typeable, Data)
| dorchard/camfort | src/Camfort/Specification/Stencils/Parser/Types.hs | apache-2.0 | 1,948 | 0 | 12 | 438 | 491 | 266 | 225 | 42 | 7 |
{-| Implementation of the LUXI loader.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.HTools.Backend.Luxi
( loadData
, parseData
) where
import qualified Control.Exception as E
import Control.Monad (liftM)
import Text.JSON.Types
import qualified Text.JSON
import Ganeti.BasicTypes
import Ganeti.Errors
import qualified Ganeti.Luxi as L
import qualified Ganeti.Query.Language as Qlang
import Ganeti.HTools.Loader
import Ganeti.HTools.Types
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
import Ganeti.JSON (fromJVal, tryFromObj, arrayMaybeFromJVal,
getKeysFromContainer, Container)
import Ganeti.Objects (PartialNicParams)
{-# ANN module "HLint: ignore Eta reduce" #-}
-- * Utility functions
annotateConvert :: String -> String -> String -> Result a -> Result a
annotateConvert otype oname oattr =
annotateResult $ otype ++ " '" ++ oname ++
"', error while reading attribute '" ++ oattr ++ "'"
-- | Annotate errors when converting values with owner/attribute for
-- better debugging.
genericConvert :: (Text.JSON.JSON a) =>
String -- ^ The object type
-> String -- ^ The object name
-> String -- ^ The attribute we're trying to convert
-> (JSValue, JSValue) -- ^ The value we're trying to convert
-> Result a -- ^ The annotated result
genericConvert otype oname oattr =
annotateConvert otype oname oattr . L.fromJValWithStatus
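-- For example, @genericConvert "Node" "node1" "mfree"@ annotates any
-- conversion failure with the context string:
-- Node 'node1', error while reading attribute 'mfree'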
convertArrayMaybe :: (Text.JSON.JSON a) =>
String -- ^ The object type
-> String -- ^ The object name
-> String -- ^ The attribute we're trying to convert
-> (JSValue, JSValue) -- ^ The value we're trying to convert
-> Result [Maybe a] -- ^ The annotated result
convertArrayMaybe otype oname oattr (st, v) = do
st' <- fromJVal st
Qlang.checkRS st' v >>=
annotateConvert otype oname oattr . arrayMaybeFromJVal
-- * Data querying functionality
-- | The input data for node query.
queryNodesMsg :: L.LuxiOp
queryNodesMsg =
L.Query (Qlang.ItemTypeOpCode Qlang.QRNode)
["name", "mtotal", "mnode", "mfree", "dtotal", "dfree",
"ctotal", "cnos", "offline", "drained", "vm_capable",
"ndp/spindle_count", "group.uuid", "tags",
"ndp/exclusive_storage", "sptotal", "spfree", "ndp/cpu_speed",
"hv_state"]
Qlang.EmptyFilter
-- | The input data for instance query.
queryInstancesMsg :: L.LuxiOp
queryInstancesMsg =
L.Query (Qlang.ItemTypeOpCode Qlang.QRInstance)
["name", "disk_usage", "be/memory", "be/vcpus",
"status", "pnode", "snodes", "tags", "oper_ram",
"be/auto_balance", "disk_template",
"be/spindle_use", "disk.sizes", "disk.spindles",
"forthcoming"] Qlang.EmptyFilter
-- | The input data for cluster query.
queryClusterInfoMsg :: L.LuxiOp
queryClusterInfoMsg = L.QueryClusterInfo
-- | The input data for node group query.
queryGroupsMsg :: L.LuxiOp
queryGroupsMsg =
L.Query (Qlang.ItemTypeOpCode Qlang.QRGroup)
["uuid", "name", "alloc_policy", "ipolicy", "tags", "networks"]
Qlang.EmptyFilter
-- | Wrapper over 'callMethod' doing node query.
queryNodes :: L.Client -> IO (Result JSValue)
queryNodes = liftM errToResult . L.callMethod queryNodesMsg
-- | Wrapper over 'callMethod' doing instance query.
queryInstances :: L.Client -> IO (Result JSValue)
queryInstances = liftM errToResult . L.callMethod queryInstancesMsg
-- | Wrapper over 'callMethod' doing cluster information query.
queryClusterInfo :: L.Client -> IO (Result JSValue)
queryClusterInfo = liftM errToResult . L.callMethod queryClusterInfoMsg
-- | Wrapper over callMethod doing group query.
queryGroups :: L.Client -> IO (Result JSValue)
queryGroups = liftM errToResult . L.callMethod queryGroupsMsg
-- | Parse an instance list in JSON format.
getInstances :: Bool -- ^ use only state-of-record (SoR) data
-> NameAssoc
-> JSValue
-> Result [(String, Instance.Instance)]
getInstances sor ktn arr = L.extractArray arr >>= mapM (parseInstance sor ktn)
-- | Construct an instance from a JSON object.
parseInstance :: Bool -- ^ use only state-of-record (SoR) data
-> NameAssoc
-> [(JSValue, JSValue)]
-> Result (String, Instance.Instance)
parseInstance sor ktn
[ name, disk, mem, vcpus, status, pnode, snodes, tags, oram
, auto_balance, disk_template, su, dsizes, dspindles, forthcoming ] = do
xname <- annotateResult "Parsing new instance" (L.fromJValWithStatus name)
let convert a = genericConvert "Instance" xname a
xdisk <- convert "disk_usage" disk
xmem <- case (sor, oram) of -- FIXME: remove the "guessing"
(False, (_, JSRational _ _)) -> convert "oper_ram" oram
-- Note: "oper_ram" is live data; we only use it if not told
-- to restrict to state-of-record data
_ -> convert "be/memory" mem
xvcpus <- convert "be/vcpus" vcpus
xpnode <- convert "pnode" pnode >>= lookupNode ktn xname
xsnodes <- convert "snodes" snodes::Result [String]
snode <- case xsnodes of
[] -> return Node.noSecondary
x:_ -> lookupNode ktn xname x
xrunning <- convert "status" status
xtags <- convert "tags" tags
xauto_balance <- convert "auto_balance" auto_balance
xdt <- convert "disk_template" disk_template
xsu <- convert "be/spindle_use" su
xdsizes <- convert "disk.sizes" dsizes
xdspindles <- convertArrayMaybe "Instance" xname "disk.spindles" dspindles
xforthcoming <- convert "forthcoming" forthcoming
let disks = zipWith Instance.Disk xdsizes xdspindles
inst = Instance.create xname xmem xdisk disks
xvcpus xrunning xtags xauto_balance xpnode snode xdt xsu []
xforthcoming
return (xname, inst)
parseInstance _ _ v = fail ("Invalid instance query result: " ++ show v)
-- | Parse a node list in JSON format.
getNodes :: NameAssoc -> JSValue -> Result [(String, Node.Node)]
getNodes ktg arr = L.extractArray arr >>= mapM (parseNode ktg)
-- | Construct a node from a JSON object.
parseNode :: NameAssoc -> [(JSValue, JSValue)] -> Result (String, Node.Node)
parseNode ktg [ name, mtotal, mnode, mfree, dtotal, dfree
, ctotal, cnos, offline, drained, vm_capable, spindles, g_uuid
, tags, excl_stor, sptotal, spfree, cpu_speed, hv_state ]
= do
xname <- annotateResult "Parsing new node" (L.fromJValWithStatus name)
let convert a = genericConvert "Node" xname a
xoffline <- convert "offline" offline
xdrained <- convert "drained" drained
xvm_capable <- convert "vm_capable" vm_capable
xgdx <- convert "group.uuid" g_uuid >>= lookupGroup ktg xname
xtags <- convert "tags" tags
xexcl_stor <- convert "exclusive_storage" excl_stor
xcpu_speed <- convert "cpu_speed" cpu_speed
let live = not xoffline && xvm_capable
lvconvert def n d = eitherLive live def $ convert n d
xsptotal <- if xexcl_stor
then lvconvert 0 "sptotal" sptotal
else convert "spindles" spindles
let xspfree = genericResult (const (0 :: Int)) id
$ lvconvert 0 "spfree" spfree
-- "spfree" might be missing, if sharedfile is the only
-- supported disk template
xmtotal <- lvconvert 0.0 "mtotal" mtotal
xmnode <- lvconvert 0 "mnode" mnode
xmfree <- lvconvert 0 "mfree" mfree
let xdtotal = genericResult (const 0.0) id
$ lvconvert 0.0 "dtotal" dtotal
xdfree = genericResult (const 0) id
$ lvconvert 0 "dfree" dfree
-- "dtotal" and "dfree" might be missing, e.g., if sharedfile
-- is the only supported disk template
xctotal <- lvconvert 0.0 "ctotal" ctotal
xcnos <- lvconvert 0 "cnos" cnos
xhv_state <- convert "hv_state" hv_state
let node_mem = obtainNodeMemory xhv_state xmnode
node = flip Node.setCpuSpeed xcpu_speed .
flip Node.setNodeTags xtags $
Node.create xname xmtotal node_mem xmfree xdtotal xdfree
xctotal xcnos (not live || xdrained) xsptotal xspfree
xgdx xexcl_stor
return (xname, node)
parseNode _ v = fail ("Invalid node query result: " ++ show v)
-- | Parses the cluster data (tags, instance policy and master node name).
getClusterData :: JSValue -> Result ([String], IPolicy, String)
getClusterData (JSObject obj) = do
let errmsg = "Parsing cluster info"
obj' = fromJSObject obj
ctags <- tryFromObj errmsg obj' "tags"
cpol <- tryFromObj errmsg obj' "ipolicy"
master <- tryFromObj errmsg obj' "master"
return (ctags, cpol, master)
getClusterData _ = Bad "Cannot parse cluster info, not a JSON record"
-- | Parses the cluster groups.
getGroups :: JSValue -> Result [(String, Group.Group)]
getGroups jsv = L.extractArray jsv >>= mapM parseGroup
-- | Parses a given group's information.
parseGroup :: [(JSValue, JSValue)] -> Result (String, Group.Group)
parseGroup [uuid, name, apol, ipol, tags, nets] = do
xname <- annotateResult "Parsing new group" (L.fromJValWithStatus name)
let convert a = genericConvert "Group" xname a
xuuid <- convert "uuid" uuid
xapol <- convert "alloc_policy" apol
xipol <- convert "ipolicy" ipol
xtags <- convert "tags" tags
xnets <- convert "networks" nets :: Result (Container PartialNicParams)
let xnetids = getKeysFromContainer xnets
return (xuuid, Group.create xname xuuid xapol xnetids xipol xtags)
parseGroup v = fail ("Invalid group query result: " ++ show v)
-- * Main loader functionality
-- | Builds the cluster data by querying a given socket name.
readData :: String -- ^ Unix socket to use as source
-> IO (Result JSValue, Result JSValue, Result JSValue, Result JSValue)
readData master =
E.bracket
(L.getLuxiClient master)
L.closeClient
(\s -> do
nodes <- queryNodes s
instances <- queryInstances s
cinfo <- queryClusterInfo s
groups <- queryGroups s
return (groups, nodes, instances, cinfo)
)
-- | Converts the output of 'readData' into the internal cluster
-- representation.
parseData :: Bool -- ^ use only state-of-record (SoR) data
-> (Result JSValue, Result JSValue, Result JSValue, Result JSValue)
-> Result ClusterData
parseData sor (groups, nodes, instances, cinfo) = do
group_data <- groups >>= getGroups
let (group_names, group_idx) = assignIndices group_data
node_data <- nodes >>= getNodes group_names
let (node_names, node_idx) = assignIndices node_data
inst_data <- instances >>= getInstances sor node_names
let (_, inst_idx) = assignIndices inst_data
(ctags, cpol, master) <- cinfo >>= getClusterData
node_idx' <- setMaster node_names node_idx master
return (ClusterData group_idx node_idx' inst_idx ctags cpol)
-- | Top level function for data loading.
loadData :: Bool -- ^ use only state-of-record (SoR) data
-> String -- ^ Unix socket to use as source
-> IO (Result ClusterData)
loadData sor = fmap (parseData sor) . readData
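-- An illustrative usage sketch (not part of the original module; the socket
-- path below is hypothetical):
--
-- > example :: IO ()
-- > example = do
-- >   cdata <- loadData True "/var/run/ganeti/socket/ganeti-query"
-- >   case cdata of
-- >     Bad msg -> putStrLn ("Failed to load cluster data: " ++ msg)
-- >     Ok _    -> putStrLn "Cluster data loaded (state-of-record only)"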
| onponomarev/ganeti | src/Ganeti/HTools/Backend/Luxi.hs | bsd-2-clause | 12,436 | 0 | 14 | 2,717 | 2,786 | 1,439 | 1,347 | 205 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.CloudSearch.CreateDomain
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Creates a new search domain. For more information, see Creating a Search
-- Domain in the /Amazon CloudSearch Developer Guide/.
--
-- <http://docs.aws.amazon.com/cloudsearch/latest/developerguide/API_CreateDomain.html>
module Network.AWS.CloudSearch.CreateDomain
(
-- * Request
CreateDomain
-- ** Request constructor
, createDomain
-- ** Request lenses
, cdDomainName
-- * Response
, CreateDomainResponse
-- ** Response constructor
, createDomainResponse
-- ** Response lenses
, cdrDomainStatus
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.CloudSearch.Types
import qualified GHC.Exts
newtype CreateDomain = CreateDomain
{ _cdDomainName :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'CreateDomain' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cdDomainName' @::@ 'Text'
--
createDomain :: Text -- ^ 'cdDomainName'
-> CreateDomain
createDomain p1 = CreateDomain
{ _cdDomainName = p1
}
-- | A name for the domain you are creating. Allowed characters are a-z
-- (lower-case letters), 0-9, and hyphen (-). Domain names must start with a
-- letter or number and be at least 3 and no more than 28 characters long.
cdDomainName :: Lens' CreateDomain Text
cdDomainName = lens _cdDomainName (\s a -> s { _cdDomainName = a })
newtype CreateDomainResponse = CreateDomainResponse
{ _cdrDomainStatus :: Maybe DomainStatus
} deriving (Eq, Read, Show)
-- | 'CreateDomainResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cdrDomainStatus' @::@ 'Maybe' 'DomainStatus'
--
createDomainResponse :: CreateDomainResponse
createDomainResponse = CreateDomainResponse
{ _cdrDomainStatus = Nothing
}
cdrDomainStatus :: Lens' CreateDomainResponse (Maybe DomainStatus)
cdrDomainStatus = lens _cdrDomainStatus (\s a -> s { _cdrDomainStatus = a })
instance ToPath CreateDomain where
toPath = const "/"
instance ToQuery CreateDomain where
toQuery CreateDomain{..} = mconcat
[ "DomainName" =? _cdDomainName
]
instance ToHeaders CreateDomain
instance AWSRequest CreateDomain where
type Sv CreateDomain = CloudSearch
type Rs CreateDomain = CreateDomainResponse
request = post "CreateDomain"
response = xmlResponse
instance FromXML CreateDomainResponse where
parseXML = withElement "CreateDomainResult" $ \x -> CreateDomainResponse
<$> x .@? "DomainStatus"
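-- An illustrative usage sketch (not part of the generated module; the domain
-- name is hypothetical, and we assume the usual lens operators (e.g. '^.',
-- '&', '.~') re-exported by 'Network.AWS.Prelude' are in scope):
--
-- > let rq = createDomain "my-search-domain"
-- > rq ^. cdDomainName                               -- "my-search-domain"
-- > createDomainResponse & cdrDomainStatus .~ Nothing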
| kim/amazonka | amazonka-cloudsearch/gen/Network/AWS/CloudSearch/CreateDomain.hs | mpl-2.0 | 3,574 | 0 | 9 | 768 | 424 | 260 | 164 | 53 | 1 |
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS -fno-warn-name-shadowing #-}
-- | Freenect example with GLUT.
-- Video demo here: http://www.youtube.com/watch?v=as2syH8Y8yc
module Main
where
import Control.Concurrent
import Control.Monad
import Data.Bits
import Data.IORef
import Data.Vector.Storable (Vector,(!))
import qualified Data.Vector.Storable as V
import Data.Word
import Foreign.ForeignPtr
import Freenect
import Graphics.Rendering.OpenGL
import Graphics.UI.GLUT hiding (shift)
width, height :: Int
width = 640
height = 480
main :: IO ()
main = do
depthGrid <- newMVar Nothing
_ <- getDepthThread depthGrid
glThread depthGrid
getDepthThread :: MVar (Maybe (Vector Word16)) -> IO ThreadId
getDepthThread depthGrid = forkOS $ do
withContext $ \context -> do
setLogLevel LogFatal context
selectSubdevices context devices
withDevice context index $ \device -> do
setDepthMode device Medium ElevenBit
setDepthCallback device $ \payload _timestamp -> do
_ <- swapMVar depthGrid (Just payload)
postRedisplay Nothing
return ()
startDepth device
forever $ processEvents context
where devices = [Camera]
index = 0 :: Integer
glThread :: MVar (Maybe (Vector Word16)) -> IO ()
glThread depthGrid = do
(_progname,_args) <- getArgsAndInitialize
-- initialDisplayMode $= [DoubleBuffered]
_window <- createWindow "Kinect"
windowSize $= Size (fromIntegral width) (fromIntegral height)
ortho2D 0 (fromIntegral width) 0 (fromIntegral height)
displayCallback $= display depthGrid
mainLoop
display :: MVar (Maybe (Vector Word16)) -> IO ()
display depthGrid = do
depthGrid <- readMVar depthGrid
case depthGrid of
Nothing -> return ()
Just grid -> do
forM_ [(x,y) | x <- [0..width-1], y <- [0..height-1]] $ \(x,y) -> do
let depth = grid ! (y*width + x)
d = fromIntegral (fromIntegral depth :: Word8)/255
patch (x,height-y)
(d,d,d)
swapBuffers
type PatchColor = (GLfloat,GLfloat,GLfloat)
type Loc = (Int,Int)
patch :: Loc -> PatchColor -> IO ()
patch (x,y) (r,g,b) = do
color $ Color3 r g b
rect (Vertex2 xf yf) (Vertex2 (xf+1) (yf+1))
where xf = fromIntegral x :: GLfloat
yf = fromIntegral y :: GLfloat
| chrisdone/freenect | examples/src/Glut.hs | bsd-3-clause | 2,381 | 0 | 23 | 593 | 800 | 412 | 388 | 65 | 2 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Version
-- Copyright : Isaac Jones, Simon Marlow 2003-2004
--
-- Maintainer : Isaac Jones <[email protected]>
-- Stability : alpha
-- Portability : portable
--
-- Versions for packages, based on the 'Version' datatype.
{- Copyright (c) 2003-2004, Isaac Jones
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module UnitTest.Distribution.Version (hunitTests) where
import Distribution.Version
import Distribution.Text ( simpleParse )
import Data.Version ( Version(..), showVersion )
import Control.Monad ( liftM )
import Data.Char ( isSpace, isDigit, isAlphaNum )
import Data.Maybe ( listToMaybe )
import Distribution.Compat.ReadP
import Test.HUnit
-- ------------------------------------------------------------
-- * Testing
-- ------------------------------------------------------------
branch1 :: [Int]
branch1 = [1]
branch2 :: [Int]
branch2 = [1,2]
branch3 :: [Int]
branch3 = [1,2,3]
release1 :: Version
release1 = Version{versionBranch=branch1, versionTags=[]}
release2 :: Version
release2 = Version{versionBranch=branch2, versionTags=[]}
release3 :: Version
release3 = Version{versionBranch=branch3, versionTags=[]}
hunitTests :: [Test]
hunitTests
= [
"released version 1" ~: "failed"
~: (Just release1) ~=? simpleParse "1",
"released version 3" ~: "failed"
~: (Just release3) ~=? simpleParse "1.2.3",
"range comparison LaterVersion 1" ~: "failed"
~: True
~=? release3 `withinRange` (LaterVersion release2),
"range comparison LaterVersion 2" ~: "failed"
~: False
~=? release2 `withinRange` (LaterVersion release3),
"range comparison EarlierVersion 1" ~: "failed"
~: True
~=? release3 `withinRange` (LaterVersion release2),
"range comparison EarlierVersion 2" ~: "failed"
~: False
~=? release2 `withinRange` (LaterVersion release3),
"range comparison orLaterVersion 1" ~: "failed"
~: True
~=? release3 `withinRange` (orLaterVersion release3),
"range comparison orLaterVersion 2" ~: "failed"
~: True
~=? release3 `withinRange` (orLaterVersion release2),
"range comparison orLaterVersion 3" ~: "failed"
~: False
~=? release2 `withinRange` (orLaterVersion release3),
"range comparison orEarlierVersion 1" ~: "failed"
~: True
~=? release2 `withinRange` (orEarlierVersion release2),
"range comparison orEarlierVersion 2" ~: "failed"
~: True
~=? release2 `withinRange` (orEarlierVersion release3),
"range comparison orEarlierVersion 3" ~: "failed"
~: False
~=? release3 `withinRange` (orEarlierVersion release2)
]
| dcreager/cabal | tests/UnitTest/Distribution/Version.hs | bsd-3-clause | 4,361 | 6 | 9 | 954 | 577 | 340 | 237 | 58 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-}
{-# LANGUAGE CPP #-}
module BuildTyCl (
buildDataCon, mkDataConUnivTyVarBinders,
buildPatSyn,
TcMethInfo, buildClass,
distinctAbstractTyConRhs, totallyAbstractTyConRhs,
mkNewTyConRhs, mkDataTyConRhs,
newImplicitBinder, newTyConRepName
) where
#include "HsVersions.h"
import IfaceEnv
import FamInstEnv( FamInstEnvs, mkNewTypeCoAxiom )
import TysWiredIn( isCTupleTyConName )
import DataCon
import PatSyn
import Var
import VarSet
import BasicTypes
import Name
import MkId
import Class
import TyCon
import Type
import Id
import TcType
import SrcLoc( SrcSpan, noSrcSpan )
import DynFlags
import TcRnMonad
import UniqSupply
import Util
import Outputable
distinctAbstractTyConRhs, totallyAbstractTyConRhs :: AlgTyConRhs
distinctAbstractTyConRhs = AbstractTyCon True
totallyAbstractTyConRhs = AbstractTyCon False
mkDataTyConRhs :: [DataCon] -> AlgTyConRhs
mkDataTyConRhs cons
= DataTyCon {
data_cons = cons,
is_enum = not (null cons) && all is_enum_con cons
-- See Note [Enumeration types] in TyCon
}
where
is_enum_con con
| (_univ_tvs, ex_tvs, eq_spec, theta, arg_tys, _res)
<- dataConFullSig con
= null ex_tvs && null eq_spec && null theta && null arg_tys
mkNewTyConRhs :: Name -> TyCon -> DataCon -> TcRnIf m n AlgTyConRhs
-- ^ Monadic because it makes a Name for the coercion TyCon
-- We pass the Name of the parent TyCon, as well as the TyCon itself,
-- because the latter is part of a knot, whereas the former is not.
mkNewTyConRhs tycon_name tycon con
= do { co_tycon_name <- newImplicitBinder tycon_name mkNewTyCoOcc
; let nt_ax = mkNewTypeCoAxiom co_tycon_name tycon etad_tvs etad_roles etad_rhs
; traceIf (text "mkNewTyConRhs" <+> ppr nt_ax)
; return (NewTyCon { data_con = con,
nt_rhs = rhs_ty,
nt_etad_rhs = (etad_tvs, etad_rhs),
nt_co = nt_ax } ) }
-- Coreview looks through newtypes with a Nothing
-- for nt_co, or uses explicit coercions otherwise
where
tvs = tyConTyVars tycon
roles = tyConRoles tycon
inst_con_ty = piResultTys (dataConUserType con) (mkTyVarTys tvs)
rhs_ty = ASSERT( isFunTy inst_con_ty ) funArgTy inst_con_ty
-- Instantiate the data con with the
-- type variables from the tycon
-- NB: a newtype DataCon has a type that must look like
-- forall tvs. <arg-ty> -> T tvs
-- Note that we *can't* use dataConInstOrigArgTys here because
-- the newtype arising from class Foo a => Bar a where {}
-- has a single argument (Foo a) that is a *type class*, so
-- dataConInstOrigArgTys returns [].
etad_tvs :: [TyVar] -- Matched lazily, so that mkNewTypeCo can
etad_roles :: [Role] -- return a TyCon without pulling on rhs_ty
etad_rhs :: Type -- See Note [Tricky iface loop] in LoadIface
(etad_tvs, etad_roles, etad_rhs) = eta_reduce (reverse tvs) (reverse roles) rhs_ty
eta_reduce :: [TyVar] -- Reversed
-> [Role] -- also reversed
-> Type -- Rhs type
-> ([TyVar], [Role], Type) -- Eta-reduced version
-- (tyvars in normal order)
eta_reduce (a:as) (_:rs) ty | Just (fun, arg) <- splitAppTy_maybe ty,
Just tv <- getTyVar_maybe arg,
tv == a,
not (a `elemVarSet` tyCoVarsOfType fun)
= eta_reduce as rs fun
eta_reduce tvs rs ty = (reverse tvs, reverse rs, ty)
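    -- A worked example (illustrative): for
    --     newtype P a b = MkP (Either a b)
    -- rhs_ty is (Either a b), so eta_reduce [b, a] [role_b, role_a] (Either a b)
    -- peels off both type arguments and returns ([], [], Either); the newtype
    -- axiom can therefore be eta-reduced all the way to P ~R Either.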
------------------------------------------------------
buildDataCon :: FamInstEnvs
-> Name
-> Bool -- Declared infix
-> TyConRepName
-> [HsSrcBang]
-> Maybe [HsImplBang]
-- See Note [Bangs on imported data constructors] in MkId
-> [FieldLabel] -- Field labels
-> [TyVarBinder] -- Universals
-> [TyVarBinder] -- Existentials
-> [EqSpec] -- Equality spec
-> ThetaType -- Does not include the "stupid theta"
-- or the GADT equalities
-> [Type] -> Type -- Argument and result types
-> TyCon -- Rep tycon
-> TcRnIf m n DataCon
-- A wrapper for DataCon.mkDataCon that
-- a) makes the worker Id
-- b) makes the wrapper Id if necessary, including
-- allocating its unique (hence monadic)
--   c) Sorts out the TyVarBinders. See mkDataConUnivTyVarBinders
buildDataCon fam_envs src_name declared_infix prom_info src_bangs impl_bangs field_lbls
univ_tvs ex_tvs eq_spec ctxt arg_tys res_ty rep_tycon
= do { wrap_name <- newImplicitBinder src_name mkDataConWrapperOcc
; work_name <- newImplicitBinder src_name mkDataConWorkerOcc
-- This last one takes the name of the data constructor in the source
-- code, which (for Haskell source anyway) will be in the DataName name
-- space, and puts it into the VarName name space
; traceIf (text "buildDataCon 1" <+> ppr src_name)
; us <- newUniqueSupply
; dflags <- getDynFlags
; let stupid_ctxt = mkDataConStupidTheta rep_tycon arg_tys univ_tvs
data_con = mkDataCon src_name declared_infix prom_info
src_bangs field_lbls
univ_tvs ex_tvs eq_spec ctxt
arg_tys res_ty NoRRI rep_tycon
stupid_ctxt dc_wrk dc_rep
dc_wrk = mkDataConWorkId work_name data_con
dc_rep = initUs_ us (mkDataConRep dflags fam_envs wrap_name
impl_bangs data_con)
; traceIf (text "buildDataCon 2" <+> ppr src_name)
; return data_con }
-- The stupid context for a data constructor should be limited to
-- the type variables mentioned in the arg_tys
-- ToDo: Or functionally dependent on?
-- This whole stupid theta thing is, well, stupid.
mkDataConStupidTheta :: TyCon -> [Type] -> [TyVarBinder] -> [PredType]
mkDataConStupidTheta tycon arg_tys univ_tvs
| null stupid_theta = [] -- The common case
| otherwise = filter in_arg_tys stupid_theta
where
tc_subst = zipTvSubst (tyConTyVars tycon)
(mkTyVarTys (binderVars univ_tvs))
stupid_theta = substTheta tc_subst (tyConStupidTheta tycon)
-- Start by instantiating the master copy of the
-- stupid theta, taken from the TyCon
arg_tyvars = tyCoVarsOfTypes arg_tys
in_arg_tys pred = not $ isEmptyVarSet $
tyCoVarsOfType pred `intersectVarSet` arg_tyvars
mkDataConUnivTyVarBinders :: [TyConBinder] -- From the TyCon
-> [TyVarBinder] -- For the DataCon
-- See Note [Building the TyBinders for a DataCon]
mkDataConUnivTyVarBinders tc_bndrs
= map mk_binder tc_bndrs
where
mk_binder (TvBndr tv tc_vis) = mkTyVarBinder vis tv
where
vis = case tc_vis of
AnonTCB -> Specified
NamedTCB Required -> Specified
NamedTCB vis -> vis
{- Note [Building the TyBinders for a DataCon]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A DataCon needs to keep track of the visibility of its universals and
existentials, so that visible type application can work properly. This
is done by storing the universal and existential TyVarBinders.
See Note [TyVarBinders in DataCons] in DataCon.
During construction of a DataCon, we often start from the TyBinders of
the parent TyCon. For example
data Maybe a = Nothing | Just a
The DataCons start from the TyBinders of the parent TyCon.
But the ultimate TyBinders for the DataCon are *different* from those
of the TyCon. Here is an example:
data App a b = MkApp (a b) -- App :: forall {k}. (k->*) -> k -> *
The TyCon has
tyConTyVars = [ k:*, a:k->*, b:k]
tyConTyBinders = [ Named (TvBndr (k :: *) Inferred), Anon (k->*), Anon k ]
The TyBinders for App line up with App's kind, given above.
But the DataCon MkApp has the type
MkApp :: forall {k} (a:k->*) (b:k). a b -> App k a b
That is, its TyBinders should be
dataConUnivTyVarBinders = [ TvBndr (k:*) Inferred
, TvBndr (a:k->*) Specified
, TvBndr (b:k) Specified ]
So we want to take the TyCon's TyBinders and the TyCon's TyVars and
merge them, pulling
- variable names from the TyVars
- visibilities from the TyBinders
- but changing Anon/Required to Specified
The last part about Required->Specified comes from this:
data T k (a:k) b = MkT (a b)
Here k is Required in T's kind, but we don't have Required binders in
the TyBinders for a term (see Note [No Required TyBinder in terms]
in TyCoRep), so we change it to Specified when making MkT's TyBinders
This merging operation is done by mkDataConUnivTyVarBinders. In contrast,
the TyBinders passed to mkDataCon are the final TyBinders stored in the
DataCon (mkDataCon does no further work).
-}
------------------------------------------------------
buildPatSyn :: Name -> Bool
-> (Id,Bool) -> Maybe (Id, Bool)
-> ([TyVarBinder], ThetaType) -- ^ Univ and req
-> ([TyVarBinder], ThetaType) -- ^ Ex and prov
-> [Type] -- ^ Argument types
-> Type -- ^ Result type
-> [FieldLabel] -- ^ Field labels for
-- a record pattern synonym
-> PatSyn
buildPatSyn src_name declared_infix matcher@(matcher_id,_) builder
(univ_tvs, req_theta) (ex_tvs, prov_theta) arg_tys
pat_ty field_labels
= -- The assertion checks that the matcher is
-- compatible with the pattern synonym
ASSERT2((and [ univ_tvs `equalLength` univ_tvs1
, ex_tvs `equalLength` ex_tvs1
, pat_ty `eqType` substTy subst pat_ty1
, prov_theta `eqTypes` substTys subst prov_theta1
, req_theta `eqTypes` substTys subst req_theta1
, arg_tys `eqTypes` substTys subst arg_tys1
])
, (vcat [ ppr univ_tvs <+> twiddle <+> ppr univ_tvs1
, ppr ex_tvs <+> twiddle <+> ppr ex_tvs1
, ppr pat_ty <+> twiddle <+> ppr pat_ty1
, ppr prov_theta <+> twiddle <+> ppr prov_theta1
, ppr req_theta <+> twiddle <+> ppr req_theta1
, ppr arg_tys <+> twiddle <+> ppr arg_tys1]))
mkPatSyn src_name declared_infix
(univ_tvs, req_theta) (ex_tvs, prov_theta)
arg_tys pat_ty
matcher builder field_labels
where
((_:_:univ_tvs1), req_theta1, tau) = tcSplitSigmaTy $ idType matcher_id
([pat_ty1, cont_sigma, _], _) = tcSplitFunTys tau
(ex_tvs1, prov_theta1, cont_tau) = tcSplitSigmaTy cont_sigma
(arg_tys1, _) = tcSplitFunTys cont_tau
twiddle = char '~'
subst = zipTvSubst (univ_tvs1 ++ ex_tvs1)
(mkTyVarTys (binderVars (univ_tvs ++ ex_tvs)))
------------------------------------------------------
type TcMethInfo -- A temporary intermediate, to communicate
-- between tcClassSigs and buildClass.
= ( Name -- Name of the class op
, Type -- Type of the class op
, Maybe (DefMethSpec (SrcSpan, Type)))
-- Nothing => no default method
--
-- Just VanillaDM => There is an ordinary
-- polymorphic default method
--
         -- Just (GenericDM (loc, ty)) => There is a generic default method
-- Here is its type, and the location
-- of the type signature
-- We need that location /only/ to attach it to the
-- generic default method's Name; and we need /that/
-- only to give the right location of an ambiguity error
-- for the generic default method, spat out by checkValidClass
buildClass :: Name -- Name of the class/tycon (they have the same Name)
-> [TyConBinder] -- Of the tycon
-> [Role] -> ThetaType
-> [FunDep TyVar] -- Functional dependencies
-> [ClassATItem] -- Associated types
-> [TcMethInfo] -- Method info
-> ClassMinimalDef -- Minimal complete definition
-> TcRnIf m n Class
buildClass tycon_name binders roles sc_theta
fds at_items sig_stuff mindef
= fixM $ \ rec_clas -> -- Only name generation inside loop
do { traceIf (text "buildClass")
; datacon_name <- newImplicitBinder tycon_name mkClassDataConOcc
; tc_rep_name <- newTyConRepName tycon_name
; op_items <- mapM (mk_op_item rec_clas) sig_stuff
-- Build the selector id and default method id
-- Make selectors for the superclasses
; sc_sel_names <- mapM (newImplicitBinder tycon_name . mkSuperDictSelOcc)
(takeList sc_theta [fIRST_TAG..])
; let sc_sel_ids = [ mkDictSelId sc_name rec_clas
| sc_name <- sc_sel_names]
-- We number off the Dict superclass selectors, 1, 2, 3 etc so that we
-- can construct names for the selectors. Thus
-- class (C a, C b) => D a b where ...
-- gives superclass selectors
-- D_sc1, D_sc2
-- (We used to call them D_C, but now we can have two different
-- superclasses both called C!)
; let use_newtype = isSingleton arg_tys
-- Use a newtype if the data constructor
-- (a) has exactly one value field
-- i.e. exactly one operation or superclass taken together
-- (b) that value is of lifted type (which they always are, because
-- we box equality superclasses)
-- See note [Class newtypes and equality predicates]
-- We treat the dictionary superclasses as ordinary arguments.
-- That means that in the case of
-- class C a => D a
-- we don't get a newtype with no arguments!
args = sc_sel_names ++ op_names
op_tys = [ty | (_,ty,_) <- sig_stuff]
op_names = [op | (op,_,_) <- sig_stuff]
arg_tys = sc_theta ++ op_tys
rec_tycon = classTyCon rec_clas
univ_bndrs = mkDataConUnivTyVarBinders binders
univ_tvs = binderVars univ_bndrs
; rep_nm <- newTyConRepName datacon_name
; dict_con <- buildDataCon (panic "buildClass: FamInstEnvs")
datacon_name
False -- Not declared infix
rep_nm
(map (const no_bang) args)
(Just (map (const HsLazy) args))
[{- No fields -}]
univ_bndrs
[{- no existentials -}]
[{- No GADT equalities -}]
[{- No theta -}]
arg_tys
(mkTyConApp rec_tycon (mkTyVarTys univ_tvs))
rec_tycon
; rhs <- if use_newtype
then mkNewTyConRhs tycon_name rec_tycon dict_con
else if isCTupleTyConName tycon_name
then return (TupleTyCon { data_con = dict_con
, tup_sort = ConstraintTuple })
else return (mkDataTyConRhs [dict_con])
; let { tycon = mkClassTyCon tycon_name binders roles
rhs rec_clas tc_rep_name
-- A class can be recursive, and in the case of newtypes
-- this matters. For example
-- class C a where { op :: C b => a -> b -> Int }
-- Because C has only one operation, it is represented by
-- a newtype, and it should be a *recursive* newtype.
-- [If we don't make it a recursive newtype, we'll expand the
-- newtype like a synonym, but that will lead to an infinite
-- type]
; result = mkClass tycon_name univ_tvs fds
sc_theta sc_sel_ids at_items
op_items mindef tycon
}
; traceIf (text "buildClass" <+> ppr tycon)
; return result }
where
no_bang = HsSrcBang Nothing NoSrcUnpack NoSrcStrict
mk_op_item :: Class -> TcMethInfo -> TcRnIf n m ClassOpItem
mk_op_item rec_clas (op_name, _, dm_spec)
= do { dm_info <- mk_dm_info op_name dm_spec
; return (mkDictSelId op_name rec_clas, dm_info) }
mk_dm_info :: Name -> Maybe (DefMethSpec (SrcSpan, Type))
-> TcRnIf n m (Maybe (Name, DefMethSpec Type))
mk_dm_info _ Nothing
= return Nothing
mk_dm_info op_name (Just VanillaDM)
= do { dm_name <- newImplicitBinder op_name mkDefaultMethodOcc
; return (Just (dm_name, VanillaDM)) }
mk_dm_info op_name (Just (GenericDM (loc, dm_ty)))
= do { dm_name <- newImplicitBinderLoc op_name mkDefaultMethodOcc loc
; return (Just (dm_name, GenericDM dm_ty)) }
{-
Note [Class newtypes and equality predicates]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
class (a ~ F b) => C a b where
op :: a -> b
We cannot represent this by a newtype, even though it's not
existential, because there are two value fields (the equality
predicate and op). See Trac #2238
Moreover,
class (a ~ F b) => C a b where {}
Here we can't use a newtype either, even though there is only
one field, because equality predicates are unboxed, and classes
are boxed.
-}
newImplicitBinder :: Name -- Base name
-> (OccName -> OccName) -- Occurrence name modifier
-> TcRnIf m n Name -- Implicit name
-- Called in BuildTyCl to allocate the implicit binders of type/class decls
-- For source type/class decls, this is the first occurrence
-- For iface ones, the LoadIface has already allocated a suitable name in the cache
newImplicitBinder base_name mk_sys_occ
= newImplicitBinderLoc base_name mk_sys_occ (nameSrcSpan base_name)
newImplicitBinderLoc :: Name -- Base name
-> (OccName -> OccName) -- Occurrence name modifier
-> SrcSpan
-> TcRnIf m n Name -- Implicit name
-- Just the same, but lets you specify the SrcSpan
newImplicitBinderLoc base_name mk_sys_occ loc
| Just mod <- nameModule_maybe base_name
= newGlobalBinder mod occ loc
| otherwise -- When typechecking a [d| decl bracket |],
-- TH generates types, classes etc with Internal names,
-- so we follow suit for the implicit binders
= do { uniq <- newUnique
; return (mkInternalName uniq occ loc) }
where
occ = mk_sys_occ (nameOccName base_name)
-- | Make the 'TyConRepName' for this 'TyCon'
newTyConRepName :: Name -> TcRnIf gbl lcl TyConRepName
newTyConRepName tc_name
| Just mod <- nameModule_maybe tc_name
, (mod, occ) <- tyConRepModOcc mod (nameOccName tc_name)
= newGlobalBinder mod occ noSrcSpan
| otherwise
= newImplicitBinder tc_name mkTyConRepOcc
| snoyberg/ghc | compiler/iface/BuildTyCl.hs | bsd-3-clause | 20,357 | 0 | 19 | 7,001 | 2,942 | 1,602 | 1,340 | 251 | 5 |
{-# LANGUAGE NoImplicitPrelude #-}
module Main where
import Criterion.Main
import Numeric.Units.Dimensional.Prelude
import qualified Prelude as P
main :: IO ()
main = defaultMain [
bench "RawArithmetic" $ nf rawArithmetic 1000
, bench "Arithmetic" $ nf arithmetic 1000
]
rawArithmetic :: Int -> [Double]
rawArithmetic n = fmap (P./ 3.7) $ [1.0 .. fromIntegral n]
arithmetic :: Int -> [Density Double]
arithmetic n = fmap (/ (3.7 *~ cubic meter)) $ [1.0 .. fromIntegral n] *~~ kilo gram
| bjornbm/dimensional-dk | benchmarks/Main.hs | bsd-3-clause | 514 | 0 | 11 | 105 | 173 | 94 | 79 | 13 | 1 |
module Network.XmlRpc.Introspect where
import Network.XmlRpc.Internals
import Network.XmlRpc.Client
type Signature = ([Type],Type)
type Help = String
type MethodInfo = (String,[Signature],Help)
-- Primitive introspection functions
listMethods :: String -> IO [String]
listMethods url = remote url "system.listMethods"
methodSignature :: String -> String -> IO [[String]]
methodSignature url = remote url "system.methodSignature"
methodHelp :: String -> String -> IO String
methodHelp url = remote url "system.methodHelp"
signatures :: String -> String -> IO [Signature]
signatures url name = do
sigs <- methodSignature url name
return [ (map read as,read r) | (r:as) <- sigs ]
methodInfo :: String -> String -> IO MethodInfo
methodInfo url name = do
sigs <- signatures url name
help <- methodHelp url name
return (name, sigs, help)
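-- An illustrative usage sketch (not part of the original module; the endpoint
-- URL is hypothetical):
--
-- > introspect :: String -> IO ()
-- > introspect url = do
-- >   names <- listMethods url
-- >   mapM_ (\m -> do (_, _, help) <- methodInfo url m
-- >                   putStrLn (m ++ ": " ++ help))
-- >         names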
| laurencer/confluence-sync | vendor/haxr/Network/XmlRpc/Introspect.hs | bsd-3-clause | 881 | 0 | 12 | 168 | 295 | 156 | 139 | 21 | 1 |
module A (mainA) where
import B
mainA :: IO ()
mainA = do
putStrLn "Hello"
putStrLn name
| sdiehl/ghc | testsuite/tests/driver/T16511/A.hs | bsd-3-clause | 99 | 0 | 7 | 27 | 39 | 20 | 19 | 6 | 1 |
module Main where
import Common
import Text.Megaparsec
import Text.Megaparsec.Char
import Data.List
data Scramble
= RotateRight Int
| RotateLeft Int
| SwapPosition Int Int
| SwapLetter Char Char
| RotateChar Char
| ReversePositions Int Int
| MovePosition Int Int
deriving Show
parseScramble :: Parser Scramble
parseScramble =
RotateRight 1 <$ wholestring "rotate right 1 step" <|>
RotateRight <$ wholestring "rotate right " <*> number <* string " steps" <|>
RotateLeft 1 <$ wholestring "rotate left 1 step" <|>
RotateLeft <$ wholestring "rotate left " <*> number <* string " steps" <|>
SwapPosition <$ wholestring "swap position " <*> number <* string " with position " <*> number <|>
SwapLetter <$ wholestring "swap letter " <*> letterChar <* string " with letter " <*> letterChar <|>
RotateChar <$ wholestring "rotate based on position of letter " <*> letterChar <|>
ReversePositions <$ wholestring "reverse positions " <*> number <* string " through " <*> number <|>
MovePosition <$ wholestring "move position " <*> number <* string " to position " <*> number
part1, part2 :: String
part1 = "abcdefgh"
part2 = "fbgdceah"
main =
do inp <- parseLines parseScramble <$> readInputFile 21
putStrLn $ foldl (flip forward) part1 inp
putStrLn $ foldr backward part2 inp
rotateRight :: Int -> [a] -> [a]
rotateRight n xs = b ++ a
where
n' = n `mod` length xs
(a,b) = splitAt (length xs - n') xs
rotateLeft :: Int -> [a] -> [a]
rotateLeft n xs = b ++ a
where
n' = n `mod` length xs
(a,b) = splitAt (n') xs
set :: Int -> a -> [a] -> [a]
set i x xs = a ++ [x] ++ b
where
(a,_:b) = splitAt i xs
forward scram =
case scram of
RotateRight i -> rotateRight i
RotateLeft i -> rotateLeft i
SwapPosition i j -> \xs -> set i (xs!!j)
$ set j (xs!!i) xs
SwapLetter x y -> map $ \a -> if a == x then y else if a == y then x else a
RotateChar e -> rotatePositionOf e
ReversePositions i j -> reverseRange i j
MovePosition i j -> movePosition i j
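-- 'backward' undoes a single scramble step: rotations swap direction,
-- position moves swap their arguments, and the swap/reverse operations are
-- their own inverses. 'RotateChar' has no simple closed-form inverse here, so
-- we search every right-rotation of the current string for the one that the
-- forward operation maps back onto it.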
backward scram =
case scram of
RotateRight i -> rotateLeft i
RotateLeft i -> rotateRight i
SwapPosition i j -> \xs -> set i (xs!!j)
$ set j (xs!!i) xs
SwapLetter x y -> map $ \a -> if a == x then y else if a == y then x else a
RotateChar e -> \xs ->
let Just r = find (\a -> rotatePositionOf e a == xs)
$ map (`rotateRight` xs) [0..]
in r
ReversePositions i j -> reverseRange i j
MovePosition i j -> movePosition j i
rotatePositionOf e xs = rotateRight j xs
where
Just i = elemIndex e xs
j | i >=4 = i + 2
| otherwise = i + 1
reverseRange i j xs = a ++ reverse c ++ d
where
(a,b) = splitAt i xs
(c,d) = splitAt (j-i+1) b
movePosition i j xs = c ++ [x] ++ d
where
(a,x:b) = splitAt i xs
(c,d) = splitAt j (a++b)
| glguy/advent2016 | Day21.hs | isc | 3,169 | 0 | 39 | 1,050 | 1,158 | 583 | 575 | 76 | 9 |
module Y2017.M07.D03.Solution where
{--
It's a question of Ord.
From the Mensa Genius Quiz-a-Day Book by Dr. Abbie F. Salny, July 1 problem:
Tom is younger than Rose, but older than Will and Jack, in that order. Rose is
younger than Susie, but older than Jack. Jack is younger than Jim. Susie is
older than Rose, but younger than Jim. Jim is older than Tom. Who is the oldest?
--}
data Person = Tom | Rose | Will | Jack | Susie | Jim
deriving (Eq, Show)
data AgeRel = IsOlderThan Person Person | IsYoungerThan Person Person
deriving (Eq, Show)
statements :: [AgeRel]
statements = [Tom `IsYoungerThan` Rose,
Tom `IsOlderThan` Will,
Tom `IsOlderThan` Jack,
-- 'in that order'? What does that mean? My hazard:
Will `IsYoungerThan` Jack, -- is this a correct interpretation?
-- carrying on:
Rose `IsYoungerThan` Susie,
Rose `IsOlderThan` Jack,
Jack `IsYoungerThan` Jim,
Susie `IsOlderThan` Rose, -- obviously
Susie `IsYoungerThan` Jim,
Jim `IsOlderThan` Tom]
-- The answer, at a glance, is Jim, as Tom is younger than Rose, who is younger
-- than Susan, who is younger than Jim who is only older, not younger, than the
-- others. But how do we go about proving it? Well, as Larry Wall says,
-- TMTOWTDI, or, 'There's More Than One Way To Do It,' so, take your pick. An
-- easy approach is to use list-sorting logic, I suppose.
oldest :: [AgeRel] -> [Person]
oldest peeps = [b | (a `IsYoungerThan` b) <- peeps,
not (any (\(x `IsYoungerThan` y) -> x == b)
(filter ahYouth peeps))]
ahYouth :: AgeRel -> Bool
ahYouth (_ `IsYoungerThan` _) = True
ahYouth _ = False
{--
>>> oldest statements
[Jim,Jim]
Which shows there are two paths to arriving at Jim as the eldest.
Do you see them?
--}
{--
Another approach would be to map these statements into an ontology of age-
relations then go to the top (or the bottom) of that graph. That, however, is
an exercise for another day.
--}
| geophf/1HaskellADay | exercises/HAD/Y2017/M07/D03/Solution.hs | mit | 2,092 | 0 | 13 | 553 | 306 | 190 | 116 | 23 | 1 |
-- Count the Monkeys!
-- http://www.codewars.com/kata/56f69d9f9400f508fb000ba7
module Codewars.Kata.Monkeys where
monkeyCount :: Int -> [Int]
monkeyCount x = [1..x] | gafiatulin/codewars | src/8 kyu/Monkeys.hs | mit | 166 | 0 | 6 | 19 | 34 | 21 | 13 | 3 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{- |
Module : <File name or $Header$ to be replaced automatically>
Description : <optional short text displayed on contents page>
Copyright : (c) <Authors or Affiliations>
License : <license>
Maintainer : <email>
Stability : unstable | experimental | provisional | stable | frozen
Portability : portable | non-portable (<reason>)
<module description starting at first column>
-}
module Client.CSS (layoutCss) where
-- Standard Library
import Data.Text.Lazy (Text)
-- Third Party
import Clay
layoutCss :: Text
layoutCss = render $ do
body ? minHeight (px 2000)
element ".jumbotron" ? (textAlign $ alignSide sideCenter)
element "#gmail" ? (marginLeft $ px 10)
| slogsdon/url | src/Client/CSS.hs | mit | 750 | 0 | 11 | 160 | 99 | 53 | 46 | 9 | 1 |
module DataStructuresUtil where
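-- A 'Line' is a single row of raw text, a 'Column' is a vertical run of
-- lines, and a 'Document' is a list of such columns.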
type Line = String
type Column = [Line]
type Document = [Column]
| axelGschaider/ttofu | src/DataStructuresUtil.hs | mit | 100 | 0 | 5 | 18 | 28 | 19 | 9 | 4 | 0 |
{-# Language BangPatterns #-}
{-# Language DeriveGeneric #-}
{-# Language GeneralizedNewtypeDeriving #-}
{-# Language TupleSections #-}
module Unison.Runtime.Multiplex where
import Control.Applicative
import Control.Concurrent.Async (Async)
import Control.Concurrent.MVar
import Control.Concurrent.STM (atomically, STM)
import Control.Concurrent.STM.TVar
import Control.Concurrent.STM.TMVar
import Control.Concurrent.STM.TQueue
import Control.Exception (catch,throwIO,SomeException,mask_)
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Reader (ReaderT,runReaderT,MonadReader,local)
import Data.Bytes.Serial (Serial(serialize,deserialize))
import Data.Functor
import Data.IORef
import Data.Maybe
import Data.Word
import GHC.Generics
import qualified Data.ByteString.Base64.URL as Base64
import qualified Control.Concurrent as C
import qualified Control.Concurrent.Async as Async
import qualified Control.Monad.Reader as Reader
import qualified Crypto.Random as Random
import qualified Crypto.Hash as Hash
import qualified Data.ByteArray as BA
import qualified Data.ByteString as B
import qualified Data.Bytes.Get as Get
import qualified Data.Bytes.Put as Put
import qualified STMContainers.Map as M
import qualified Unison.Cryptography as C
import qualified Unison.Runtime.Queue as Q
import qualified Unison.Util.Logger as L
data Packet = Packet { destination :: !B.ByteString, content :: !B.ByteString } deriving (Generic)
instance Serial Packet
instance Show Packet where
show (Packet d c) =
show $ (Base64.encode d, Base64.encode . BA.convert $ (Hash.hash c :: Hash.Digest Hash.SHA1))
type IsSubscription = Bool
data Callbacks =
Callbacks (M.Map B.ByteString (B.ByteString -> IO ())) (TVar Word64)
type Env =
( STM Packet -> STM ()
, Callbacks
, IO B.ByteString
, M.Map B.ByteString (Multiplex B.ByteString)
, L.Logger)
newtype Multiplex a = Multiplex (ReaderT Env IO a)
deriving (Applicative, Alternative, Functor, Monad, MonadIO, MonadPlus, MonadReader Env)
env0 :: L.Logger -> IO (Env, Maybe Packet -> IO (), IO (Maybe Packet), STM Bool)
env0 logger = do
fresh <- uniqueChannel
output <- atomically Q.empty :: IO (Q.Queue (Maybe Packet))
input <- atomically newTQueue :: IO (TQueue (Maybe Packet))
cb0@(Callbacks m _) <- Callbacks <$> atomically M.new <*> atomically (newTVar 0)
recvs0 <- atomically M.new
let env = (Q.enqueue output . (Just <$>), cb0, fresh, recvs0, logger)
isActive = (||) <$> (not <$> M.null m) <*> (not <$> M.null recvs0)
_ <- run env (fork $ process (atomically (readTQueue input)))
pure ( env
, atomically . writeTQueue input
, atomically $ Q.dequeue output
, isActive )
run :: Env -> Multiplex a -> IO a
run env (Multiplex go) = runReaderT go env
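-- An illustrative loopback sketch (not part of the original module): 'env0'
-- yields the environment plus hooks for feeding inbound packets and draining
-- outbound ones; wiring the output hook back into the input hook lets a
-- single multiplexer talk to itself.
--
-- > loopback :: L.Logger -> IO ()
-- > loopback lgr = do
-- >   (env, feedInput, drainOutput, _isActive) <- env0 lgr
-- >   -- echo everything we emit straight back in, so sends become receives
-- >   _ <- C.forkIO . forever $ drainOutput >>= feedInput
-- >   run env $ do
-- >     chan <- channel :: Multiplex (Channel Int)
-- >     (recv, unsubscribe) <- subscribe chan
-- >     send chan (42 :: Int)
-- >     n <- recv
-- >     info ("received " ++ show n)
-- >     unsubscribe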
liftLogged :: String -> IO a -> Multiplex a
liftLogged msg action = ask >>= \env -> liftIO $ catch action (handle env) where
handle :: Env -> SomeException -> IO a
handle env ex = run env (warn $ msg ++ " " ++ show ex) >> throwIO ex
ask :: Multiplex Env
ask = Multiplex Reader.ask
logger :: Multiplex L.Logger
logger = do
~(_, _, _, _, logger) <- ask
pure logger
scope :: String -> Multiplex a -> Multiplex a
scope msg = local tweak where
tweak (a,b,c,d,logger) = (a,b,c,d,L.scope msg logger)
-- | Crash with a message. Include the current logging scope.
crash :: String -> Multiplex a
crash msg = do
warn msg
scope msg $ do
l <- logger
fail (show $ L.getScope l)
info, warn, debug :: String -> Multiplex ()
info msg = logger >>= \logger -> liftIO $ L.info logger msg
warn msg = logger >>= \logger -> liftIO $ L.warn logger msg
debug msg = logger >>= \logger -> liftIO $ L.debug logger msg
process :: IO (Maybe Packet) -> Multiplex ()
process recv = scope "Mux.process" $ do
(_, Callbacks cbs _, _, _, logger) <- ask
liftIO . repeatWhile $ do
packet <- recv
case packet of
Nothing -> L.info logger "EOF" $> False
Just (Packet destination content) -> do
callback <- atomically $ M.lookup destination cbs
case callback of
Nothing -> do
L.info logger $ "dropped packet @ " ++ show (Base64.encode destination)
pure True
Just callback -> do
L.debug logger $ "packet delivered @ " ++ show (Base64.encode destination)
callback content
pure True
process1 :: Packet -> Multiplex ()
process1 pk = liftIO (newIORef (Just pk)) >>= \ref -> process $ do
pk <- readIORef ref
case pk of
Just _ -> writeIORef ref Nothing $> pk
Nothing -> pure Nothing
repeatWhile :: Monad f => f Bool -> f ()
repeatWhile action = do
ok <- action
when ok (repeatWhile action)
untilDefined :: Monad f => f (Maybe a) -> f a
untilDefined action = do
ok <- action
case ok of
Nothing -> untilDefined action
Just a -> pure a
uniqueChannel :: IO (IO B.ByteString)
uniqueChannel = do
nonce <- newIORef (0 :: Word)
rng <- newIORef =<< Random.getSystemDRG
pure $ do
n <- atomicModifyIORef' nonce (\n -> (n+1,n))
(bytes,rng') <- Random.randomBytesGenerate 12 <$> readIORef rng
_ <- atomicModifyIORef' rng (\_ -> (rng',rng'))
pure . Put.runPutS $ Put.putByteString (Put.runPutS $ serialize n) >> Put.putByteString bytes
callbacks0 :: STM Callbacks
callbacks0 = Callbacks <$> M.new <*> newTVar 0
data Channel a = Channel (Type a) B.ByteString deriving Generic
instance Show (Channel a) where
show = show . Base64.encode . channelId
newtype EncryptedChannel u o i = EncryptedChannel (Channel B.ByteString) deriving Generic
instance Serial (EncryptedChannel u o i)
erase :: EncryptedChannel u o i -> Channel B.ByteString
erase (EncryptedChannel chan) = chan
channelId :: Channel a -> B.ByteString
channelId (Channel _ id) = id
instance Serial (Channel a)
data Type a = Type deriving Generic
instance Serial (Type a)
type Request a b = Channel (a, Channel b)
type Microseconds = Int
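-- | Send a request tagged with the given reply-to channel and return an
-- action that waits for the reply on that channel. A watchdog thread cancels
-- the pending receive (so the returned action fails) if no reply arrives
-- within the given number of microseconds; the message argument labels the
-- timeout for diagnostics.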
requestTimedVia' :: (Serial a, Serial b)
=> String
-> Microseconds
-> (STM (a, Channel b) -> Multiplex ())
-> Channel b
-> STM a
-> Multiplex (Multiplex b)
requestTimedVia' msg micros send replyTo a = do
env <- ask
(receive, cancel) <- receiveCancellable replyTo
send $ (,replyTo) <$> a
watchdog <- liftIO . C.forkIO $ do
liftIO $ C.threadDelay micros
run env (cancel $ "requestTimedVia timeout " ++ msg)
pure $ receive <* liftIO (C.killThread watchdog)
requestTimedVia :: (Serial a, Serial b) => String -> Microseconds -> Request a b -> Channel b -> STM a
-> Multiplex (Multiplex b)
requestTimedVia msg micros req replyTo a =
requestTimedVia' msg micros (send' req) replyTo a
requestTimed' :: (Serial a, Serial b) => String -> Microseconds -> Request a b -> STM a -> Multiplex (Multiplex b)
requestTimed' msg micros req a = do
replyTo <- channel
requestTimedVia msg micros req replyTo a
requestTimed :: (Serial a, Serial b) => String -> Microseconds -> Request a b -> a -> Multiplex (Multiplex b)
requestTimed msg micros req a = do
replyTo <- channel
env <- ask
(receive, cancel) <- receiveCancellable replyTo
send req (a, replyTo)
watchdog <- liftIO . C.forkIO $ do
liftIO $ C.threadDelay micros
run env (cancel $ "requestTimed timeout " ++ msg)
pure $ receive <* liftIO (C.killThread watchdog) <* cancel ("requestTimed completed")
type Cleartext = B.ByteString
type Ciphertext = B.ByteString
type CipherState = (Cleartext -> STM Ciphertext, Ciphertext -> STM Cleartext)
encryptedRequestTimedVia
:: (Serial a, Serial b)
=> String
-> CipherState
-> Microseconds
-> ((a,Channel b) -> Multiplex ())
-> Channel b
-> a
-> Multiplex b
encryptedRequestTimedVia msg (_,decrypt) micros send replyTo@(Channel _ bs) a = do
responseCiphertext <- receiveTimed msg micros (Channel Type bs)
send (a, replyTo)
responseCiphertext <- responseCiphertext -- force the receive
responseCleartext <- liftIO . atomically . decrypt $ responseCiphertext
either crash pure $ Get.runGetS deserialize responseCleartext
encryptAndSendTo
:: (Serial a, Serial node)
=> node -> Channel B.ByteString -> (Cleartext -> STM Ciphertext) -> a
-> Multiplex ()
encryptAndSendTo recipient chan encrypt a = do
let !bytes = Put.runPutS (serialize a)
nest recipient (send' chan (encrypt bytes))
encryptAndSendTo'
:: (Serial a, Serial node)
=> node -> Channel a -> (Cleartext -> STM Ciphertext) -> a
-> Multiplex ()
encryptAndSendTo' recipient (Channel _ chan) encrypt a =
encryptAndSendTo recipient (Channel Type chan) encrypt a
fork :: Multiplex a -> Multiplex (Async a)
fork m = do
env <- ask
liftIO . Async.async $ run env m
nest :: Serial k => k -> Multiplex a -> Multiplex a
nest outer m = Reader.local tweak m where
tweak (send,cbs,fresh,recvs,log) = (send' send,cbs,fresh,recvs,log)
kbytes = Put.runPutS (serialize outer)
send' send p = send $ (\p -> Packet kbytes (Put.runPutS (serialize p))) <$> p
channel :: Multiplex (Channel a)
channel = do
~(_,_,fresh,_,_) <- ask
Channel Type <$> liftIO fresh
send :: Serial a => Channel a -> a -> Multiplex ()
send chan a = send' chan (pure a)
send' :: Serial a => Channel a -> STM a -> Multiplex ()
send' (Channel _ key) a = do
~(send,_,_,_,_) <- ask
liftIO . atomically $ send (Packet key . Put.runPutS . serialize <$> a)
receiveCancellable' :: Channel a
-> Multiplex (Multiplex B.ByteString, String -> Multiplex ())
receiveCancellable' chan@(Channel _ key) = do
(_,Callbacks cbs _,_,_,_) <- ask
result <- liftIO newEmptyMVar
liftIO . atomically $ M.insert (void . tryPutMVar result . Right) key cbs
cancel <- pure $ \reason -> do
liftIO . atomically $ M.delete key cbs
liftIO . void $ tryPutMVar result (Left $ "Mux.cancelled: " ++ reason)
force <- pure . scope (show chan) . scope "receiveCancellable" $ do
info "awaiting result"
bytes <- liftIO $ takeMVar result
info "got result"
bytes <- either crash pure bytes
info "got result bytes"
pure bytes
pure (force, cancel)
receiveCancellable :: Serial a => Channel a -> Multiplex (Multiplex a, String -> Multiplex ())
receiveCancellable chan@(Channel _ key) = f <$> receiveCancellable' chan where
f (get, cancel) = (g =<< get, cancel)
g bytes = either crash pure $ Get.runGetS deserialize bytes
receiveTimed' :: String -> Microseconds -> Channel a -> Multiplex (Multiplex B.ByteString)
receiveTimed' msg micros chan = do
(force, cancel) <- receiveCancellable' chan
env <- ask
watchdog <- liftIO . C.forkIO $ do
liftIO $ C.threadDelay micros
run env (cancel $ "receiveTimed timeout during " ++ msg)
pure $ scope "receiveTimed" (force <* liftIO (C.killThread watchdog) <* cancel ("receiveTimed completed" ++ msg))
receiveTimed :: Serial a => String -> Microseconds -> Channel a -> Multiplex (Multiplex a)
receiveTimed msg micros chan = tweak <$> receiveTimed' msg micros chan where
tweak bytes = tweak' =<< bytes
tweak' bytes = either crash pure $ Get.runGetS deserialize bytes
-- Save a receive future under the given channel id so that it can later be
-- picked up with 'restoreReceive'; it is discarded after the given timeout.
saveReceive :: Microseconds
-> B.ByteString -> Multiplex B.ByteString -> Multiplex ()
saveReceive micros chan force = do
(_,_,_,recvs,_) <- ask
tid <- liftIO . C.forkIO $ do
C.threadDelay micros
atomically $ M.delete chan recvs
let force' = do
liftIO $ C.killThread tid
liftIO $ atomically (M.delete chan recvs)
force
liftIO . atomically $ M.insert force' chan recvs
restoreReceive :: B.ByteString -> Multiplex B.ByteString
restoreReceive chan = do
(_,_,_,recvs,_) <- ask
o <- liftIO . atomically $ M.lookup chan recvs
fromMaybe (crash $ "chan could not be restored: " ++ show (Base64.encode chan))
o
timeout' :: Microseconds -> a -> Multiplex a -> Multiplex a
timeout' micros onTimeout m = fromMaybe onTimeout <$> timeout micros m
timeout :: Microseconds -> Multiplex a -> Multiplex (Maybe a)
timeout micros m = do
env <- ask
t1 <- liftIO $ Async.async (Just <$> run env m)
t2 <- liftIO $ Async.async (C.threadDelay micros $> Nothing)
liftIO $ snd <$> Async.waitAnyCancel [t1, t2]
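-- | Like 'subscribe', but with an inactivity watchdog: if no message arrives
-- for the given number of microseconds the subscription is cancelled and the
-- returned fetch action yields 'Nothing'. The second component cancels the
-- subscription (and the watchdog) explicitly.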
subscribeTimed :: Serial a => Microseconds -> Channel a -> Multiplex (Multiplex (Maybe a), Multiplex ())
subscribeTimed micros chan = do
(fetch, cancel) <- subscribe chan
result <- liftIO . atomically $ newEmptyTMVar
activity <- liftIO . atomically . newTVar $ False
fetch' <- pure $ do
void . fork $ do
r <- fetch
liftIO . atomically $ do
putTMVar result (Just r)
writeTVar activity True
liftIO . atomically $ takeTMVar result
watchdog <- do
env <- ask
l <- logger
liftIO . C.forkIO $ loop l activity result (run env cancel)
cancel' <- pure $ cancel >> liftIO (C.killThread watchdog)
pure (fetch', cancel')
where
loop logger activity result cancel = do
atomically $ writeTVar activity False
C.threadDelay micros
active <- atomically $ readTVar activity
case active of
False -> do
L.debug logger $ "timed out on " ++ show chan
void $ atomically (tryPutTMVar result Nothing) <* cancel
L.debug logger $ "cancelled subscription to " ++ show chan
True -> do
L.trace logger $ "still activity on " ++ show chan
loop logger activity result cancel
subscribe :: Serial a => Channel a -> Multiplex (Multiplex a, Multiplex ())
subscribe (Channel _ key) = scope "subscribe" $ do
(_, Callbacks cbs _, _, _, _) <- ask
q <- liftIO . atomically $ newTQueue
liftIO . atomically $ M.insert (atomically . writeTQueue q) key cbs
unsubscribe <- pure . liftIO . atomically . M.delete key $ cbs
force <- pure $ do
bytes <- liftIO . atomically $ readTQueue q
either crash pure $ Get.runGetS deserialize bytes
pure (force, unsubscribe)
seconds :: Microseconds -> Int
seconds micros = micros * 1000000
attemptMasked :: IO a -> IO (Either String a)
attemptMasked a =
catch (Right <$> mask_ a) (\e -> pure (Left $ show (e :: SomeException)))
handshakeTimeout :: Microseconds
handshakeTimeout = seconds 5
connectionTimeout :: Microseconds
connectionTimeout = seconds 20
delayBeforeFailure :: Microseconds
delayBeforeFailure = seconds 2
pipeInitiate
:: (Serial i, Serial o, Serial key, Serial u, Serial node)
=> C.Cryptography key t1 t2 t3 t4 t5 Cleartext
-> EncryptedChannel u o i
-> (node,key)
-> u
-> Multiplex (Maybe o -> Multiplex (), Multiplex (Maybe i), CipherState)
pipeInitiate crypto rootChan (recipient,recipientKey) u = scope "pipeInitiate" $ do
info "starting"
(doneHandshake, encrypt, decrypt) <- liftIO $ C.pipeInitiator crypto recipientKey
handshakeChan <- channel
connectedChan <- channel
handshakeSub <- scope "handshakeSub" $ subscribeTimed handshakeTimeout handshakeChan
connectedSub <- scope "connectedSub" $ subscribeTimed connectionTimeout connectedChan
let chans = (handshakeChan,connectedChan)
info $ "handshake channels " ++ show chans
handshake doneHandshake encrypt decrypt chans handshakeSub connectedSub
where
handshake doneHandshake encrypt decrypt cs@(chanh,chanc) (fetchh,cancelh) (fetchc,cancelc) =
encryptAndSendTo recipient (erase rootChan) encrypt (u,cs) >>
fetchh >> -- sync packet, ignored, but lets us know recipient is listening
go
where
recv = untilDefined $ do
bytes <- fetchc
debug $ "recv " ++ show (B.length <$> bytes)
case bytes of
Nothing -> pure (Just Nothing)
Just bytes -> do
decrypted <- liftIO . atomically $ decrypt bytes
case Get.runGetS deserialize decrypted of
Left err -> info err >> pure Nothing
Right mi -> pure (Just mi)
go = do
ready <- liftIO $ atomically doneHandshake
debug $ "ready: " ++ show ready
case ready of
True -> do
info "handshake complete"
-- encryptAndSendTo recipient chanh encrypt () -- todo: not sure this flush needed
pure (encryptAndSendTo recipient chanc encrypt, recv, (encrypt,decrypt))
False -> do
debug "handshake round trip... "
nest recipient $ send' chanh (encrypt B.empty)
bytes <- fetchh
debug "... handshake round trip completed"
case bytes of
Nothing -> cancelh >> cancelc >> crash "cancelled handshake"
Just bytes -> liftIO (atomically $ decrypt bytes) >> go
-- todo: add access control here, better to bail ASAP (or after 1s delay
-- to discourage sniffing for nodes with access) rather than continuing with
-- handshake if we know we can't accept messages from that party
pipeRespond
:: (Serial o, Serial i, Serial u, Serial node)
=> C.Cryptography key t1 t2 t3 t4 t5 Cleartext
-> (key -> Multiplex Bool)
-> EncryptedChannel u i o
-> (u -> node)
-> B.ByteString
-> Multiplex (key, u, Maybe o -> Multiplex (), Multiplex (Maybe i), CipherState)
pipeRespond crypto allow _ extractSender payload = do
(doneHandshake, senderKey, encrypt, decrypt) <- liftIO $ C.pipeResponder crypto
debug $ "decrypting initial payload"
bytes <- (liftLogged "[Mux.pipeRespond] decrypt" . atomically . decrypt) payload
(u, chans@(handshakeChan,connectedChan)) <- either crash pure $ Get.runGetS deserialize bytes
debug $ "handshake channels: " ++ show chans
let sender = extractSender u
handshakeSub <- subscribeTimed handshakeTimeout handshakeChan
connectedSub <- subscribeTimed connectionTimeout connectedChan
ok <- liftIO $ C.randomBytes crypto 8
nest sender $ send (Channel Type $ channelId handshakeChan) ok
handshake doneHandshake senderKey encrypt decrypt chans handshakeSub connectedSub sender u
where
handshake doneHandshake senderKey encrypt decrypt (chanh,chanc) (fetchh,cancelh) (fetchc,cancelc) sender u = go
where
recv = untilDefined $ do
bytes <- fetchc
case bytes of
Nothing -> pure (Just Nothing)
Just bytes -> do
decrypted <- liftIO . atomically $ decrypt bytes
case Get.runGetS deserialize decrypted of
Left err -> info err >> pure Nothing
Right mi -> pure (Just mi)
checkSenderKey = do
senderKey <- liftIO $ atomically senderKey
case senderKey of
Nothing -> pure ()
Just senderKey -> allow senderKey >>= \ok ->
if ok then pure ()
else liftIO (C.threadDelay delayBeforeFailure) >> crash "disallowed key"
go = do
ready <- liftIO $ atomically doneHandshake
checkSenderKey
case ready of
True -> do
-- encryptAndSendTo sender chanh encrypt () -- todo: not sure this flush needed
Just senderKey <- liftIO $ atomically senderKey
info $ "completed and listening on " ++ show chanc
pure (senderKey, u, encryptAndSendTo sender chanc encrypt, recv, (encrypt,decrypt))
False -> do
nest sender $ send' chanh (encrypt B.empty)
bytes <- fetchh
case bytes of
Nothing -> cancelh >> cancelc >> crash "cancelled handshake"
Just bytes -> liftIO (atomically $ decrypt bytes) >> go
| nightscape/platform | node/src/Unison/Runtime/Multiplex.hs | mit | 19,148 | 0 | 25 | 4,221 | 6,956 | 3,439 | 3,517 | 443 | 7 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.WebGLShaderPrecisionFormat
(js_getRangeMin, getRangeMin, js_getRangeMax, getRangeMax,
js_getPrecision, getPrecision, WebGLShaderPrecisionFormat,
castToWebGLShaderPrecisionFormat, gTypeWebGLShaderPrecisionFormat)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"rangeMin\"]" js_getRangeMin
:: WebGLShaderPrecisionFormat -> IO Int
-- | <https://developer.mozilla.org/en-US/docs/Web/API/WebGLShaderPrecisionFormat.rangeMin Mozilla WebGLShaderPrecisionFormat.rangeMin documentation>
getRangeMin :: (MonadIO m) => WebGLShaderPrecisionFormat -> m Int
getRangeMin self = liftIO (js_getRangeMin (self))
foreign import javascript unsafe "$1[\"rangeMax\"]" js_getRangeMax
:: WebGLShaderPrecisionFormat -> IO Int
-- | <https://developer.mozilla.org/en-US/docs/Web/API/WebGLShaderPrecisionFormat.rangeMax Mozilla WebGLShaderPrecisionFormat.rangeMax documentation>
getRangeMax :: (MonadIO m) => WebGLShaderPrecisionFormat -> m Int
getRangeMax self = liftIO (js_getRangeMax (self))
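-- The helper below is an illustrative addition, not part of the generated
-- bindings: it bundles the three accessors of this module for a format object
-- obtained elsewhere (for example from the rendering-context bindings).
_describePrecisionFormat ::
  (MonadIO m) => WebGLShaderPrecisionFormat -> m (Int, Int, Int)
_describePrecisionFormat self = do
  lo <- getRangeMin self
  hi <- getRangeMax self
  p <- getPrecision self
  return (lo, hi, p)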
foreign import javascript unsafe "$1[\"precision\"]"
js_getPrecision :: WebGLShaderPrecisionFormat -> IO Int
-- | <https://developer.mozilla.org/en-US/docs/Web/API/WebGLShaderPrecisionFormat.precision Mozilla WebGLShaderPrecisionFormat.precision documentation>
getPrecision :: (MonadIO m) => WebGLShaderPrecisionFormat -> m Int
getPrecision self = liftIO (js_getPrecision (self)) | manyoo/ghcjs-dom | ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/WebGLShaderPrecisionFormat.hs | mit | 2,264 | 18 | 8 | 247 | 492 | 300 | 192 | 31 | 1 |
-- | Custom wrapper around @hslogger.Priority@.
module System.Wlog.Severity
( Severity (..)
, Severities
, LogRecord(..)
-- * Severity utilities
, allSeverities
, severityPlus
, debugPlus, infoPlus
, noticePlus
, warningPlus, errorPlus
, excludeError
) where
import Universum
import Data.Yaml (FromJSON, ToJSON)
import qualified Data.Set as Set
-- | Severity is level of log message importance. It uniquely
-- determines which messages to print.
data Severity
= Debug -- ^ Debug messages
| Info -- ^ Information
| Notice -- ^ Important (more than average) information
| Warning -- ^ General warnings
| Error -- ^ General errors/severe errors
deriving (Eq, Ord, Enum, Bounded, Show, Read, Generic, Typeable)
instance FromJSON Severity
instance ToJSON Severity
-- | Set of 'Severity'.
type Severities = Set Severity
-- | Internal type of log records.
data LogRecord = LR !Severity !Text deriving Show
-- | 'Set' of all 'Severity's.
allSeverities :: Set Severity
allSeverities = Set.fromAscList [minBound .. maxBound]
-- | Returns the 'Set' of 'Severity's greater than or equal to the given value.
severityPlus :: Severity -> Set Severity
severityPlus s = Set.fromAscList [s .. maxBound]
-- | Returns 'Set' of 'Severity's not less than 'Debug'.
debugPlus :: Set Severity
debugPlus = severityPlus Debug
-- | Returns 'Set' of 'Severity's not less than 'Info'.
infoPlus :: Set Severity
infoPlus = severityPlus Info
-- | Returns 'Set' of 'Severity's not less than 'Notice'.
noticePlus :: Set Severity
noticePlus = severityPlus Notice
-- | Returns 'Set' of 'Severity's not less than 'Warning'.
warningPlus :: Set Severity
warningPlus = severityPlus Warning
-- | Returns 'Set' of 'Severity's not less than 'Error'.
errorPlus :: Set Severity
errorPlus = Set.singleton Error
-- | Excludes 'Error' from the 'Set' of 'Severity's.
excludeError :: Set Severity -> Set Severity
excludeError = Set.delete Error
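-- The checks below are an illustrative addition, not part of the original
-- module; they use only the definitions and imports already in scope here.
_severityExamplesHold :: Bool
_severityExamplesHold =
       warningPlus == Set.fromList [Warning, Error]
    && excludeError debugPlus == Set.fromList [Debug, Info, Notice, Warning]
    && allSeverities == severityPlus Debug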
| serokell/log-warper | src/System/Wlog/Severity.hs | mit | 2,055 | 0 | 7 | 447 | 348 | 200 | 148 | 44 | 1 |
{-# LANGUAGE MonadComprehensions #-}
{-# LANGUAGE RebindableSyntax #-}
module Set3 where
import MCPrelude
-- 1. Generating combinations
allPairs :: [a] -> [b] -> [(a,b)]
allPairs [] _ = []
allPairs (x:xs) ys = map (\y -> (x, y)) ys ++ allPairs xs ys
-- 2. Poker hands
data Card = Card {rank :: Int, suite :: String}
instance Show Card where
show c = (show . rank $ c) ++ suite c
allCards :: [Int] -> [String] -> [Card]
allCards [] _ = []
allCards (x:xs) ys = map (Card x) ys ++ allCards xs ys
-- 3. Generalizing pairs and cards
allCombs :: (a -> b -> c) -> [a] -> [b] -> [c]
allCombs _ [] _ = []
allCombs f (x:xs) ys = map (f x) ys ++ allCombs f xs ys
allPairs' = allCombs (\x y -> (x, y))
allCards' = allCombs Card
instance Eq Card where
(==) (Card r1 s1) (Card r2 s2) = r1 == r2 && s1 == s2
-- 4. Combinations of three things
allCombs3 :: (a -> b -> c -> d) -> [a] -> [b] -> [c] -> [d]
allCombs3 _ [] _ _ = []
allCombs3 f (x:xs) ys zs = allCombs (f x) ys zs ++ allCombs3 f xs ys zs
-- 5. Combinations of more things
combStep :: [a -> b] -> [a] -> [b]
combStep [] _ = []
combStep (f:fs) xs = map f xs ++ combStep fs xs
allCombs' :: (a -> b -> c) -> [a] -> [b] -> [c]
allCombs' f xs = combStep (map f xs)
allCombs3' :: (a -> b -> c -> d) -> [a] -> [b] -> [c] -> [d]
allCombs3' f xs ys zs = map f xs `combStep` ys `combStep` zs
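-- Illustrative sketch, not part of the original exercise file. It assumes
-- MCPrelude provides cardRanks :: [Int] and cardSuits :: [String], as the
-- monad challenges describe; treat those names as placeholders otherwise.
_deckSketch :: [Card]
_deckSketch = allCards cardRanks cardSuits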
| gafiatulin/monad-challenges | src/Set3.hs | mit | 1,354 | 0 | 9 | 322 | 724 | 391 | 333 | 30 | 1 |
module BasicTypes where
import qualified Data.Map
data Bit = B0 | B1 deriving (Show, Eq)
data Hex = H0 | H1 | H2 | H3 | H4 | H5 | H6 | H7 | H8 | H9 | HA | HB | HC | HD | HE | HF deriving (Show, Eq)
type Octet = (Bit,Bit,Bit,Bit,Bit,Bit,Bit,Bit)
toBit :: Char -> Maybe Bit
toBit = (flip Data.Map.lookup) (Data.Map.fromList [('0', B0), ('1', B1)])
toHex :: Char -> Maybe Hex
toHex = (flip Data.Map.lookup) (Data.Map.fromList [('0', H0), ('1', H1), ('2', H2), ('3', H3),
('4', H4), ('5', H5), ('6', H6), ('7', H7),
('8', H8), ('9', H9), ('A', HA), ('B', HB),
('C', HC), ('D', HD), ('E', HE), ('F', HF)])
hexesToBits :: [Hex] -> [Bit]
hexesToBits = concat . map (\x -> case x of H0 -> [B0,B0,B0,B0]; H1 -> [B0,B0,B0,B1]; H2 -> [B0,B0,B1,B0]; H3 -> [B0,B0,B1,B1]
H4 -> [B0,B1,B0,B0]; H5 -> [B0,B1,B0,B1]; H6 -> [B0,B1,B1,B0]; H7 -> [B0,B1,B1,B1]
H8 -> [B1,B0,B0,B0]; H9 -> [B1,B0,B0,B1]; HA -> [B1,B0,B1,B0]; HB -> [B1,B0,B1,B1]
HC -> [B1,B1,B0,B0]; HD -> [B1,B1,B0,B1]; HE -> [B1,B1,B1,B0]; HF -> [B1,B1,B1,B1])
-- Pack exactly eight bits into an Octet; other input lengths are not handled.
makeOctet :: [Bit] -> Octet
makeOctet (a:b:c:d:e:f:g:h:[]) = (a,b,c,d,e,f,g,h)
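-- Illustrative sketch, not part of the original module: decode a hex string
-- into octets with the helpers defined here (bitsToOctets pads a short final
-- group with B0).
_exampleOctets :: Maybe [Octet]
_exampleOctets = fmap (bitsToOctets . hexesToBits) (mapM toHex "A1")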
bitsToOctets :: [Bit] -> [Octet]
bitsToOctets bits = let (eight, rest) = splitAt 8 bits in
case rest of [] -> [makeOctet (take 8 (bits ++ repeat B0))]
otherwise -> makeOctet eight : bitsToOctets rest | paradigmsort/ASN1_Happy_LALR1 | BasicTypes.hs | mit | 1,658 | 0 | 16 | 592 | 880 | 532 | 348 | 23 | 16 |
module CodeGen.Metadata.CoLimit where
import Data.Maybe
import Data.List
import Data.Serialize
import GHC.Generics
import Data.Tuple.HT
import Control.Monad.Trans.Either
import Utils.Utils
import CodeGen.HaskellCode
import Server.NutleyInstance
import CodeGen.Metadata.Metadata
import Data.Name
import Data.Schema
import Data.Types
import CodeGen.TupleUtils
import CodeGen.Metadata.Shriek
import CodeGen.NutleyQueryUtils
import qualified Crypto.Hash.SHA256 as SHA
coLimitOne :: [(DBMetadata,NutleyInstance)] -> (DBMetadata,NutleyInstance)
coLimitOne inners' =
let inners'' :: [(DBMetadata,[NutleyInstance])]
inners'' = concatMap (\(db,inst) -> case inst of
(CoLimit ins) -> zipWith (,) (coLimitInnerMetadatas db) ins
_ -> [(db,[inst])])
inners'
inners = map (\((md,c):cs) -> (md,c ++ (concatMap snd cs)))
$ groupBy ((==) `on` fst) $ sortBy (compare `on` fst) inners''
in
(
CoLimitMetadata
{
coLimitName = "coprd_" ++ (name $ fst $ head inners),
coLimitInnerMetadatas = map fst inners,
coLimitHashCode = SHA.finalize $ foldr (flip SHA.update) SHA.init $ map (dbHashCode.fst) inners
},
CoLimit $ map snd inners
)
coProduct :: [DBMetadata] -> DBMetadata
coProduct inners = CoLimitMetadata
{
coLimitName = cim "_coprd_" name inners,
coLimitInnerMetadatas = zipWith shriek incs inners,
coLimitHashCode = SHA.finalize $ foldr (flip SHA.update) SHA.init $ map dbHashCode inners
}
where (coProdSchema,incs) = schemaCoProduct $ map dbSchema inners
coLimitInnerMaterializeQueries db ss =
map (\(i,imd) -> ("imp" ++ (show i), MaterializeQuery imd ss,"ins_" ++ (show i)))
$ filter ((nontrivialMaterialization ss).snd) $ zip [1..] $ coLimitInnerMetadatas db
codeCoLimitMaterialize metadata ss =
(map (\(x,y,_) -> (x,y)) innerMats,
Fun (materializeFName metadata ss) (materializeType metadata ss)
$ Lam (Fnp "CoLimit" $ [Lstp $ map Ltp $ map (\(i,_) -> "ins_" ++ (show i)) $ zip [1..] $ coLimitInnerMetadatas metadata])
$ Do
$ (map (\(i,(mod,q,nms)) -> (Ltp $ "kinz_" ++ (show i),
c_mapM (Lit $ mod ++ "_" ++ (name q)) $ Lit nms))
$ zip [1..] innerMats) ++
[do_return $ tupConcat (length $ subSchemaSimplices ss) $ c_concat $ Lst $ map (\(i,_) -> Lit $ "kinz_" ++ (show i))
$ zip [1..] innerMats]
)
where innerMats = coLimitInnerMaterializeQueries metadata ss
| jvictor0/JoSQL | CodeGen/Metadata/CoLimit.hs | mit | 2,561 | 0 | 22 | 616 | 940 | 516 | 424 | 53 | 2 |
{-# LANGUAGE
NoImplicitPrelude,
MultiParamTypeClasses,
FlexibleInstances,
FunctionalDependencies
#-}
module OctoTactics.Util.Class where
import Data.Functor (Functor, fmap)
import Data.Either (Either(..))
import Data.Function ((.), id)
class Functor' f a b where
fmap' :: (a -> b) -> f a -> f b
instance Functor f => Functor' f a b where
fmap' = fmap
class Swap f where
swap :: f a b -> f b a
instance Swap Either where
swap (Left x) = Right x
swap (Right y) = Left y
instance Swap (,) where
swap (x, y) = (y, x)
class Assoc a b where
assoc :: a -> b
instance Assoc ((a, b), c) (a, (b, c)) where assoc ((x, y), z) = (x, (y, z))
instance Assoc (a, (b, c)) ((a, b), c) where assoc (x, (y, z)) = ((x, y), z)
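-- Illustrative sketch, not part of the original module: composing the classes
-- above to rotate a nested pair, using only the imports already present.
_rotatePair :: ((a, b), c) -> ((c, b), a)
_rotatePair = swap . fmap' swap . assoc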
instance Assoc (Either a (Either b c)) (Either (Either a b) c) where assoc (Left x ) = Left (Left x)
assoc (Right (Left y)) = Left (Right y)
assoc (Right (Right z)) = Right z
instance Assoc (Either (Either a b) c) (Either a (Either b c)) where assoc (Left (Left x)) = Left x
assoc (Left (Right y)) = Right (Left y)
assoc (Right z ) = Right (Right z) | Solonarv/OctoTactics | OctoTactics/Util/Class.hs | mit | 1,433 | 0 | 10 | 583 | 600 | 321 | 279 | 30 | 0 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module Text.InflectionsSpec (spec) where
import Data.Void
import Test.Hspec
import Test.QuickCheck
import Text.Inflections
import Text.Megaparsec
import Data.Text
import qualified Data.List.NonEmpty as NE
import qualified Data.Set as S
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
#endif
instance Arbitrary Text where
arbitrary = pack <$> (arbitrary :: Gen String)
arbitraryParseErrorBundle :: Gen (ParseErrorBundle Text Void)
arbitraryParseErrorBundle = ParseErrorBundle <$> nonEmptyParseErrors <*> arbitraryPosState
where
posArbitrary = mkPos <$> ((+1) . abs <$> arbitrary)
nonEmptyParseErrors :: Gen (NE.NonEmpty (ParseError Text Void))
nonEmptyParseErrors = NE.fromList <$> listOf1 arbitraryParseError
arbitrarySourcePos :: Gen SourcePos
arbitrarySourcePos = SourcePos <$> arbitrary <*> posArbitrary <*> posArbitrary
arbitraryPosState :: Gen (PosState Text)
arbitraryPosState =
PosState
<$> arbitrary
<*> arbitrary
<*> arbitrarySourcePos
<*> posArbitrary
<*> arbitrary
arbitraryParseError :: Gen (ParseError Text Void)
arbitraryParseError = oneof [trivialError, fancyError]
where
trivialError = TrivialError <$> arbitrary <*> maybeErrorItem <*> setErrorItem
fancyError = FancyError <$> arbitrary <*> setErrorFancy
setErrorFancy = S.fromList <$> listOf arbitraryErrorFancy
maybeErrorItem = oneof [ Just <$> arbitraryErrorItem, return Nothing]
setErrorItem = S.fromList <$> listOf arbitraryErrorItem
arbitraryErrorFancy :: Gen (ErrorFancy e)
arbitraryErrorFancy = oneof [ ErrorFail <$> arbitrary ]
arbitraryErrorItem :: (Arbitrary e) => Gen (ErrorItem e)
arbitraryErrorItem = oneof [ tokens_, labels_, return EndOfInput ]
where
tokens_ = Tokens <$> (NE.fromList <$> listOf1 arbitrary)
labels_ = Label <$> (NE.fromList <$> listOf1 arbitrary)
spec :: Spec
spec = do
describe "toUnderscore" $ do
it "converts camel case to snake case" $
toUnderscore "camelCasedText" `shouldBe` Right "camel_cased_text"
it "converts camel case to snake case with numbers" $
toUnderscore "ipv4Address" `shouldBe` Right "ipv4_address"
describe "toDashed" $
it "converts camel case to dashed" $
toDashed "camelCasedText" `shouldBe` Right "camel-cased-text"
describe "toCamelCased" $ do
context "when the first argument is False" $
it "converts snake case to camel case" $
toCamelCased False "underscored_text" `shouldBe` Right "underscoredText"
context "when the first argument is True" $
it "converts snake case to camel case with the first word capitalized" $
toCamelCased True "underscored_text" `shouldBe` Right "UnderscoredText"
describe "toHumanized" $ do
context "when the first argument is False" $
it "converts snake case to human-readable form with lower-case initial letter" $
toHumanized False "underscored_text" `shouldBe` Right "underscored text"
context "when the first argument is True" $
it "converts snake case to human-readable form with the first word capitalized" $
toHumanized True "underscored_text" `shouldBe` Right "Underscored text"
describe "betterThrow" $ do
context "when given a parse error" $
it "throws the correct exception" $
property $ forAll arbitraryParseErrorBundle $ \err ->
betterThrow (Left err) `shouldThrow`
(== InflectionParsingFailed err)
context "when given a value in Right" $
it "returns the value" $
property $ \x ->
betterThrow (Right x) `shouldReturn` (x :: Int)
| stackbuilders/inflections-hs | test/Text/InflectionsSpec.hs | mit | 3,737 | 0 | 15 | 777 | 827 | 423 | 404 | 77 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : SPI
-- License : MIT (see the LICENSE file)
-- Maintainer : Felix Klein ([email protected])
--
-- Serial Peripheral Interface
--
-----------------------------------------------------------------------------
module SPI
( transferIO
, setSpeed
, setMode
) where
-----------------------------------------------------------------------------
import Data.Word
( Word8
)
import Control.Monad.State
( get
, put
, when
)
-----------------------------------------------------------------------------
import Data
( PortNumber
, ST(..)
, OP
, Size
, Mode
)
import Commands
( Command(..)
, SubCommand(..)
)
import Utils
( sendCmd
, verify
, i2W4
, w42I
, readData
, writeData
, splitBlocks
)
-----------------------------------------------------------------------------
-- | Transfers data over the interface.
transferIO
:: [Word8] -> Size -> OP [Word8]
transferIO bs n = do
initInterface 0
let
i = max 0 $ n - length bs
bs' = bs ++ replicate i 0x00
(sa,_) <- sendCmd SPI ASSCS [0x00, 0x00]
verify (sa == 0)
"assertion failed"
let x = [0x00, 0x00, 0x00, if n > 0 then 0x01 else 0x00]
++ i2W4 (length bs')
(ss,_) <- sendCmd SPI START x
verify (ss == 0)
"start failed"
rs <- transfer [] n $ splitBlocks 64 bs'
(se,y) <- sendCmd SPI END [0x00,0x00]
if se >= 192 then do
verify (length y == 8)
"Incompatible response"
let
(f,s) = splitAt 4 y
vo = w42I f
vi = w42I s
verify (vo == length bs' && vi == n)
"Read/Write failure"
else if se >= 128 then do
verify (length y == 4)
"Incompatible response"
verify (w42I y == length bs')
"Read failure"
else when (se >= 64) $ do
verify (length y == 4)
"Incompatible response"
verify (w42I y == n)
"Read failure"
(sc,_) <- sendCmd SPI ASSCS [0x00, 0x01]
verify (sc == 0)
"clear failed"
return $ concat rs
where
transfer a i xs = case xs of
[]
| i > 64 -> do
x <- readData 64
transfer (x:a) (i - 64) []
| i > 0 -> do
x <- readData i
return $ reverse $ x : a
| otherwise ->
return $ reverse a
(y:yr) -> do
writeData y
if i > 64 then do
x <- readData 64
transfer (x:a) (i - 64) yr
else if i > 0 then do
x <- readData i
transfer (x:a) 0 yr
else
transfer a 0 yr
-----------------------------------------------------------------------------
-- | Sets the speed of the interface.
setSpeed
:: Int -> OP ()
setSpeed n = do
initInterface 0
(s,x) <- sendCmd SPI SPEED (0x00 : i2W4 n)
verify (s == 0 && length x == 4)
"Speed setting failed "
verify (w42I x == n)
("Speed count set correctly" ++ show x)
-----------------------------------------------------------------------------
-- | Sets the mode of the interface.
setMode
:: Mode -> OP ()
setMode v = do
initInterface 0
(s,x) <- sendCmd SPI MODE [0x00, v]
verify (s == 0 && null x)
"Mode setting failed"
-----------------------------------------------------------------------------
initInterface
:: PortNumber -> OP ()
initInterface pn = do
st <- get
case fSPIPort st of
Just _ -> return ()
Nothing -> do
(so, x) <- sendCmd SPI OPEN [pn]
verify (so == 0 && null x)
"Opening SPI port failed"
put st {
fSPIPort = Just pn,
cleanup = do
(sc, y) <- sendCmd SPI CLOSE [pn]
verify (sc == 0 && null y)
"Closing SPI port failed"
cleanup st
}
-----------------------------------------------------------------------------
| reactive-systems/icedude | src/SPI.hs | mit | 3,911 | 0 | 20 | 1,178 | 1,230 | 621 | 609 | 119 | 7 |
module CFDI.Types.ProductId where
import CFDI.Chainable
import CFDI.Types.Type
import Data.Text (Text, pack, unpack)
import Text.Regex (mkRegex)
import Text.Regex.Posix (matchTest)
newtype ProductId = ProductId Text deriving (Eq, Show)
instance Chainable ProductId where
chain (ProductId i) = i
instance Type ProductId where
parseExpr str
| matchTest regExp str = Right . ProductId $ pack str
| otherwise = Left $ DoesNotMatchExpr "[^|]{1,100}"
where
regExp = mkRegex "^(.|á|é|í|ó|ú|ñ|Á|É|Í|Ó|Ú|Ñ){1,100}$"
render (ProductId i) = unpack i
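-- Illustrative check, not part of the original module: a string accepted by
-- 'parseExpr' renders back unchanged.
_productIdRoundTrips :: Bool
_productIdRoundTrips =
  either (const False) (\pid -> render (pid :: ProductId) == "ABC-123")
         (parseExpr "ABC-123")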
| yusent/cfdis | src/CFDI/Types/ProductId.hs | mit | 593 | 0 | 9 | 112 | 177 | 94 | 83 | 15 | 0 |
{-# htermination (maxBoundTup0 :: Tup0) #-}
import qualified Prelude
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data Tup0 = Tup0 ;
maxBoundTup0 :: Tup0
maxBoundTup0 = Tup0;
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/basic_haskell/maxBound_1.hs | mit | 203 | 0 | 8 | 42 | 56 | 34 | 22 | 6 | 1 |
{-# LANGUAGE TypeFamilies, UndecidableInstances #-}
module Arithmetic where
import Boolean
( False
, True
, And
, If
)
import Ordering
( LT
, EQ
, GT
, OEq
)
-- Natural numbers
data Z -- Zero
data S n -- Successor of n (n = Z or n = S n')
type N0 = Z
type N1 = S N0
type N2 = S N1
-- Natural comparison
type family NCmp m n
type instance NCmp Z Z = EQ
type instance NCmp Z (S n') = LT
type instance NCmp (S m') Z = GT
type instance NCmp (S m') (S n') = NCmp m' n'
-- Natural addition
type family NAdd m n
type instance NAdd m Z = m
type instance NAdd m (S n') = NAdd (S m) n'
-- Natural subtraction
type family NSub m n
type instance NSub m Z = m
type instance NSub Z Z = Z
type instance NSub (S m') (S n') = NSub m' n'
-- Natural multiplication
type family NMul m n
type instance NMul m Z = N0
type instance NMul m (S n') = NAdd m (NMul m n')
-- Natural division
type family NDiv m n
type instance NDiv Z (S n') = Z
type instance NDiv (S m') (S n') = If (OEq LT (NCmp m' n'))
Z
(S (NDiv (NSub m' n') (S n')))
-- Natural power
type family NPow m n
type instance NPow m Z = N1
type instance NPow m (S n') = NMul m (NPow m n')
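-- Named results added for illustration (not in the original module): when
-- used, N3 reduces to S (S (S Z)) and N4 to S (S (S (S Z))).
type N3 = NAdd N2 N1
type N4 = NMul N2 N2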
-- Integers
data I a b -- Integers of the form a-b (a and b are natural numbers)
type I_1 = I N0 N1
type I0 = I N0 N0
type I1 = I N1 N0
type I2 = I N2 N0
-- Integer from natural
type family IFromN n
type instance IFromN n = I n N0
-- Integer to natural
type family NFromI n
type instance NFromI (I m Z) = m
-- Integer comparison
type family ICmp m n
type instance ICmp (I am bm) (I an bn) = NCmp (NAdd am bn) (NAdd an bm)
-- Integer normalization
type family INorm n
type instance INorm (I a Z) = I a Z
type instance INorm (I Z b) = I Z b
type instance INorm (I (S a') (S b')) = I a' b'
-- Integer signum
type family ISign n
type instance ISign n = If (OEq LT (ICmp n I0))
I_1
(If (OEq GT (ICmp n I0))
I1
I0)
-- Integer absolute value
type family IAbs n
type instance IAbs n = IMul n (ISign n)
-- Integer addition
type family IAdd m n
type instance IAdd (I am bm) (I an bn)
= INorm (I (NAdd am an) (NAdd bm bn))
-- Integer negation
type family INeg n
type instance INeg (I a b) = INorm (I b a)
-- Integer subtraction
type family ISub m n
type instance ISub m n = IAdd m (INeg n)
-- Integer multiplication
type family IMul m n
type instance IMul (I am bm) (I an bn)
= INorm (I (NAdd (NMul am an) (NMul bm bn)) (NAdd (NMul am bn) (NMul bm an)))
-- Integer division
type family IDiv m n
type instance IDiv m n
= IMul (IFromN (NDiv (NFromI (IAbs m)) (NFromI (IAbs n)))) (IMul (ISign m) (ISign n))
-- Integer power (with natural exponent)
type family IPow m n
type instance IPow m Z = I1
type instance IPow m (S n') = IMul m (IPow m n')
| ahihi/type-computation | Arithmetic.hs | cc0-1.0 | 3,012 | 0 | 13 | 950 | 1,167 | 653 | 514 | -1 | -1 |
{-# LANGUAGE UnicodeSyntax #-}
-- Copyright (C) 2008 Diego Souza <[email protected]>
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License along
-- with this program; if not, write to the Free Software Foundation, Inc.,
-- 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
module Battleship.UI.Window (mainloop) where
import qualified Battleship.Battleship as B
import qualified Battleship.Vector as V
import qualified Battleship.Game.Game as GM
import qualified Battleship.UI.Player as P
import qualified Data.IORef as I
import qualified Graphics.Rendering.Cairo as C
import qualified Graphics.UI.Gtk as G
import qualified Graphics.UI.Gtk.Glade as GG
import qualified Graphics.UI.Gtk.Gdk.Events as E
import qualified Control.Concurrent as CC
import qualified Control.Monad as M
_draw_game :: (G.WidgetClass widget) => I.IORef GM.GameT -> (GM.GameT -> GM.PlayerT) -> (GM.GameT -> GM.PlayerT) -> widget -> IO Bool
_draw_game rgame player oponent window = do
(width, height) <- G.widgetGetSize window
drawwin <- G.widgetGetDrawWindow window
game <- I.readIORef rgame
I.writeIORef rgame (GM.time_step game)
G.renderWithDrawable drawwin $ do
C.setSourceRGB 1 1 1
C.paint
P.draw_player (player game) (oponent game) (fromIntegral width) (fromIntegral height)
P.draw_console (player game)
return True
_draw_radar :: (G.WidgetClass widget) => I.IORef GM.GameT -> widget -> IO Bool
_draw_radar rgame window = do
(width, height) <- G.widgetGetSize window
drawwin <- G.widgetGetDrawWindow window
game <- I.readIORef rgame
I.writeIORef rgame (GM.time_step game)
G.renderWithDrawable drawwin $ do
C.setSourceRGB 1 1 1
C.paint
C.save
let w = fromIntegral width
h = fromIntegral height
s0@(V.Vector2D (s0x, s0y)) = snd $ GM.space0 (GM.player1 game)
s1@(V.Vector2D (s1x, s1y)) = snd $ GM.space0 (GM.player2 game)
(V.Vector2D (smx, smy)) = V.vzip_with max (V.vmap abs s0) (V.vmap abs s1)
xpos x = x/(2*smx)
ypos y = y/(2*smy)
C.scale w h
C.setSourceRGB 1 0 0
C.arc (min 0.9 $ xpos s0x+0.6) (min 0.9 $ ypos s0y+0.6) 0.05 0 (2*pi)
C.fill
C.setSourceRGB 0 1 0
C.arc (min 0.9 $ xpos s1x+0.6) (min 0.9 $ ypos s1y+0.6) 0.05 0 (2*pi)
C.fill
C.restore
return True
_read_ships :: (G.ComboBoxClass a) => a -> IO (B.BattleshipT, B.BattleshipT)
_read_ships combo = do
ship <- G.comboBoxGetActive combo
case ship of
0 -> return (B.AircraftCarrier 0 0, B.AircraftCarrier 0 0)
1 -> return (B.Battleship 0 0, B.Battleship 0 0)
2 -> return (B.Destroyer 0 0, B.Destroyer 0 0)
3 -> return (B.Submarine 0 0, B.Submarine 0 0)
4 -> return (B.Cruiser 0 0, B.Cruiser 0 0)
_ -> error "Bye!"
_startup_dialog :: IO (B.BattleshipT, B.BattleshipT)
_startup_dialog = do
dialogXmlM <- GG.xmlNew "resources/glade/startup.glade"
let dialogXml = case dialogXmlM
of (Just xml) -> xml
Nothing -> error "can't find the glade file \"startup.glade\" in the current directory"
dialog <- GG.xmlGetWidget dialogXml G.castToDialog "dialog1"
response <- G.dialogRun dialog
combo <- GG.xmlGetWidget dialogXml G.castToComboBox "cb_ships"
case response of
(G.ResponseUser 1) -> G.widgetHideAll dialog >> _read_ships combo
_ -> error $ "Bye!"
mainloop :: IO ()
mainloop = do
G.initGUI
window_p0 <- G.windowNew
window_p1 <- G.windowNew
window_rd <- G.windowNew
(shipa, shipb) <- _startup_dialog
G.windowSetTitle window_p0 "Haskell Animated Battleship - Player1"
G.windowSetTitle window_p1 "Haskell Animated Battleship - Player2"
G.windowSetTitle window_rd "Haskell Animated Battleship - Radar"
M.forM [(window_p0,500,500), (window_p1,500,500), (window_rd,100,100)] $ \(w, ww, wh) -> do
G.windowSetDecorated w True
G.windowSetResizable w True
-- G.windowSetPosition window_p0 G.WinPosCenterAlways
G.widgetSetAppPaintable w True
G.windowSetDefaultSize w ww wh
-- TODO: initialize the game
let player1 = GM.HumanPlayer shipa (V.Vector2D (0,0), V.Vector2D (1000,500))
player2 = GM.HumanPlayer shipb (V.Vector2D (0,0), V.Vector2D (1000,2000))
rgame <- I.newIORef $ GM.TwoPlayerGame player1 player2
M.forM [window_p0, window_p1, window_rd] $ \w ->
G.onKeyPress w $ \x ->
case (E.eventKeyChar x)
of (Just key) -> I.modifyIORef rgame (GM.exec_command key) >> (return $ E.eventSent x)
Nothing -> (return $ E.eventSent x)
G.timeoutAdd (CC.yield >> _draw_game rgame (GM.player1) (GM.player2) window_p0) 150
G.timeoutAdd (CC.yield >> _draw_game rgame (GM.player2) (GM.player1) window_p1) 150
G.timeoutAdd (CC.yield >> _draw_radar rgame window_rd) 150
M.mapM (G.onDestroy >> G.widgetShowAll) [window_p0, window_p1, window_rd]
G.mainGUI
| dgvncsz0f/boats | src/main/haskell/Battleship/UI/Window.hs | gpl-2.0 | 5,530 | 0 | 18 | 1,235 | 1,766 | 894 | 872 | 101 | 6 |
module ArithPrettyPrint (render_term) where
import Arith
import Text.PrettyPrint
pretty_term :: Term -> Doc
pretty_term Tru = text "#t"
pretty_term Fls = text "#f"
pretty_term Zero = text "#z"
pretty_term (Succ x) = pr_func_app "succ" [x]
pretty_term (Pred x) = pr_func_app "pred" [x]
pretty_term (IsZero x) = pr_func_app "zero?" [x]
pretty_term (If x y z) = pr_func_app "if" [x, y, z]
pr_func_app :: String -> [Term] -> Doc
pr_func_app name = parens . hsep . (:) (text name) . map pretty_term
render_term :: Term -> String
render_term = render . pretty_term
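-- Illustrative example, not part of the original module. Expected output:
-- "(if (zero? #z) #t (succ #z))".
_exampleRendering :: String
_exampleRendering = render_term (If (IsZero Zero) Tru (Succ Zero))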
| robertclancy/tapl | arith/ArithPrettyPrint.hs | gpl-2.0 | 563 | 0 | 9 | 94 | 225 | 117 | 108 | 15 | 1 |
{-# LANGUAGE BangPatterns, DeriveDataTypeable, DeriveGeneric, FlexibleInstances, MultiParamTypeClasses, OverloadedStrings #-}
{-# OPTIONS_GHC -w #-}
module GTFS.Realtime.Internal.Com.Google.Transit.Realtime.Alert.Effect (Effect(..)) where
import Prelude ((+), (/), (.))
import qualified Prelude as Prelude'
import qualified Data.List as Prelude'
import qualified Data.Typeable as Prelude'
import qualified GHC.Generics as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
data Effect = NO_SERVICE
| REDUCED_SERVICE
| SIGNIFICANT_DELAYS
| DETOUR
| ADDITIONAL_SERVICE
| MODIFIED_SERVICE
| OTHER_EFFECT
| UNKNOWN_EFFECT
| STOP_MOVED
deriving (Prelude'.Read, Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data, Prelude'.Generic)
instance P'.Mergeable Effect
instance Prelude'.Bounded Effect where
minBound = NO_SERVICE
maxBound = STOP_MOVED
instance P'.Default Effect where
defaultValue = NO_SERVICE
toMaybe'Enum :: Prelude'.Int -> P'.Maybe Effect
toMaybe'Enum 1 = Prelude'.Just NO_SERVICE
toMaybe'Enum 2 = Prelude'.Just REDUCED_SERVICE
toMaybe'Enum 3 = Prelude'.Just SIGNIFICANT_DELAYS
toMaybe'Enum 4 = Prelude'.Just DETOUR
toMaybe'Enum 5 = Prelude'.Just ADDITIONAL_SERVICE
toMaybe'Enum 6 = Prelude'.Just MODIFIED_SERVICE
toMaybe'Enum 7 = Prelude'.Just OTHER_EFFECT
toMaybe'Enum 8 = Prelude'.Just UNKNOWN_EFFECT
toMaybe'Enum 9 = Prelude'.Just STOP_MOVED
toMaybe'Enum _ = Prelude'.Nothing
instance Prelude'.Enum Effect where
fromEnum NO_SERVICE = 1
fromEnum REDUCED_SERVICE = 2
fromEnum SIGNIFICANT_DELAYS = 3
fromEnum DETOUR = 4
fromEnum ADDITIONAL_SERVICE = 5
fromEnum MODIFIED_SERVICE = 6
fromEnum OTHER_EFFECT = 7
fromEnum UNKNOWN_EFFECT = 8
fromEnum STOP_MOVED = 9
toEnum
= P'.fromMaybe
(Prelude'.error
"hprotoc generated code: toEnum failure for type GTFS.Realtime.Internal.Com.Google.Transit.Realtime.Alert.Effect")
. toMaybe'Enum
succ NO_SERVICE = REDUCED_SERVICE
succ REDUCED_SERVICE = SIGNIFICANT_DELAYS
succ SIGNIFICANT_DELAYS = DETOUR
succ DETOUR = ADDITIONAL_SERVICE
succ ADDITIONAL_SERVICE = MODIFIED_SERVICE
succ MODIFIED_SERVICE = OTHER_EFFECT
succ OTHER_EFFECT = UNKNOWN_EFFECT
succ UNKNOWN_EFFECT = STOP_MOVED
succ _
= Prelude'.error "hprotoc generated code: succ failure for type GTFS.Realtime.Internal.Com.Google.Transit.Realtime.Alert.Effect"
pred REDUCED_SERVICE = NO_SERVICE
pred SIGNIFICANT_DELAYS = REDUCED_SERVICE
pred DETOUR = SIGNIFICANT_DELAYS
pred ADDITIONAL_SERVICE = DETOUR
pred MODIFIED_SERVICE = ADDITIONAL_SERVICE
pred OTHER_EFFECT = MODIFIED_SERVICE
pred UNKNOWN_EFFECT = OTHER_EFFECT
pred STOP_MOVED = UNKNOWN_EFFECT
pred _
= Prelude'.error "hprotoc generated code: pred failure for type GTFS.Realtime.Internal.Com.Google.Transit.Realtime.Alert.Effect"
instance P'.Wire Effect where
wireSize ft' enum = P'.wireSize ft' (Prelude'.fromEnum enum)
wirePut ft' enum = P'.wirePut ft' (Prelude'.fromEnum enum)
wireGet 14 = P'.wireGetEnum toMaybe'Enum
wireGet ft' = P'.wireGetErr ft'
wireGetPacked 14 = P'.wireGetPackedEnum toMaybe'Enum
wireGetPacked ft' = P'.wireGetErr ft'
instance P'.GPB Effect
instance P'.MessageAPI msg' (msg' -> Effect) Effect where
getVal m' f' = f' m'
instance P'.ReflectEnum Effect where
reflectEnum
= [(1, "NO_SERVICE", NO_SERVICE), (2, "REDUCED_SERVICE", REDUCED_SERVICE), (3, "SIGNIFICANT_DELAYS", SIGNIFICANT_DELAYS),
(4, "DETOUR", DETOUR), (5, "ADDITIONAL_SERVICE", ADDITIONAL_SERVICE), (6, "MODIFIED_SERVICE", MODIFIED_SERVICE),
(7, "OTHER_EFFECT", OTHER_EFFECT), (8, "UNKNOWN_EFFECT", UNKNOWN_EFFECT), (9, "STOP_MOVED", STOP_MOVED)]
reflectEnumInfo _
= P'.EnumInfo
(P'.makePNF (P'.pack ".transit_realtime.Alert.Effect") ["GTFS", "Realtime", "Internal"]
["Com", "Google", "Transit", "Realtime", "Alert"]
"Effect")
["GTFS", "Realtime", "Internal", "Com", "Google", "Transit", "Realtime", "Alert", "Effect.hs"]
[(1, "NO_SERVICE"), (2, "REDUCED_SERVICE"), (3, "SIGNIFICANT_DELAYS"), (4, "DETOUR"), (5, "ADDITIONAL_SERVICE"),
(6, "MODIFIED_SERVICE"), (7, "OTHER_EFFECT"), (8, "UNKNOWN_EFFECT"), (9, "STOP_MOVED")]
Prelude'.False
instance P'.TextType Effect where
tellT = P'.tellShow
getT = P'.getRead | romanofski/gtfsschedule | src/GTFS/Realtime/Internal/Com/Google/Transit/Realtime/Alert/Effect.hs | gpl-3.0 | 4,426 | 0 | 11 | 740 | 1,089 | 607 | 482 | 99 | 1 |
#!/usr/bin/runghc
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ExtendedDefaultRules #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
import Shelly
import Prelude hiding (FilePath)
import Data.Text.Lazy as LT
import Data.Monoid
import Prelude
--default (LT.Text)
branches = [ "novacollider/cmake-defaults",
"novacollider/dark-palette",
"novacollider/ide-dark-colorscheme",
"novacollider/intrusive_muladds",
"novacollider/ide-hacks",
"novacollider/dll",
"fixes/alsa-midi-fixes",
"fixes/freqscope-resolution",
"novacollider/cmake-modernisation",
"novacollider/oscillator-optimizations",
-- "novacollider/dumpparsenode_cleanup"
-- "novacollider/sleep-in-helper-threads",
"novacollider/sclang-improvements",
"novacollider/gc-improvements",
"feature/larger-synthdef-error-handling",
"novacollider/qtquick",
"novacollider/updater",
-- "novacollider/travis",
"novacollider/hidpi",
"novacollider/msvc2015"
]
fetch_origin = run_ "git" ["fetch", "origin"]
rebase_branch branch base =
run_ command args
where
command = "git"
args = [ "rebase", base, branch ]
rebase_on_master branch = rebase_branch branch "origin/master"
rebase_all_branches = do
mapM_ rebase_on_master branches
push_rebased_branch branch =
run_ "git" ["push", "github-tim", "-f", branch]
push_all_rebased_branches = do
mapM_ push_rebased_branch branches
merge_branch branch = do
run_ "git" ["merge", branch]
synthesize_novacollider = do
run_ "git" ["checkout", "-b", "novacollider/next_tip", "origin/master"]
mapM_ merge_branch branches
run_ "git" ["checkout", "novacollider/tip"]
run_ "git" ["reset", "--hard", "novacollider/next_tip"]
run_ "git" ["branch", "-D", "novacollider/next_tip"]
push_rebased_branch "novacollider/tip"
main = shelly $ verbosely $ do
fetch_origin
rebase_all_branches
push_all_rebased_branches
synthesize_novacollider
| timblechmann/supercollider | novacollider_updater.hs | gpl-3.0 | 2,146 | 0 | 8 | 499 | 343 | 191 | 152 | 51 | 1 |
module Note (Note, show', fromPair, isCymbal, isTom, flams, pedals) where
import Control.Arrow (first)
---------------
-- Constants --
---------------
-- Velocity threshold at or below which a note is considered a ghost note.
threshold :: Int
threshold = 50
----------------
-- Structures --
----------------
data Note = Note (Instrument, Int) | Flam (Instrument)
data Velocity = Ghost | Regular | Accent deriving (Eq, Ord, Show)
instance Eq Note where
x@(Note (p, _)) == x'@(Note (p', _)) = p == p' && velocity x == velocity x'
Flam (p) == Flam (p') = p == p'
_ == _ = False
instance Show Note where
show = flip show' ""
------------------------
-- Exported functions --
------------------------
-- | Take a note and the lilypond representation of the duration.
-- Return the lilypond representation of the note.
show' :: Note -> String -> String
show' x@(Note (i, _)) = let instr = show i in case velocity x of
Ghost -> (("\\parenthesize " ++ instr) ++)
Regular -> (instr ++)
Accent -> (instr ++) . (++ "->")
show' (Flam i) = (("\\acciaccatura{\\once\\stemUp " ++ i' ++ "8}" ++ i') ++)
where i' = show i
-- | Create a note from a pair (pitch, velocity).
fromPair :: (Int, Int) -> Note
fromPair = Note . first td15
-- | Determine if a note is a cymbal.
isCymbal :: Note -> Bool
isCymbal = flip elem [CrashCymbal, HalfOpenHiHat, HiHat, OpenHiHat, RideBell,
RideCymbal] . instrument
-- | Determine if a note is a tom.
isTom :: Note -> Bool
isTom = flip elem [CrossStick, FloorTom, HighTom, MidTom, Rimshot, Snare]
. instrument
-- | Detect the flams in a list of simultaneous notes.
flams :: [Note] -> [Note]
flams (x@(Note (i, _)) : xs)
| i `elem` (map instrument xs) = Flam i :
flams (filter ((/= i) . instrument) xs)
| otherwise = x : flams xs
flams (x : xs) = x : flams xs
flams x = x
pedals :: [Note] -> [Note]
pedals xs
| HiHat `elem` map instrument xs = filter ((/= PedalHiHat) . instrument) xs
| otherwise = xs
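-- Illustrative example, not part of the original module: two simultaneous
-- snare hits (MIDI pitch 38) collapse into a single flam.
_flamExample :: [Note]
_flamExample = flams [fromPair (38, 100), fromPair (38, 60)]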
---------------------
-- Local functions --
---------------------
-- Get the instrument of a note.
instrument :: Note -> Instrument
instrument (Note (i, _)) = i
instrument (Flam (i)) = i
-- Get the velocity (i.e. the strength) of a note.
velocity :: Note -> Velocity
velocity (Note (_, v)) | v == 127 = Accent
velocity (Note (_, v)) | v > threshold = Regular
velocity (Note (_, _)) = Ghost
velocity _ = Regular
data Instrument = BassDrum
| CrashCymbal
| CrossStick
| FloorTom
| HalfOpenHiHat
| HiHat
| HighTom
| MidTom
| OpenHiHat
| PedalHiHat
| RideBell
| RideCymbal
| Rimshot
| Snare
| Other deriving Eq
instance Show Instrument where
show BassDrum = "bd"
show CrashCymbal = "cymc"
show CrossStick = "sn" -- TODO
show FloorTom = "tomfh"
show HalfOpenHiHat = "hhho"
show HiHat = "hh"
show HighTom = "tomh"
show MidTom = "toml"
show OpenHiHat = "hho"
show PedalHiHat = "hhp"
show RideBell = "rb"
show RideCymbal = "cymr"
show Rimshot = "sn" -- TODO
show Snare = "sn"
show Other = "ss" -- Dummy
-- Convert a MIDI instrument (number) to its Lilypond value.
td15 :: (Num a, Eq a) => a -> Instrument
td15 36 = BassDrum -- Bass Drum 1
td15 37 = CrossStick -- Side Stick/Rimshot
td15 38 = Snare -- Snare Drum 1
td15 40 = Rimshot -- Snare Drum 2
td15 42 = HiHat -- Closed Hi-hat
td15 43 = FloorTom -- Low Tom 1
td15 44 = PedalHiHat -- Pedal Hi-hat
td15 45 = MidTom -- Mid Tom 2
td15 46 = HalfOpenHiHat -- Open Hi-hat
td15 47 = MidTom -- Mid Tom 1 (TODO: this is rimshot)
td15 48 = HighTom -- High Tom 2
td15 49 = CrashCymbal -- Crash Cymbal 1
td15 50 = HighTom -- High Tom 1 (TODO: this is rimshot)
td15 51 = RideCymbal -- Ride Cymbal 1
td15 53 = RideBell -- Ride Bell
td15 55 = CrashCymbal -- Splash Cymbal (TODO: this is the edge)
td15 58 = FloorTom -- Vibra Slap (TODO: this is floor tom rimshot)
td15 59 = RideCymbal -- Ride Cymbal 2 (TODO: this is the edge)
td15 22 = HiHat -- TD15 closed hi-hat
td15 26 = OpenHiHat -- TD15 open hi-hat
td15 _ = Other
| Shaac/chartleston | src/note.hs | gpl-3.0 | 4,569 | 0 | 12 | 1,446 | 1,240 | 686 | 554 | 99 | 3 |
{-# LANGUAGE UnicodeSyntax #-}
module Data.Vector.Unicode.SR (
(⊳), (⊲), (⋈), (∅)
) where
import Data.Vector (Vector)
import qualified Data.Vector as Vector
(⊳) ∷ α → Vector α → Vector α
(⊳) = Vector.cons
infixl 5 ⊳
(⊲) ∷ Vector α → α → Vector α
(⊲) = Vector.snoc
infixr 5 ⊲
(⋈) ∷ Vector α → Vector α → Vector α
(⋈) = (Vector.++)
infixr 5 ⋈
(∅) ∷ Vector α
(∅) = Vector.empty
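-- Illustrative example, not part of the original module; parentheses are
-- written out explicitly rather than relying on the operator fixities.
_example ∷ Vector Int
_example = 1 ⊳ (2 ⊳ ((∅) ⊲ 3))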
| eigengrau/haskell-unicode-symbols | src/Data/Vector/Unicode/SR.hs | gpl-3.0 | 458 | 0 | 7 | 97 | 178 | 111 | 67 | 16 | 1 |
module KRPCHS.KerbalAlarmClock
( AlarmAction(..)
, AlarmType(..)
, Alarm
, alarmWithName
, alarmWithNameStream
, alarmWithNameStreamReq
, alarmRemove
, getAlarmAction
, getAlarmActionStream
, getAlarmActionStreamReq
, getAlarmID
, getAlarmIDStream
, getAlarmIDStreamReq
, getAlarmMargin
, getAlarmMarginStream
, getAlarmMarginStreamReq
, getAlarmName
, getAlarmNameStream
, getAlarmNameStreamReq
, getAlarmNotes
, getAlarmNotesStream
, getAlarmNotesStreamReq
, getAlarmRemaining
, getAlarmRemainingStream
, getAlarmRemainingStreamReq
, getAlarmRepeat
, getAlarmRepeatStream
, getAlarmRepeatStreamReq
, getAlarmRepeatPeriod
, getAlarmRepeatPeriodStream
, getAlarmRepeatPeriodStreamReq
, getAlarmTime
, getAlarmTimeStream
, getAlarmTimeStreamReq
, getAlarmType
, getAlarmTypeStream
, getAlarmTypeStreamReq
, getAlarmVessel
, getAlarmVesselStream
, getAlarmVesselStreamReq
, getAlarmXferOriginBody
, getAlarmXferOriginBodyStream
, getAlarmXferOriginBodyStreamReq
, getAlarmXferTargetBody
, getAlarmXferTargetBodyStream
, getAlarmXferTargetBodyStreamReq
, setAlarmAction
, setAlarmMargin
, setAlarmName
, setAlarmNotes
, setAlarmRepeat
, setAlarmRepeatPeriod
, setAlarmTime
, setAlarmVessel
, setAlarmXferOriginBody
, setAlarmXferTargetBody
, alarmsWithType
, alarmsWithTypeStream
, alarmsWithTypeStreamReq
, createAlarm
, createAlarmStream
, createAlarmStreamReq
, getAlarms
, getAlarmsStream
, getAlarmsStreamReq
, getAvailable
, getAvailableStream
, getAvailableStreamReq
) where
import qualified Data.Text
import qualified KRPCHS.SpaceCenter
import KRPCHS.Internal.Requests
import KRPCHS.Internal.SerializeUtils
{-
- Represents an alarm. Obtained by calling
- <see cref="M:KerbalAlarmClock.Alarms" />,
- <see cref="M:KerbalAlarmClock.AlarmWithName" /> or
- <see cref="M:KerbalAlarmClock.AlarmsWithType" />.
-}
newtype Alarm = Alarm { alarmId :: Int }
deriving (Show, Eq, Ord)
instance PbSerializable Alarm where
encodePb = encodePb . alarmId
decodePb b = Alarm <$> decodePb b
instance KRPCResponseExtractable Alarm
{-
- The action performed by an alarm when it fires.
-}
data AlarmAction
= AlarmAction'DoNothing
| AlarmAction'DoNothingDeleteWhenPassed
| AlarmAction'KillWarp
| AlarmAction'KillWarpOnly
| AlarmAction'MessageOnly
| AlarmAction'PauseGame
deriving (Show, Eq, Ord, Enum)
instance PbSerializable AlarmAction where
encodePb = encodePb . fromEnum
decodePb b = toEnum <$> decodePb b
instance KRPCResponseExtractable AlarmAction
{-
- The type of an alarm.
-}
data AlarmType
= AlarmType'Raw
| AlarmType'Maneuver
| AlarmType'Crew
| AlarmType'Distance
| AlarmType'EarthTime
| AlarmType'LaunchRendevous
| AlarmType'SOIChange
| AlarmType'SOIChangeAuto
| AlarmType'Transfer
| AlarmType'TransferModelled
| AlarmType'ManeuverAuto
| AlarmType'Apoapsis
| AlarmType'Periapsis
| AlarmType'AscendingNode
| AlarmType'DescendingNode
| AlarmType'Closest
| AlarmType'Contract
| AlarmType'ContractAuto
deriving (Show, Eq, Ord, Enum)
instance PbSerializable AlarmType where
encodePb = encodePb . fromEnum
decodePb b = toEnum <$> decodePb b
instance KRPCResponseExtractable AlarmType
{-
 - Get the alarm with the given <paramref name="name" />, or null if no alarms have that name. If more than one alarm has the name,
- only returns one of them.<param name="name">Name of the alarm to search for.
-}
alarmWithName :: Data.Text.Text -> RPCContext (KRPCHS.KerbalAlarmClock.Alarm)
alarmWithName nameArg = do
let r = makeRequest "KerbalAlarmClock" "AlarmWithName" [makeArgument 0 nameArg]
res <- sendRequest r
processResponse res
alarmWithNameStreamReq :: Data.Text.Text -> KRPCStreamReq (KRPCHS.KerbalAlarmClock.Alarm)
alarmWithNameStreamReq nameArg =
let req = makeRequest "KerbalAlarmClock" "AlarmWithName" [makeArgument 0 nameArg]
in makeStream req
alarmWithNameStream :: Data.Text.Text -> RPCContext (KRPCStream (KRPCHS.KerbalAlarmClock.Alarm))
alarmWithNameStream nameArg = requestStream $ alarmWithNameStreamReq nameArg
{-
- Removes the alarm.
-}
alarmRemove :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext ()
alarmRemove thisArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_Remove" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
{-
- The action that the alarm triggers.
-}
getAlarmAction :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (KRPCHS.KerbalAlarmClock.AlarmAction)
getAlarmAction thisArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_get_Action" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getAlarmActionStreamReq :: KRPCHS.KerbalAlarmClock.Alarm -> KRPCStreamReq (KRPCHS.KerbalAlarmClock.AlarmAction)
getAlarmActionStreamReq thisArg =
let req = makeRequest "KerbalAlarmClock" "Alarm_get_Action" [makeArgument 0 thisArg]
in makeStream req
getAlarmActionStream :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (KRPCStream (KRPCHS.KerbalAlarmClock.AlarmAction))
getAlarmActionStream thisArg = requestStream $ getAlarmActionStreamReq thisArg
{-
- The unique identifier for the alarm.
-}
getAlarmID :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (Data.Text.Text)
getAlarmID thisArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_get_ID" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getAlarmIDStreamReq :: KRPCHS.KerbalAlarmClock.Alarm -> KRPCStreamReq (Data.Text.Text)
getAlarmIDStreamReq thisArg =
let req = makeRequest "KerbalAlarmClock" "Alarm_get_ID" [makeArgument 0 thisArg]
in makeStream req
getAlarmIDStream :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (KRPCStream (Data.Text.Text))
getAlarmIDStream thisArg = requestStream $ getAlarmIDStreamReq thisArg
{-
- The number of seconds before the event that the alarm will fire.
-}
getAlarmMargin :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (Double)
getAlarmMargin thisArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_get_Margin" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getAlarmMarginStreamReq :: KRPCHS.KerbalAlarmClock.Alarm -> KRPCStreamReq (Double)
getAlarmMarginStreamReq thisArg =
let req = makeRequest "KerbalAlarmClock" "Alarm_get_Margin" [makeArgument 0 thisArg]
in makeStream req
getAlarmMarginStream :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (KRPCStream (Double))
getAlarmMarginStream thisArg = requestStream $ getAlarmMarginStreamReq thisArg
{-
- The short name of the alarm.
-}
getAlarmName :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (Data.Text.Text)
getAlarmName thisArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_get_Name" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getAlarmNameStreamReq :: KRPCHS.KerbalAlarmClock.Alarm -> KRPCStreamReq (Data.Text.Text)
getAlarmNameStreamReq thisArg =
let req = makeRequest "KerbalAlarmClock" "Alarm_get_Name" [makeArgument 0 thisArg]
in makeStream req
getAlarmNameStream :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (KRPCStream (Data.Text.Text))
getAlarmNameStream thisArg = requestStream $ getAlarmNameStreamReq thisArg
{-
- The long description of the alarm.
-}
getAlarmNotes :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (Data.Text.Text)
getAlarmNotes thisArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_get_Notes" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getAlarmNotesStreamReq :: KRPCHS.KerbalAlarmClock.Alarm -> KRPCStreamReq (Data.Text.Text)
getAlarmNotesStreamReq thisArg =
let req = makeRequest "KerbalAlarmClock" "Alarm_get_Notes" [makeArgument 0 thisArg]
in makeStream req
getAlarmNotesStream :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (KRPCStream (Data.Text.Text))
getAlarmNotesStream thisArg = requestStream $ getAlarmNotesStreamReq thisArg
{-
- The number of seconds until the alarm will fire.
-}
getAlarmRemaining :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (Double)
getAlarmRemaining thisArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_get_Remaining" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getAlarmRemainingStreamReq :: KRPCHS.KerbalAlarmClock.Alarm -> KRPCStreamReq (Double)
getAlarmRemainingStreamReq thisArg =
let req = makeRequest "KerbalAlarmClock" "Alarm_get_Remaining" [makeArgument 0 thisArg]
in makeStream req
getAlarmRemainingStream :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (KRPCStream (Double))
getAlarmRemainingStream thisArg = requestStream $ getAlarmRemainingStreamReq thisArg
{-
- Whether the alarm will be repeated after it has fired.
-}
getAlarmRepeat :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (Bool)
getAlarmRepeat thisArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_get_Repeat" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getAlarmRepeatStreamReq :: KRPCHS.KerbalAlarmClock.Alarm -> KRPCStreamReq (Bool)
getAlarmRepeatStreamReq thisArg =
let req = makeRequest "KerbalAlarmClock" "Alarm_get_Repeat" [makeArgument 0 thisArg]
in makeStream req
getAlarmRepeatStream :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (KRPCStream (Bool))
getAlarmRepeatStream thisArg = requestStream $ getAlarmRepeatStreamReq thisArg
{-
- The time delay to automatically create an alarm after it has fired.
-}
getAlarmRepeatPeriod :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (Double)
getAlarmRepeatPeriod thisArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_get_RepeatPeriod" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getAlarmRepeatPeriodStreamReq :: KRPCHS.KerbalAlarmClock.Alarm -> KRPCStreamReq (Double)
getAlarmRepeatPeriodStreamReq thisArg =
let req = makeRequest "KerbalAlarmClock" "Alarm_get_RepeatPeriod" [makeArgument 0 thisArg]
in makeStream req
getAlarmRepeatPeriodStream :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (KRPCStream (Double))
getAlarmRepeatPeriodStream thisArg = requestStream $ getAlarmRepeatPeriodStreamReq thisArg
{-
- The time at which the alarm will fire.
-}
getAlarmTime :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (Double)
getAlarmTime thisArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_get_Time" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getAlarmTimeStreamReq :: KRPCHS.KerbalAlarmClock.Alarm -> KRPCStreamReq (Double)
getAlarmTimeStreamReq thisArg =
let req = makeRequest "KerbalAlarmClock" "Alarm_get_Time" [makeArgument 0 thisArg]
in makeStream req
getAlarmTimeStream :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (KRPCStream (Double))
getAlarmTimeStream thisArg = requestStream $ getAlarmTimeStreamReq thisArg
{-
- The type of the alarm.
-}
getAlarmType :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (KRPCHS.KerbalAlarmClock.AlarmType)
getAlarmType thisArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_get_Type" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getAlarmTypeStreamReq :: KRPCHS.KerbalAlarmClock.Alarm -> KRPCStreamReq (KRPCHS.KerbalAlarmClock.AlarmType)
getAlarmTypeStreamReq thisArg =
let req = makeRequest "KerbalAlarmClock" "Alarm_get_Type" [makeArgument 0 thisArg]
in makeStream req
getAlarmTypeStream :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (KRPCStream (KRPCHS.KerbalAlarmClock.AlarmType))
getAlarmTypeStream thisArg = requestStream $ getAlarmTypeStreamReq thisArg
{-
- The vessel that the alarm is attached to.
-}
getAlarmVessel :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (KRPCHS.SpaceCenter.Vessel)
getAlarmVessel thisArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_get_Vessel" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getAlarmVesselStreamReq :: KRPCHS.KerbalAlarmClock.Alarm -> KRPCStreamReq (KRPCHS.SpaceCenter.Vessel)
getAlarmVesselStreamReq thisArg =
let req = makeRequest "KerbalAlarmClock" "Alarm_get_Vessel" [makeArgument 0 thisArg]
in makeStream req
getAlarmVesselStream :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (KRPCStream (KRPCHS.SpaceCenter.Vessel))
getAlarmVesselStream thisArg = requestStream $ getAlarmVesselStreamReq thisArg
{-
- The celestial body the vessel is departing from.
-}
getAlarmXferOriginBody :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (KRPCHS.SpaceCenter.CelestialBody)
getAlarmXferOriginBody thisArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_get_XferOriginBody" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getAlarmXferOriginBodyStreamReq :: KRPCHS.KerbalAlarmClock.Alarm -> KRPCStreamReq (KRPCHS.SpaceCenter.CelestialBody)
getAlarmXferOriginBodyStreamReq thisArg =
let req = makeRequest "KerbalAlarmClock" "Alarm_get_XferOriginBody" [makeArgument 0 thisArg]
in makeStream req
getAlarmXferOriginBodyStream :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (KRPCStream (KRPCHS.SpaceCenter.CelestialBody))
getAlarmXferOriginBodyStream thisArg = requestStream $ getAlarmXferOriginBodyStreamReq thisArg
{-
- The celestial body the vessel is arriving at.
-}
getAlarmXferTargetBody :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (KRPCHS.SpaceCenter.CelestialBody)
getAlarmXferTargetBody thisArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_get_XferTargetBody" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getAlarmXferTargetBodyStreamReq :: KRPCHS.KerbalAlarmClock.Alarm -> KRPCStreamReq (KRPCHS.SpaceCenter.CelestialBody)
getAlarmXferTargetBodyStreamReq thisArg =
let req = makeRequest "KerbalAlarmClock" "Alarm_get_XferTargetBody" [makeArgument 0 thisArg]
in makeStream req
getAlarmXferTargetBodyStream :: KRPCHS.KerbalAlarmClock.Alarm -> RPCContext (KRPCStream (KRPCHS.SpaceCenter.CelestialBody))
getAlarmXferTargetBodyStream thisArg = requestStream $ getAlarmXferTargetBodyStreamReq thisArg
{-
- The action that the alarm triggers.
-}
setAlarmAction :: KRPCHS.KerbalAlarmClock.Alarm -> KRPCHS.KerbalAlarmClock.AlarmAction -> RPCContext ()
setAlarmAction thisArg valueArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_set_Action" [makeArgument 0 thisArg, makeArgument 1 valueArg]
res <- sendRequest r
processResponse res
{-
- The number of seconds before the event that the alarm will fire.
-}
setAlarmMargin :: KRPCHS.KerbalAlarmClock.Alarm -> Double -> RPCContext ()
setAlarmMargin thisArg valueArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_set_Margin" [makeArgument 0 thisArg, makeArgument 1 valueArg]
res <- sendRequest r
processResponse res
{-
- The short name of the alarm.
-}
setAlarmName :: KRPCHS.KerbalAlarmClock.Alarm -> Data.Text.Text -> RPCContext ()
setAlarmName thisArg valueArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_set_Name" [makeArgument 0 thisArg, makeArgument 1 valueArg]
res <- sendRequest r
processResponse res
{-
- The long description of the alarm.
-}
setAlarmNotes :: KRPCHS.KerbalAlarmClock.Alarm -> Data.Text.Text -> RPCContext ()
setAlarmNotes thisArg valueArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_set_Notes" [makeArgument 0 thisArg, makeArgument 1 valueArg]
res <- sendRequest r
processResponse res
{-
- Whether the alarm will be repeated after it has fired.
-}
setAlarmRepeat :: KRPCHS.KerbalAlarmClock.Alarm -> Bool -> RPCContext ()
setAlarmRepeat thisArg valueArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_set_Repeat" [makeArgument 0 thisArg, makeArgument 1 valueArg]
res <- sendRequest r
processResponse res
{-
- The time delay to automatically create an alarm after it has fired.
-}
setAlarmRepeatPeriod :: KRPCHS.KerbalAlarmClock.Alarm -> Double -> RPCContext ()
setAlarmRepeatPeriod thisArg valueArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_set_RepeatPeriod" [makeArgument 0 thisArg, makeArgument 1 valueArg]
res <- sendRequest r
processResponse res
{-
- The time at which the alarm will fire.
-}
setAlarmTime :: KRPCHS.KerbalAlarmClock.Alarm -> Double -> RPCContext ()
setAlarmTime thisArg valueArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_set_Time" [makeArgument 0 thisArg, makeArgument 1 valueArg]
res <- sendRequest r
processResponse res
{-
- The vessel that the alarm is attached to.
-}
setAlarmVessel :: KRPCHS.KerbalAlarmClock.Alarm -> KRPCHS.SpaceCenter.Vessel -> RPCContext ()
setAlarmVessel thisArg valueArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_set_Vessel" [makeArgument 0 thisArg, makeArgument 1 valueArg]
res <- sendRequest r
processResponse res
{-
- The celestial body the vessel is departing from.
-}
setAlarmXferOriginBody :: KRPCHS.KerbalAlarmClock.Alarm -> KRPCHS.SpaceCenter.CelestialBody -> RPCContext ()
setAlarmXferOriginBody thisArg valueArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_set_XferOriginBody" [makeArgument 0 thisArg, makeArgument 1 valueArg]
res <- sendRequest r
processResponse res
{-
- The celestial body the vessel is arriving at.
-}
setAlarmXferTargetBody :: KRPCHS.KerbalAlarmClock.Alarm -> KRPCHS.SpaceCenter.CelestialBody -> RPCContext ()
setAlarmXferTargetBody thisArg valueArg = do
let r = makeRequest "KerbalAlarmClock" "Alarm_set_XferTargetBody" [makeArgument 0 thisArg, makeArgument 1 valueArg]
res <- sendRequest r
processResponse res
{-
- Get a list of alarms of the specified <paramref name="type" />.<param name="type">Type of alarm to return.
-}
alarmsWithType :: KRPCHS.KerbalAlarmClock.AlarmType -> RPCContext ([KRPCHS.KerbalAlarmClock.Alarm])
alarmsWithType typeArg = do
let r = makeRequest "KerbalAlarmClock" "AlarmsWithType" [makeArgument 0 typeArg]
res <- sendRequest r
processResponse res
alarmsWithTypeStreamReq :: KRPCHS.KerbalAlarmClock.AlarmType -> KRPCStreamReq ([KRPCHS.KerbalAlarmClock.Alarm])
alarmsWithTypeStreamReq typeArg =
let req = makeRequest "KerbalAlarmClock" "AlarmsWithType" [makeArgument 0 typeArg]
in makeStream req
alarmsWithTypeStream :: KRPCHS.KerbalAlarmClock.AlarmType -> RPCContext (KRPCStream ([KRPCHS.KerbalAlarmClock.Alarm]))
alarmsWithTypeStream typeArg = requestStream $ alarmsWithTypeStreamReq typeArg
{-
- Create a new alarm and return it.<param name="type">Type of the new alarm.<param name="name">Name of the new alarm.<param name="ut">Time at which the new alarm should trigger.
-}
createAlarm :: KRPCHS.KerbalAlarmClock.AlarmType -> Data.Text.Text -> Double -> RPCContext (KRPCHS.KerbalAlarmClock.Alarm)
createAlarm typeArg nameArg utArg = do
let r = makeRequest "KerbalAlarmClock" "CreateAlarm" [makeArgument 0 typeArg, makeArgument 1 nameArg, makeArgument 2 utArg]
res <- sendRequest r
processResponse res
createAlarmStreamReq :: KRPCHS.KerbalAlarmClock.AlarmType -> Data.Text.Text -> Double -> KRPCStreamReq (KRPCHS.KerbalAlarmClock.Alarm)
createAlarmStreamReq typeArg nameArg utArg =
let req = makeRequest "KerbalAlarmClock" "CreateAlarm" [makeArgument 0 typeArg, makeArgument 1 nameArg, makeArgument 2 utArg]
in makeStream req
createAlarmStream :: KRPCHS.KerbalAlarmClock.AlarmType -> Data.Text.Text -> Double -> RPCContext (KRPCStream (KRPCHS.KerbalAlarmClock.Alarm))
createAlarmStream typeArg nameArg utArg = requestStream $ createAlarmStreamReq typeArg nameArg utArg
{-
- A list of all the alarms.
-}
getAlarms :: RPCContext ([KRPCHS.KerbalAlarmClock.Alarm])
getAlarms = do
let r = makeRequest "KerbalAlarmClock" "get_Alarms" []
res <- sendRequest r
processResponse res
getAlarmsStreamReq :: KRPCStreamReq ([KRPCHS.KerbalAlarmClock.Alarm])
getAlarmsStreamReq =
let req = makeRequest "KerbalAlarmClock" "get_Alarms" []
in makeStream req
getAlarmsStream :: RPCContext (KRPCStream ([KRPCHS.KerbalAlarmClock.Alarm]))
getAlarmsStream = requestStream $ getAlarmsStreamReq
{-
- Whether Kerbal Alarm Clock is available.
-}
getAvailable :: RPCContext (Bool)
getAvailable = do
let r = makeRequest "KerbalAlarmClock" "get_Available" []
res <- sendRequest r
processResponse res
getAvailableStreamReq :: KRPCStreamReq (Bool)
getAvailableStreamReq =
let req = makeRequest "KerbalAlarmClock" "get_Available" []
in makeStream req
getAvailableStream :: RPCContext (KRPCStream (Bool))
getAvailableStream = requestStream $ getAvailableStreamReq
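{-
 - Illustrative sketch, not part of the generated module: create a one-off
 - "Raw" alarm sixty seconds from now. It assumes the caller already runs the
 - RPCContext, and that KRPCHS.SpaceCenter exposes getUT (in-game universal
 - time); treat that name as a placeholder otherwise.
-}
_createAlarmSketch :: RPCContext (KRPCHS.KerbalAlarmClock.Alarm)
_createAlarmSketch = do
  ut <- KRPCHS.SpaceCenter.getUT
  createAlarm AlarmType'Raw (Data.Text.pack "Circularize") (ut + 60)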
| Cahu/krpc-hs | src/KRPCHS/KerbalAlarmClock.hs | gpl-3.0 | 20,347 | 0 | 12 | 2,943 | 4,409 | 2,239 | 2,170 | 368 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.YouTube.LiveChatBans.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Adds a new ban to the chat.
--
-- /See:/ <https://developers.google.com/youtube/v3 YouTube Data API Reference> for @youtube.liveChatBans.insert@.
module Network.Google.Resource.YouTube.LiveChatBans.Insert
(
-- * REST Resource
LiveChatBansInsertResource
-- * Creating a Request
, liveChatBansInsert
, LiveChatBansInsert
-- * Request Lenses
, lcbiPart
, lcbiPayload
) where
import Network.Google.Prelude
import Network.Google.YouTube.Types
-- | A resource alias for @youtube.liveChatBans.insert@ method which the
-- 'LiveChatBansInsert' request conforms to.
type LiveChatBansInsertResource =
"youtube" :>
"v3" :>
"liveChat" :>
"bans" :>
QueryParam "part" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] LiveChatBan :>
Post '[JSON] LiveChatBan
-- | Adds a new ban to the chat.
--
-- /See:/ 'liveChatBansInsert' smart constructor.
data LiveChatBansInsert = LiveChatBansInsert'
{ _lcbiPart :: !Text
, _lcbiPayload :: !LiveChatBan
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'LiveChatBansInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lcbiPart'
--
-- * 'lcbiPayload'
liveChatBansInsert
:: Text -- ^ 'lcbiPart'
-> LiveChatBan -- ^ 'lcbiPayload'
-> LiveChatBansInsert
liveChatBansInsert pLcbiPart_ pLcbiPayload_ =
LiveChatBansInsert'
{ _lcbiPart = pLcbiPart_
, _lcbiPayload = pLcbiPayload_
}
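-- | A usage sketch (not part of the generated module): build an insert
-- request for an existing 'LiveChatBan' payload. The payload value is
-- assumed to be constructed elsewhere via "Network.Google.YouTube.Types";
-- @snippet@ is the part value this endpoint expects.
exampleBanInsert :: LiveChatBan -> LiveChatBansInsert
exampleBanInsert payload = liveChatBansInsert "snippet" payload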
-- | The part parameter serves two purposes in this operation. It identifies
-- the properties that the write operation will set as well as the
-- properties that the API response returns. Set the parameter value to
-- snippet.
lcbiPart :: Lens' LiveChatBansInsert Text
lcbiPart = lens _lcbiPart (\ s a -> s{_lcbiPart = a})
-- | Multipart request metadata.
lcbiPayload :: Lens' LiveChatBansInsert LiveChatBan
lcbiPayload
= lens _lcbiPayload (\ s a -> s{_lcbiPayload = a})
instance GoogleRequest LiveChatBansInsert where
type Rs LiveChatBansInsert = LiveChatBan
type Scopes LiveChatBansInsert =
'["https://www.googleapis.com/auth/youtube",
"https://www.googleapis.com/auth/youtube.force-ssl"]
requestClient LiveChatBansInsert'{..}
= go (Just _lcbiPart) (Just AltJSON) _lcbiPayload
youTubeService
where go
= buildClient
(Proxy :: Proxy LiveChatBansInsertResource)
mempty
| rueshyna/gogol | gogol-youtube/gen/Network/Google/Resource/YouTube/LiveChatBans/Insert.hs | mpl-2.0 | 3,411 | 0 | 14 | 797 | 398 | 240 | 158 | 63 | 1 |
import qualified Dao.Test.Interval
import qualified Dao.Test.Grammar -- required by the active 'runTests' entry below
import Control.Monad
import System.Exit
main :: IO ()
main = (>>= (flip unless exitFailure)) $ fmap and $ sequence $
[
-- , Dao.Test.Interval.runTests
-- , Dao.Test.Grammar.runTests
Dao.Test.Grammar.runTests
]
| printedheart/Dao | tests/dao-tests.hs | agpl-3.0 | 318 | 0 | 10 | 72 | 70 | 41 | 29 | 7 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module WordProblem (answer) where
import Data.Text (pack)
import Data.List (foldl')
import Control.Applicative (pure, (<|>), (<$>), (<*>), (<*), (*>))
import Data.Attoparsec.Text
answerParser :: Parser Int
answerParser = do
n <- "What is " .*> signed decimal
ops <- many' (space *> operation)
"?" .*> pure (foldl' (flip ($)) n ops)
answer :: String -> Maybe Int
answer = maybeResult . parse answerParser . pack
operation :: Parser (Int -> Int)
operation = (flip <$> operator) <* space <*> signed decimal
operator :: Parser (Int -> Int -> Int)
operator = "plus" .*> pure (+) <|>
"minus" .*> pure (-) <|>
"multiplied by" .*> pure (*) <|>
"divided by" .*> pure div | mscoutermarsh/exercism_coveralls | assignments/haskell/wordy/example.hs | agpl-3.0 | 759 | 0 | 12 | 176 | 279 | 155 | 124 | 20 | 1 |
module Network.Haskoin.Cereal.Tests (tests) where
import Data.Serialize
import Network.Haskoin.Test
import Network.Haskoin.Util
import Test.Framework
import Test.Framework.Providers.QuickCheck2
import Test.QuickCheck
tests :: [Test]
tests =
[ testGroup "Binary encoding and decoding of utility types"
[ testProperty "ByteString" $ forAll arbitraryBS testId ]
, testGroup "Binary encoding and decoding of hash types"
[ testProperty "Hash160" $ forAll arbitraryHash160 testId
, testProperty "Hash256" $ forAll arbitraryHash256 testId
, testProperty "Hash512" $ forAll arbitraryHash512 testId
]
, testGroup "Binary encoding and decoding of crypto types"
[ testProperty "Signature" $ forAll arbitrarySignature $ testId . lst3
, testProperty "PubKey" $ forAll arbitraryPubKey $ testId . snd
, testProperty "XPrvKey" $ forAll arbitraryXPrvKey testId
, testProperty "XPubKey" $ forAll arbitraryXPubKey $ testId . snd
]
, testGroup "Binary encoding and decoding of protocol types"
[ testProperty "VarInt" $ forAll arbitraryVarInt testId
, testProperty "VarString" $ forAll arbitraryVarString testId
, testProperty "NetworkAddress" $ forAll arbitraryNetworkAddress testId
, testProperty "InvType" $ forAll arbitraryInvType testId
, testProperty "InvVector" $ forAll arbitraryInvVector testId
, testProperty "Inv" $ forAll arbitraryInv1 testId
, testProperty "Version" $ forAll arbitraryVersion testId
, testProperty "Addr" $ forAll arbitraryAddr1 testId
, testProperty "Alert" $ forAll arbitraryAlert testId
, testProperty "Reject" $ forAll arbitraryReject testId
, testProperty "GetData" $ forAll arbitraryGetData testId
, testProperty "NotFound" $ forAll arbitraryNotFound testId
, testProperty "Ping" $ forAll arbitraryPing testId
, testProperty "Pong" $ forAll arbitraryPong testId
, testProperty "MessageCommand" $ forAll arbitraryMessageCommand testId
, testProperty "MessageHeader" $ forAll arbitraryMessageHeader testId
, testProperty "Message" $ forAll arbitraryMessage testId
]
, testGroup "Binary encoding and decoding of script types"
[ testProperty "ScriptOp" $ forAll arbitraryScriptOp testId
, testProperty "Script" $ forAll arbitraryScript testId
]
, testGroup "Binary encoding and decoding of transaction types"
[ testProperty "TxIn" $ forAll arbitraryTxIn testId
, testProperty "TxOut" $ forAll arbitraryTxOut testId
, testProperty "OutPoint" $ forAll arbitraryOutPoint testId
, testProperty "Tx" $ forAll arbitraryTx testId
, testProperty "Tx" $ forAll arbitraryWitnessTx testId
, testProperty "Tx" $ forAll arbitraryLegacyTx testId
]
, testGroup "Binary encoding and decoding of block types"
[ testProperty "Block" $ forAll arbitraryBlock testId
, testProperty "BlockHeader" $ forAll arbitraryBlockHeader testId
, testProperty "GetBlocks" $ forAll arbitraryGetBlocks testId
, testProperty "GetHeaders" $ forAll arbitraryGetHeaders testId
, testProperty "Headers" $ forAll arbitraryHeaders testId
, testProperty "MerkleBlock" $ forAll arbitraryMerkleBlock testId
]
, testGroup "Binary encoding and decoding of bloom types"
[ testProperty "BloomFlags" $ forAll arbitraryBloomFlags testId
, testProperty "BloomFilter" $ forAll arbitraryBloomFilter $ testId . lst3
, testProperty "FilterLoad" $ forAll arbitraryFilterLoad testId
, testProperty "FilterAdd" $ forAll arbitraryFilterAdd testId
]
]
testId :: (Serialize a, Eq a) => a -> Bool
testId x = decode (encode x) == Right x
| xenog/haskoin | test/bitcoin/Network/Haskoin/Cereal/Tests.hs | unlicense | 3,884 | 0 | 11 | 933 | 841 | 408 | 433 | 62 | 1 |
module Main where
import Factors
import Primes
sequenceLength = 4
countDistinctFactors = 4
isMatchingNumber :: Int -> Bool
isMatchingNumber = (countDistinctFactors <=) . length . primeFactors
findFirstSequence = findFirstSequenceNumber' 0 1
findFirstSequenceNumber' :: Int -> Int -> Int
findFirstSequenceNumber' currentLength currentNumber
| currentLength == sequenceLength = currentNumber - sequenceLength
| isMatchingNumber currentNumber = findFirstSequenceNumber' (currentLength+1) (currentNumber+1)
| otherwise = findFirstSequenceNumber' 0 (currentNumber+1)
main = print $ findFirstSequence
| kliuchnikau/project-euler | 047/Main.hs | apache-2.0 | 606 | 0 | 8 | 78 | 154 | 81 | 73 | 14 | 1 |
module Nanocoin (
initNode
) where
import Protolude hiding (get, put)
import Control.Concurrent.Chan
import Control.Distributed.Process.Lifted (NodeId(..))
import qualified Data.Set as Set
import Data.Maybe (catMaybes)
import Config
import Logger
import qualified Key
import qualified Nanocoin.Block as B
import qualified Nanocoin.CLI as CLI
import qualified Nanocoin.Ledger as L
import qualified Nanocoin.Transaction as T
import qualified Nanocoin.Network.Message as Msg
import qualified Nanocoin.Network.Node as Node
import qualified Nanocoin.Network.P2P as P2P
import qualified Nanocoin.Network.Peer as Peer
import qualified Nanocoin.Network.RPC as RPC
import qualified Nanocoin.Network.Utils as Utils
-- | Initializes a node on the network with its own copy of
-- the blockchain, and starts a p2p server and an HTTP (RPC) server.
initNode
:: Config
-> IO ()
initNode (Config hostname rpcPort p2pPort bootnodes mKeysPath mLogPath) = do
-- Initialize Node Keys
keys <- case mKeysPath of
Nothing -> Key.newKeyPair
Just keysPath -> do
eNodeKeys <- Key.readKeys keysPath
case eNodeKeys of
Left err -> die $ show err
Right keys -> pure keys
-- Initialize Genesis Block
genesisBlock <- do
eKeys <- Key.readKeys "keys/genesis"
case eKeys of
Left err -> die $ show err
Right gkeys -> B.genesisBlock gkeys
-- Initialize NodeState & NodeConfig
nodeState <- Node.initNodeState genesisBlock
nodeConfig <- Node.initNodeConfig hostname p2pPort rpcPort (Just keys)
let nodeEnv = Node.NodeEnv nodeConfig nodeState
logger <- mkLogger mLogPath
-- Init chan to send Msgs from
-- rpc & console proc to p2p network
cmdChan <- newChan
-- Fork RPC server
forkIO $
RPC.rpcServer
logger
nodeEnv
cmdChan
-- Construct bootnode NodeIds from bootnode configs
-- XXX Fail with more information on invalid hostname:port config
bootnodeIds <- fmap catMaybes $ mapM Utils.mkNodeId' bootnodes
-- Fork P2P server
forkIO $
P2P.bootstrap
logger
nodeEnv
cmdChan
bootnodeIds
-- Run cmd line interface
CLI.cli logger nodeEnv cmdChan
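-- | A wiring sketch (not part of this module): an executable's @main@ is
-- expected to obtain a 'Config' (for example from a command-line parser
-- living next to the 'Config' type; that parser is an assumption, not shown
-- here) and hand it to 'initNode' unchanged.
runNodeWith :: Config -> IO ()
runNodeWith cfg = initNode cfg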
| tdietert/nanocoin | src/Nanocoin.hs | apache-2.0 | 2,166 | 0 | 17 | 457 | 474 | 261 | 213 | 54 | 4 |
{- | Implements several tests to check the validity of the program.
-}
module Test.Grammar where
import Test.QuickCheck
import Test.Framework
import PGF
import PGF.Internal
import Muste.Grammar.Internal
import Test.HUnit.Text
import Test.HUnit.Base
import Data.Maybe
import qualified Data.Map as M
import Control.Monad
import Data.Set (Set(..),empty,fromList)
import Data.List (sort,nub)
-- HUnit tests
hunit_Eq_Grammar_eq_test =
let
grammar1 = Grammar "S"
[
Function "f" (Fun "A" ["A","B"]),
Function "g" (Fun "B" ["B","C"]),
Function "h" (Fun "A" ["A","A","A"]),
Function "s" (Fun "S" ["A"])
]
[
Function "a" (Fun "A" []),
Function "b" (Fun "B" []),
Function "c" (Fun "C" [])
]
emptyPGF
grammar2 = Grammar "A" [] [] emptyPGF
pgf = readPGF "gf/ABCAbs.pgf"
grammar3 = fmap pgfToGrammar pgf
in
TestList [
TestLabel "Empty grammar" ( emptyGrammar == emptyGrammar ~?= True ),
TestLabel "Simple Grammar reflexivity" ( grammar1 == grammar1 ~?= True ),
TestLabel "Inequality 1" ( grammar1 == grammar2 ~?= False ),
TestLabel "Inequality 2" ( grammar2 == grammar1 ~?= False ),
TestLabel "Complex grammar" $ TestCase $ join $ fmap (\g -> grammar1 == g @?= True) grammar3
]
hunit_Show_Grammar_show_test =
let
grammar1 = Grammar "S" [] [] emptyPGF
grammar2 = Grammar "S"
[]
[
Function "a" (Fun "A" []),
Function "b" (Fun "B" [])
]
emptyPGF
grammar3 = Grammar "S"
[
Function "f1" (Fun "A" ["A","B"]),
Function "f2" (Fun "B" ["B","B"])
]
[]
emptyPGF
grammar4 = Grammar "S"
[
Function "f1" (Fun "A" ["A","B"]),
Function "f2" (Fun "B" ["B","B"])
]
[
Function "a" (Fun "A" []),
Function "b" (Fun "B" [])
]
emptyPGF
in
TestList [
TestLabel "Empty Grammar" ( show grammar1 ~?= "Startcat: \"S\"\nSyntactic Rules: \n\nLexical Rules: \n" ),
TestLabel "Simple Grammar 1" ( show grammar2 ~?= "Startcat: \"S\"\nSyntactic Rules: \n\nLexical Rules: \n\tFunction \"a\" (Fun \"A\" [])\n \tFunction \"b\" (Fun \"B\" [])\n" ),
TestLabel "Simple Grammar 2" ( show grammar3 ~?= "Startcat: \"S\"\nSyntactic Rules: \n\tFunction \"f1\" (Fun \"A\" [\"A\",\"B\"])\n \tFunction \"f2\" (Fun \"B\" [\"B\",\"B\"])\n\nLexical Rules: \n" ),
TestLabel "Grammar" ( show grammar4 ~?= "Startcat: \"S\"\nSyntactic Rules: \n\tFunction \"f1\" (Fun \"A\" [\"A\",\"B\"])\n \tFunction \"f2\" (Fun \"B\" [\"B\",\"B\"])\n\nLexical Rules: \n\tFunction \"a\" (Fun \"A\" [])\n \tFunction \"b\" (Fun \"B\" [])\n" )
]
hunit_isEmptyPGF_test =
let
pgf = PGF M.empty (mkCId "Abs") (Abstr M.empty M.empty M.empty) M.empty
pgf2 = readPGF "gf/ABCAbs.pgf"
in
TestList [
TestLabel "Empty PGF" $ isEmptyPGF emptyPGF ~?= True,
TestLabel "Almost empty PGF with a name" $ isEmptyPGF pgf ~?= False,
TestLabel "Non-empty PGF" $ TestCase $ pgf2 >>= (\g -> isEmptyPGF g @?= False)
]
hunit_isEmptyGrammar_test =
let
grammar1 = Grammar "S"
[
Function "f" (Fun "S" ["A"]),
Function "a" (Fun "A" [])
]
[]
emptyPGF
pgf = readPGF "gf/ABCAbs.pgf"
grammar2 = fmap (Grammar wildCard [] []) pgf
in
TestList [
TestLabel "Empty Grammar" (isEmptyGrammar emptyGrammar ~?= True),
TestLabel "Almost empty Grammar with a name" (isEmptyGrammar grammar1 ~?= False),
TestLabel "Grammar without a name" $ TestCase $ grammar2 >>= (\g -> isEmptyGrammar g @?= False),
TestLabel "Complete grammar from PGF" $ TestCase $ pgf >>= (\g -> isEmptyGrammar (pgfToGrammar g) @?= False)
]
hunit_getFunTypeWithPGF_test =
let
pgf = readPGF "gf/ABCAbs.pgf"
in
TestList [
TestLabel "Empty PGF" $ getFunTypeWithPGF emptyPGF (mkCId "f") ~?= NoType,
TestLabel "Existing Constant" $ TestCase $ pgf >>= (\g -> getFunTypeWithPGF g (mkCId "a") @?= Fun "A" [] ),
TestLabel "Existing Function" $ TestCase $ pgf >>= (\g -> getFunTypeWithPGF g (mkCId "f") @?= Fun "A" ["A", "B"]),
TestLabel "Non-Existing Function" $ TestCase $ pgf >>= (\g -> getFunTypeWithPGF g (mkCId "foo") @?= NoType)
]
hunit_getFunTypeWithGrammar_test =
let
pgf = readPGF "gf/ABCAbs.pgf"
grammar = fmap pgfToGrammar pgf
in
TestList [
TestLabel "Empty PGF" $ getFunTypeWithGrammar emptyGrammar "f" ~?= NoType,
TestLabel "Existing Constant" $ TestCase $ grammar >>= (\g -> getFunTypeWithGrammar g "a" @?= Fun "A" [] ),
TestLabel "Existing Function" $ TestCase $ grammar >>= (\g -> getFunTypeWithGrammar g "f" @?= Fun "A" ["A", "B"]),
TestLabel "Non-Existing Function" $ TestCase $ grammar >>= (\g -> getFunTypeWithGrammar g "foo" @?= NoType)
]
hunit_getFunCat_test =
TestList [
TestLabel "NoType" ( getFunCat NoType ~?= wildCard),
TestLabel "Constant" ( getFunCat (Fun "A" []) ~?= "A"),
TestLabel "Constant" ( getFunCat (Fun "A" ["A","B"]) ~?= "A")
]
hunit_getRuleCat_test =
TestList [
TestLabel "NoType" ( getRuleCat (Function "f" NoType) ~?= wildCard),
TestLabel "Constant" ( getRuleCat (Function "f" (Fun "A" [])) ~?= "A"),
TestLabel "Constant" ( getRuleCat (Function "f" (Fun "A" ["A","B"])) ~?= "A")
]
hunit_getRuleName_test =
TestList [
TestLabel "NoType" ( getRuleName (Function "f" NoType) ~?= "f"),
TestLabel "Constant" ( getRuleName (Function "g" (Fun "A" [])) ~?= "g"),
TestLabel "Constant" ( getRuleName (Function "h" (Fun "A" ["A","B"])) ~?= "h")
]
hunit_getRulesSet_test =
let
rule1 = Function "r1" (Fun "A" [])
rule2 = Function "r2" (Fun "A" ["A"])
rule3 = Function "r3" (Fun "B" ["A"])
rule4 = Function "r4" (Fun "A" ["A","A"])
grammar = Grammar "S"
[ rule2, rule3, rule4 ]
[ rule1 ]
emptyPGF
in
TestList [
TestLabel "Empty Grammar" ( getRulesSet (getAllRules emptyGrammar) [] ~?= empty),
TestLabel "No categories" ( getRulesSet (getAllRules grammar) [] ~?= empty),
TestLabel "No match" ( getRulesSet (getAllRules grammar) ["Z"] ~?= empty),
TestLabel "One match" ( getRulesSet (getAllRules grammar) ["B"] ~?= fromList [rule3]),
TestLabel "Three matches" ( getRulesSet (getAllRules grammar) ["A"] ~?= fromList [rule1, rule2, rule4]),
TestLabel "All matches" ( getRulesSet (getAllRules grammar) ["A","B"] ~?= fromList (getAllRules grammar))
]
hunit_getRulesList_test =
let
rule1 = Function "r1" (Fun "A" [])
rule2 = Function "r2" (Fun "A" ["A"])
rule3 = Function "r3" (Fun "B" ["A"])
rule4 = Function "r4" (Fun "A" ["A","A"])
grammar = Grammar "S"
[ rule2, rule3, rule4 ]
[ rule1 ]
emptyPGF
in
TestList [
TestLabel "Empty Grammar" ( getRulesList (getAllRules emptyGrammar) [] ~?= []),
TestLabel "No categories" ( getRulesList (getAllRules grammar) [] ~?= []),
TestLabel "No match" ( getRulesList (getAllRules grammar) ["Z"] ~?= []),
TestLabel "One match" ( getRulesList (getAllRules grammar) ["B"] ~?= [rule3]),
TestLabel "Three matches" ( getRulesList (getAllRules grammar) ["A"] ~?= [rule2, rule4, rule1]),
TestLabel "All matches" ( ( sort $ getRulesList (getAllRules grammar) ["A","B"]) ~?= (sort $ getAllRules grammar))
]
hunit_getAllRules_test =
let
rule1 = Function "r1" (Fun "A" [])
rule2 = Function "r2" (Fun "B" [])
rule3 = Function "r3" (Fun "A" ["A","A"])
rule4 = Function "r4" (Fun "A" ["B","A"])
grammar1 = Grammar "S" [] [] emptyPGF
grammar2 = Grammar "S" [ rule3, rule4] [] emptyPGF
grammar3 = Grammar "S" [] [ rule1, rule2 ] emptyPGF
grammar4 = Grammar "S" [ rule3, rule4] [ rule1, rule2 ] emptyPGF
in
TestList [
TestLabel "Empty Grammar 1" ( (getAllRules emptyGrammar) ~?= []),
TestLabel "Empty Grammar 2" ( (getAllRules grammar1) ~?= []),
TestLabel "Partial Grammar 1" ( (getAllRules grammar2) ~?= [rule3,rule4]),
TestLabel "Partial Grammar 2" ( (getAllRules grammar3) ~?= [rule1,rule2]),
TestLabel "Full Grammar 1" ( (getAllRules grammar4) ~?= [rule3,rule4,rule1,rule2])
]
hunit_pgfToGrammar_test =
let
pgf = readPGF "gf/ABCAbs.pgf"
grammar = fmap (Grammar
"S"
[
Function "f" (Fun "A" ["A","B"]),
Function "g" (Fun "B" ["B","C"]),
Function "h" (Fun "A" ["A","A","A"]),
Function "s" (Fun "S" ["A"])
]
[
Function "a" (Fun "A" []),
Function "b" (Fun "B" []),
Function "c" (Fun "C" [])
]
) pgf
in
TestList [
TestLabel "Non-empty PGF" $ TestCase $ join $ liftM2 (@?=) (fmap pgfToGrammar pgf ) grammar
]
eq_tests = TestList [
TestLabel "Eq Grammar ===" hunit_Eq_Grammar_eq_test
]
show_tests = TestList [
TestLabel "Show Grammar show" hunit_Show_Grammar_show_test
]
grammar_function_tests =
TestList [
TestLabel "isEmptyPGF" hunit_isEmptyPGF_test,
TestLabel "isEmptyGrammar" hunit_isEmptyGrammar_test,
TestLabel "getFunTypeWithPGF" hunit_getFunTypeWithPGF_test,
TestLabel "getFunTypeWithPGF" hunit_getFunTypeWithGrammar_test,
TestLabel "getFunCat" hunit_getFunCat_test,
TestLabel "getRuleCat" hunit_getRuleCat_test,
TestLabel "getRuleName" hunit_getRuleName_test,
TestLabel "getRulesSet" hunit_getRulesSet_test,
TestLabel "getRulesList" hunit_getRulesList_test,
TestLabel "getAllRules" hunit_getAllRules_test,
TestLabel "pgfToGrammar" hunit_pgfToGrammar_test
]
prop_FunTypeReadShowIdentity :: FunType -> Bool
prop_FunTypeReadShowIdentity fun =
read (show fun) == fun
prop_funTypeEquality :: PGF -> Property
prop_funTypeEquality pgf =
let
grammar = pgfToGrammar pgf
funs = functions pgf
in
property $ and $ map (\f -> getFunTypeWithPGF pgf f == getFunTypeWithGrammar grammar (showCId f)) funs
prop_grammarLexRulesNotEmpty :: Grammar -> Property
prop_grammarLexRulesNotEmpty g = property $ not $ null $ lexrules g
prop_grammarSynRulesNotEmpty :: Grammar -> Property
prop_grammarSynRulesNotEmpty g = property $ not $ null $ synrules g
prop_grammarHasRulesForAllCats :: Grammar -> Property
prop_grammarHasRulesForAllCats g =
let
test c =
property $ not $ null $ getRulesList (getAllRules g) [c]
cats = nub $ concat $ map (\(Function _ (Fun c cs)) -> c:cs) $ (getAllRules g)
in
(not $ isEmptyGrammar g) ==> conjoin (map test cats)
hunit_tests = TestList [eq_tests,show_tests,grammar_function_tests]
quickcheck_tests :: [(TestName,Property)]
quickcheck_tests = [
("Grammar FunType show read equality",property prop_FunTypeReadShowIdentity),
("Grammar has lexical rules",property prop_grammarLexRulesNotEmpty),
("Grammar has syntactic rules",property prop_grammarSynRulesNotEmpty),
("Grammar has rules for all categories",property prop_grammarHasRulesForAllCats)
]
| daherb/Haskell-Muste | muste-lib/Test/Grammar.hs | artistic-2.0 | 10,978 | 0 | 17 | 2,532 | 3,458 | 1,790 | 1,668 | 229 | 1 |
-- Testing entry file, Main.hs
module Main (main) where
import SimpleJSON
main = print (JObject [("foo", JNumber 1), ("bar", JBool False)])
| ploverlake/practice_of_haskell | test/Main.hs | bsd-2-clause | 142 | 0 | 10 | 23 | 51 | 30 | 21 | 3 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Language.Haskell.Exts.Pretty
-- Copyright : (c) Niklas Broberg 2004-2009,
-- (c) The GHC Team, Noel Winstanley 1997-2000
-- License : BSD-style (see the file LICENSE.txt)
--
-- Maintainer : Niklas Broberg, [email protected]
-- Stability : stable
-- Portability : portable
--
-- Pretty printer for Haskell with extensions.
--
-----------------------------------------------------------------------------
module Pretty (
-- * Pretty printing
Pretty,
prettyPrintStyleMode, prettyPrintWithMode, prettyPrint,
-- * Pretty-printing styles (from "Text.PrettyPrint.HughesPJ")
P.Style(..), P.style, P.Mode(..),
-- * Haskell formatting modes
PPHsMode(..), Indent, PPLayout(..), defaultMode) where
import Language.Haskell.Exts.Syntax
import qualified Language.Haskell.Exts.Annotated.Syntax as A
import Language.Haskell.Exts.Annotated.Simplify
import Language.Haskell.Exts.SrcLoc
import qualified Text.PrettyPrint as P
import Data.List (intersperse)
infixl 5 $$$
-----------------------------------------------------------------------------
-- | Varieties of layout we can use.
data PPLayout = PPOffsideRule -- ^ classical layout
| PPSemiColon -- ^ classical layout made explicit
| PPInLine -- ^ inline decls, with newlines between them
| PPNoLayout -- ^ everything on a single line
deriving Eq
type Indent = Int
-- | Pretty-printing parameters.
--
-- /Note:/ the 'onsideIndent' must be positive and less than all other indents.
data PPHsMode = PPHsMode {
-- | indentation of a class or instance
classIndent :: Indent,
-- | indentation of a @do@-expression
doIndent :: Indent,
-- | indentation of the body of a
-- @case@ expression
caseIndent :: Indent,
-- | indentation of the declarations in a
-- @let@ expression
letIndent :: Indent,
-- | indentation of the declarations in a
-- @where@ clause
whereIndent :: Indent,
-- | indentation added for continuation
-- lines that would otherwise be offside
onsideIndent :: Indent,
-- | blank lines between statements?
spacing :: Bool,
-- | Pretty-printing style to use
layout :: PPLayout,
-- | add GHC-style @LINE@ pragmas to output?
linePragmas :: Bool
}
-- | The default mode: pretty-print using the offside rule and sensible
-- defaults.
defaultMode :: PPHsMode
defaultMode = PPHsMode{
classIndent = 8,
doIndent = 3,
caseIndent = 4,
letIndent = 4,
whereIndent = 6,
onsideIndent = 2,
spacing = True,
layout = PPOffsideRule,
linePragmas = False
}
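-- | A usage sketch (not part of the original interface): a mode derived from
-- 'defaultMode' by record update, printing declarations in-line and without
-- blank lines between statements.
inLineMode :: PPHsMode
inLineMode = defaultMode { layout = PPInLine, spacing = False }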
-- | Pretty printing monad
newtype DocM s a = DocM (s -> a)
instance Functor (DocM s) where
fmap f xs = do x <- xs; return (f x)
instance Monad (DocM s) where
(>>=) = thenDocM
(>>) = then_DocM
return = retDocM
{-# INLINE thenDocM #-}
{-# INLINE then_DocM #-}
{-# INLINE retDocM #-}
{-# INLINE unDocM #-}
{-# INLINE getPPEnv #-}
thenDocM :: DocM s a -> (a -> DocM s b) -> DocM s b
thenDocM m k = DocM $ (\s -> case unDocM m $ s of a -> unDocM (k a) $ s)
then_DocM :: DocM s a -> DocM s b -> DocM s b
then_DocM m k = DocM $ (\s -> case unDocM m $ s of _ -> unDocM k $ s)
retDocM :: a -> DocM s a
retDocM a = DocM (\_s -> a)
unDocM :: DocM s a -> (s -> a)
unDocM (DocM f) = f
-- all this extra stuff, just for this one function.
getPPEnv :: DocM s s
getPPEnv = DocM id
-- So that pp code still looks the same
-- this means we lose some generality though
-- | The document type produced by these pretty printers uses a 'PPHsMode'
-- environment.
type Doc = DocM PPHsMode P.Doc
-- | Things that can be pretty-printed, including all the syntactic objects
-- in "Language.Haskell.Exts.Syntax" and "Language.Haskell.Exts.Annotated.Syntax".
class Pretty a where
-- | Pretty-print something in isolation.
pretty :: a -> Doc
-- | Pretty-print something in a precedence context.
prettyPrec :: Int -> a -> Doc
pretty = prettyPrec 0
prettyPrec _ = pretty
-- The pretty printing combinators
empty :: Doc
empty = return P.empty
nest :: Int -> Doc -> Doc
nest i m = m >>= return . P.nest i
-- Literals
text, ptext :: String -> Doc
text = return . P.text
ptext = return . P.text
zeroWidthText :: String -> Doc
zeroWidthText = return . P.zeroWidthText
char :: Char -> Doc
char = return . P.char
int :: Int -> Doc
int = return . P.int
integer :: Integer -> Doc
integer = return . P.integer
float :: Float -> Doc
float = return . P.float
double :: Double -> Doc
double = return . P.double
rational :: Rational -> Doc
rational = return . P.rational
-- Simple Combining Forms
parens, brackets, braces,quotes,doubleQuotes :: Doc -> Doc
parens d = d >>= return . P.parens
brackets d = d >>= return . P.brackets
braces d = d >>= return . P.braces
quotes d = d >>= return . P.quotes
doubleQuotes d = d >>= return . P.doubleQuotes
parensIf :: Bool -> Doc -> Doc
parensIf True = parens
parensIf False = id
-- Constants
semi,comma,colon,space,equals :: Doc
semi = return P.semi
comma = return P.comma
colon = return P.colon
space = return P.space
equals = return P.equals
lparen,rparen,lbrack,rbrack,lbrace,rbrace :: Doc
lparen = return P.lparen
rparen = return P.rparen
lbrack = return P.lbrack
rbrack = return P.rbrack
lbrace = return P.lbrace
rbrace = return P.rbrace
-- Combinators
(<>),(<+>),($$),($+$) :: Doc -> Doc -> Doc
aM <> bM = do{a<-aM;b<-bM;return (a P.<> b)}
aM <+> bM = do{a<-aM;b<-bM;return (a P.<+> b)}
aM $$ bM = do{a<-aM;b<-bM;return (a P.$$ b)}
aM $+$ bM = do{a<-aM;b<-bM;return (a P.$+$ b)}
hcat,hsep,vcat,sep,cat,fsep,fcat :: [Doc] -> Doc
hcat dl = sequence dl >>= return . P.hcat
hsep dl = sequence dl >>= return . P.hsep
vcat dl = sequence dl >>= return . P.vcat
sep dl = sequence dl >>= return . P.sep
cat dl = sequence dl >>= return . P.cat
fsep dl = sequence dl >>= return . P.fsep
fcat dl = sequence dl >>= return . P.fcat
-- Some More
hang :: Doc -> Int -> Doc -> Doc
hang dM i rM = do{d<-dM;r<-rM;return $ P.hang d i r}
-- Yuk, had to cut-n-paste this one from Pretty.hs
punctuate :: Doc -> [Doc] -> [Doc]
punctuate _ [] = []
punctuate p (d1:ds) = go d1 ds
where
go d [] = [d]
go d (e:es) = (d <> p) : go e es
-- | render the document with a given style and mode.
renderStyleMode :: P.Style -> PPHsMode -> Doc -> String
renderStyleMode ppStyle ppMode d = P.renderStyle ppStyle . unDocM d $ ppMode
-- | render the document with a given mode.
renderWithMode :: PPHsMode -> Doc -> String
renderWithMode = renderStyleMode P.style
-- | render the document with 'defaultMode'.
render :: Doc -> String
render = renderWithMode defaultMode
-- | pretty-print with a given style and mode.
prettyPrintStyleMode :: Pretty a => P.Style -> PPHsMode -> a -> String
prettyPrintStyleMode ppStyle ppMode = renderStyleMode ppStyle ppMode . pretty
-- | pretty-print with the default style and a given mode.
prettyPrintWithMode :: Pretty a => PPHsMode -> Int -> a -> String
prettyPrintWithMode mode lineLength = prettyPrintStyleMode P.style{P.lineLength = lineLength} mode
-- | pretty-print with the default style and 'defaultMode'.
prettyPrint :: Pretty a => Int -> a -> String
prettyPrint lineLength = prettyPrintWithMode defaultMode lineLength
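-- | A usage sketch (not part of the original interface): render any 'Pretty'
-- value at the given line width with explicit braces and semicolons instead
-- of layout.
prettyPrintExplicit :: Pretty a => Int -> a -> String
prettyPrintExplicit width = prettyPrintWithMode (defaultMode { layout = PPSemiColon }) width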
fullRenderWithMode :: PPHsMode -> P.Mode -> Int -> Float ->
(P.TextDetails -> a -> a) -> a -> Doc -> a
fullRenderWithMode ppMode m i f fn e mD =
P.fullRender m i f fn e $ (unDocM mD) ppMode
fullRender :: P.Mode -> Int -> Float -> (P.TextDetails -> a -> a)
-> a -> Doc -> a
fullRender = fullRenderWithMode defaultMode
------------------------- Pretty-Print a Module --------------------
instance Pretty Module where
pretty (Module pos m os mbWarn mbExports imp decls) =
markLine pos $
myVcat $ map pretty os ++
(if m == ModuleName "" then id
else \x -> [topLevel (ppModuleHeader m mbWarn mbExports) x])
(map pretty imp ++ map pretty decls)
-------------------------- Module Header ------------------------------
ppModuleHeader :: ModuleName -> Maybe WarningText -> Maybe [ExportSpec] -> Doc
ppModuleHeader m mbWarn mbExportList = mySep [
text "module",
pretty m,
maybePP ppWarnTxt mbWarn,
maybePP (parenList . map pretty) mbExportList,
text "where"]
ppWarnTxt :: WarningText -> Doc
ppWarnTxt (DeprText s) = mySep [text "{-# DEPRECATED", text s, text "#-}"]
ppWarnTxt (WarnText s) = mySep [text "{-# WARNING", text s, text "#-}"]
instance Pretty ModuleName where
pretty (ModuleName modName) = text modName
instance Pretty ExportSpec where
pretty (EVar name) = pretty name
pretty (EAbs name) = pretty name
pretty (EThingAll name) = pretty name <> text "(..)"
pretty (EThingWith name nameList) =
pretty name <> (parenList . map pretty $ nameList)
pretty (EModuleContents m) = text "module" <+> pretty m
instance Pretty ImportDecl where
pretty (ImportDecl pos m qual src mbPkg mbName mbSpecs) =
markLine pos $
mySep [text "import",
if src then text "{-# SOURCE #-}" else empty,
if qual then text "qualified" else empty,
maybePP (\s -> text (show s)) mbPkg,
pretty m,
maybePP (\m' -> text "as" <+> pretty m') mbName,
maybePP exports mbSpecs]
where
exports (b,specList) =
if b then text "hiding" <+> specs else specs
where specs = parenList . map pretty $ specList
instance Pretty ImportSpec where
pretty (IVar name) = pretty name
pretty (IAbs name) = pretty name
pretty (IThingAll name) = pretty name <> text "(..)"
pretty (IThingWith name nameList) =
pretty name <> (parenList . map pretty $ nameList)
------------------------- Declarations ------------------------------
instance Pretty Decl where
pretty (TypeDecl loc name nameList htype) =
blankline $
markLine loc $
mySep ( [text "type", pretty name]
++ map pretty nameList
++ [equals, pretty htype])
pretty (DataDecl loc don context name nameList constrList derives) =
blankline $
markLine loc $
mySep ( [pretty don, ppContext context, pretty name]
++ map pretty nameList)
<+> (myVcat (zipWith (<+>) (equals : repeat (char '|'))
(map pretty constrList))
$$$ ppDeriving derives)
pretty (GDataDecl loc don context name nameList optkind gadtList derives) =
blankline $
markLine loc $
mySep ( [pretty don, ppContext context, pretty name]
++ map pretty nameList ++ ppOptKind optkind ++ [text "where"])
$$$ ppBody classIndent (map pretty gadtList)
$$$ ppDeriving derives
pretty (TypeFamDecl loc name nameList optkind) =
blankline $
markLine loc $
mySep ([text "type", text "family", pretty name]
++ map pretty nameList
++ ppOptKind optkind)
pretty (DataFamDecl loc context name nameList optkind) =
blankline $
markLine loc $
mySep ( [text "data", text "family", ppContext context, pretty name]
++ map pretty nameList ++ ppOptKind optkind)
pretty (TypeInsDecl loc ntype htype) =
blankline $
markLine loc $
mySep [text "type", text "instance", pretty ntype, equals, pretty htype]
pretty (DataInsDecl loc don ntype constrList derives) =
blankline $
markLine loc $
mySep [pretty don, text "instance", pretty ntype]
<+> (myVcat (zipWith (<+>) (equals : repeat (char '|'))
(map pretty constrList))
$$$ ppDeriving derives)
pretty (GDataInsDecl loc don ntype optkind gadtList derives) =
blankline $
markLine loc $
mySep ( [pretty don, text "instance", pretty ntype]
++ ppOptKind optkind ++ [text "where"])
$$$ ppBody classIndent (map pretty gadtList)
$$$ ppDeriving derives
--m{spacing=False}
-- special case for empty class declaration
pretty (ClassDecl pos context name nameList fundeps []) =
blankline $
markLine pos $
mySep ( [text "class", ppContext context, pretty name]
++ map pretty nameList ++ [ppFunDeps fundeps])
pretty (ClassDecl pos context name nameList fundeps declList) =
blankline $
markLine pos $
mySep ( [text "class", ppContext context, pretty name]
++ map pretty nameList ++ [ppFunDeps fundeps, text "where"])
$$$ ppBody classIndent (map pretty declList)
-- m{spacing=False}
-- special case for empty instance declaration
pretty (InstDecl pos context name args []) =
blankline $
markLine pos $
mySep ( [text "instance", ppContext context, pretty name]
++ map ppAType args)
pretty (InstDecl pos context name args declList) =
blankline $
markLine pos $
mySep ( [text "instance", ppContext context, pretty name]
++ map ppAType args ++ [text "where"])
$$$ ppBody classIndent (map pretty declList)
pretty (DerivDecl pos context name args) =
blankline $
markLine pos $
mySep ( [text "deriving", text "instance", ppContext context, pretty name]
++ map ppAType args)
pretty (DefaultDecl pos htypes) =
blankline $
markLine pos $
text "default" <+> parenList (map pretty htypes)
pretty (SpliceDecl pos splice) =
blankline $
markLine pos $
pretty splice
pretty (TypeSig pos nameList qualType) =
blankline $
markLine pos $
mySep ((punctuate comma . map pretty $ nameList)
++ [text "::", pretty qualType])
pretty (FunBind matches) = do
e <- fmap layout getPPEnv
case e of PPOffsideRule -> foldr ($$$) empty (map pretty matches)
_ -> foldr (\x y -> x <> semi <> y) empty (map pretty matches)
pretty (PatBind pos pat optsig rhs whereBinds) =
markLine pos $
myFsep [pretty pat, maybePP ppSig optsig, pretty rhs] $$$ ppWhere whereBinds
pretty (InfixDecl pos assoc prec opList) =
blankline $
markLine pos $
mySep ([pretty assoc, int prec]
++ (punctuate comma . map pretty $ opList))
pretty (ForImp pos cconv saf str name typ) =
blankline $
markLine pos $
mySep [text "foreign import", pretty cconv, pretty saf,
text (show str), pretty name, text "::", pretty typ]
pretty (ForExp pos cconv str name typ) =
blankline $
markLine pos $
mySep [text "foreign export", pretty cconv,
text (show str), pretty name, text "::", pretty typ]
pretty (RulePragmaDecl pos rules) =
blankline $
markLine pos $
myVcat $ text "{-# RULES" : map pretty rules ++ [text " #-}"]
pretty (DeprPragmaDecl pos deprs) =
blankline $
markLine pos $
myVcat $ text "{-# DEPRECATED" : map ppWarnDepr deprs ++ [text " #-}"]
pretty (WarnPragmaDecl pos deprs) =
blankline $
markLine pos $
myVcat $ text "{-# WARNING" : map ppWarnDepr deprs ++ [text " #-}"]
pretty (InlineSig pos inl activ name) =
blankline $
markLine pos $
mySep [text (if inl then "{-# INLINE" else "{-# NOINLINE"), pretty activ, pretty name, text "#-}"]
pretty (InlineConlikeSig pos activ name) =
blankline $
markLine pos $
mySep [text "{-# INLINE_CONLIKE", pretty activ, pretty name, text "#-}"]
pretty (SpecSig pos name types) =
blankline $
markLine pos $
mySep $ [text "{-# SPECIALISE", pretty name, text "::"]
++ punctuate comma (map pretty types) ++ [text "#-}"]
pretty (SpecInlineSig pos inl activ name types) =
blankline $
markLine pos $
mySep $ [text "{-# SPECIALISE", text (if inl then "INLINE" else "NOINLINE"),
pretty activ, pretty name, text "::"]
++ (punctuate comma $ map pretty types) ++ [text "#-}"]
pretty (InstSig pos context name args) =
blankline $
markLine pos $
mySep $ [text "{-# SPECIALISE", text "instance", ppContext context, pretty name]
++ map ppAType args ++ [text "#-}"]
pretty (AnnPragma pos ann) =
blankline $
markLine pos $
mySep $ [text "{-# ANN", pretty ann, text "#-}"]
instance Pretty Annotation where
pretty (Ann n e) = myFsep [pretty n, pretty e]
pretty (TypeAnn n e) = myFsep [text "type", pretty n, pretty e]
pretty (ModuleAnn e) = myFsep [text "module", pretty e]
instance Pretty DataOrNew where
pretty DataType = text "data"
pretty NewType = text "newtype"
instance Pretty Assoc where
pretty AssocNone = text "infix"
pretty AssocLeft = text "infixl"
pretty AssocRight = text "infixr"
instance Pretty Match where
pretty (Match pos f ps optsig rhs whereBinds) =
markLine pos $
myFsep (lhs ++ [maybePP ppSig optsig, pretty rhs])
$$$ ppWhere whereBinds
where
lhs = case ps of
l:r:ps' | isSymbolName f ->
let hd = [pretty l, ppName f, pretty r] in
if null ps' then hd
else parens (myFsep hd) : map (prettyPrec 2) ps'
_ -> pretty f : map (prettyPrec 2) ps
ppWhere :: Binds -> Doc
ppWhere (BDecls []) = empty
ppWhere (BDecls l) = nest 2 (text "where" $$$ ppBody whereIndent (map pretty l))
ppWhere (IPBinds b) = nest 2 (text "where" $$$ ppBody whereIndent (map pretty b))
ppSig :: Type -> Doc
ppSig t = text "::" <+> pretty t
instance Pretty ClassDecl where
pretty (ClsDecl decl) = pretty decl
pretty (ClsDataFam loc context name nameList optkind) =
markLine loc $
mySep ( [text "data", ppContext context, pretty name]
++ map pretty nameList ++ ppOptKind optkind)
pretty (ClsTyFam loc name nameList optkind) =
markLine loc $
mySep ( [text "type", pretty name]
++ map pretty nameList ++ ppOptKind optkind)
pretty (ClsTyDef loc ntype htype) =
markLine loc $
mySep [text "type", pretty ntype, equals, pretty htype]
instance Pretty InstDecl where
pretty (InsDecl decl) = pretty decl
pretty (InsType loc ntype htype) =
markLine loc $
mySep [text "type", pretty ntype, equals, pretty htype]
pretty (InsData loc don ntype constrList derives) =
markLine loc $
mySep [pretty don, pretty ntype]
<+> (myVcat (zipWith (<+>) (equals : repeat (char '|'))
(map pretty constrList))
$$$ ppDeriving derives)
pretty (InsGData loc don ntype optkind gadtList derives) =
markLine loc $
mySep ( [pretty don, pretty ntype]
++ ppOptKind optkind ++ [text "where"])
$$$ ppBody classIndent (map pretty gadtList)
$$$ ppDeriving derives
-- pretty (InsInline loc inl activ name) =
-- markLine loc $
-- mySep [text (if inl then "{-# INLINE" else "{-# NOINLINE"), pretty activ, pretty name, text "#-}"]
------------------------- FFI stuff -------------------------------------
instance Pretty Safety where
pretty PlayRisky = text "unsafe"
pretty (PlaySafe b) = text $ if b then "threadsafe" else "safe"
instance Pretty CallConv where
pretty StdCall = text "stdcall"
pretty CCall = text "ccall"
------------------------- Pragmas ---------------------------------------
ppWarnDepr :: ([Name], String) -> Doc
ppWarnDepr (names, txt) = mySep $ (punctuate comma $ map pretty names) ++ [text $ show txt]
instance Pretty Rule where
pretty (Rule tag activ rvs rhs lhs) =
mySep $ [text $ show tag, pretty activ,
maybePP ppRuleVars rvs,
pretty rhs, char '=', pretty lhs]
ppRuleVars :: [RuleVar] -> Doc
ppRuleVars [] = empty
ppRuleVars rvs = mySep $ text "forall" : map pretty rvs ++ [char '.']
instance Pretty Activation where
pretty AlwaysActive = empty
pretty (ActiveFrom i) = char '[' <> int i <> char ']'
pretty (ActiveUntil i) = text "[~" <> int i <> char ']'
instance Pretty RuleVar where
pretty (RuleVar n) = pretty n
pretty (TypedRuleVar n t) = mySep [pretty n, text "::", pretty t]
instance Pretty ModulePragma where
pretty (LanguagePragma _ ns) =
myFsep $ text "{-# LANGUAGE" : punctuate (char ',') (map pretty ns) ++ [text "#-}"]
pretty (OptionsPragma _ (Just tool) s) =
myFsep $ [text "{-# OPTIONS_" <> pretty tool, text s, text "#-}"]
pretty (OptionsPragma _ _ s) =
myFsep $ [text "{-# OPTIONS", text s, text "#-}"]
pretty (AnnModulePragma _ ann) =
myFsep $ [text "{-# ANN", pretty ann, text "#-}"]
instance Pretty Tool where
pretty (UnknownTool s) = text s
pretty t = text $ show t
------------------------- Data & Newtype Bodies -------------------------
instance Pretty QualConDecl where
pretty (QualConDecl _pos tvs ctxt con) =
myFsep [ppForall (Just tvs), ppContext ctxt, pretty con]
instance Pretty GadtDecl where
pretty (GadtDecl _pos name ty) =
myFsep [pretty name, text "::", pretty ty]
instance Pretty ConDecl where
pretty (RecDecl name fieldList) =
pretty name <> (braceList . map ppField $ fieldList)
{- pretty (ConDecl name@(Symbol _) [l, r]) =
myFsep [prettyPrec prec_btype l, ppName name,
prettyPrec prec_btype r] -}
pretty (ConDecl name typeList) =
mySep $ ppName name : map (prettyPrec prec_atype) typeList
pretty (InfixConDecl l name r) =
myFsep [prettyPrec prec_btype l, ppNameInfix name,
prettyPrec prec_btype r]
ppField :: ([Name],BangType) -> Doc
ppField (names, ty) =
myFsepSimple $ (punctuate comma . map pretty $ names) ++
[text "::", pretty ty]
instance Pretty BangType where
prettyPrec _ (BangedTy ty) = char '!' <> ppAType ty
prettyPrec p (UnBangedTy ty) = prettyPrec p ty
prettyPrec p (UnpackedTy ty) = text "{-# UNPACK #-}" <+> char '!' <> prettyPrec p ty
ppDeriving :: [Deriving] -> Doc
ppDeriving [] = empty
ppDeriving [(d, [])] = text "deriving" <+> ppQName d
ppDeriving ds = text "deriving" <+> parenList (map ppDer ds)
where ppDer :: (QName, [Type]) -> Doc
ppDer (n, ts) = mySep (pretty n : map pretty ts)
------------------------- Types -------------------------
ppBType :: Type -> Doc
ppBType = prettyPrec prec_btype
ppAType :: Type -> Doc
ppAType = prettyPrec prec_atype
-- precedences for types
prec_btype, prec_atype :: Int
prec_btype = 1 -- left argument of ->,
-- or either argument of an infix data constructor
prec_atype = 2 -- argument of type or data constructor, or of a class
instance Pretty Type where
prettyPrec p (TyForall mtvs ctxt htype) = parensIf (p > 0) $
myFsep [ppForall mtvs, ppContext ctxt, pretty htype]
prettyPrec p (TyFun a b) = parensIf (p > 0) $
myFsep [ppBType a, text "->", pretty b]
prettyPrec _ (TyTuple bxd l) =
let ds = map pretty l
in case bxd of
Boxed -> parenList ds
Unboxed -> hashParenList ds
prettyPrec _ (TyList t) = brackets $ pretty t
prettyPrec p (TyApp a b) =
{-
| a == list_tycon = brackets $ pretty b -- special case
| otherwise = -} parensIf (p > prec_btype) $
myFsep [pretty a, ppAType b]
prettyPrec _ (TyVar name) = pretty name
prettyPrec _ (TyCon name) = pretty name
prettyPrec _ (TyParen t) = parens (pretty t)
-- prettyPrec _ (TyPred asst) = pretty asst
prettyPrec _ (TyInfix a op b) = myFsep [pretty a, ppQNameInfix op, pretty b]
prettyPrec _ (TyKind t k) = parens (myFsep [pretty t, text "::", pretty k])
instance Pretty TyVarBind where
pretty (KindedVar var kind) = parens $ myFsep [pretty var, text "::", pretty kind]
pretty (UnkindedVar var) = pretty var
ppForall :: Maybe [TyVarBind] -> Doc
ppForall Nothing = empty
ppForall (Just []) = empty
ppForall (Just vs) = myFsep (text "forall" : map pretty vs ++ [char '.'])
---------------------------- Kinds ----------------------------
instance Pretty Kind where
pretty KindStar = text "*"
pretty KindBang = text "!"
pretty (KindFn a b) = myFsep [pretty a, text "->", pretty b]
pretty (KindParen k) = parens $ pretty k
pretty (KindVar n) = pretty n
ppOptKind :: Maybe Kind -> [Doc]
ppOptKind Nothing = []
ppOptKind (Just k) = [text "::", pretty k]
------------------- Functional Dependencies -------------------
instance Pretty FunDep where
pretty (FunDep from to) =
myFsep $ map pretty from ++ [text "->"] ++ map pretty to
ppFunDeps :: [FunDep] -> Doc
ppFunDeps [] = empty
ppFunDeps fds = myFsep $ (char '|':) . punctuate comma . map pretty $ fds
------------------------- Expressions -------------------------
instance Pretty Rhs where
pretty (UnGuardedRhs e) = equals <+> pretty e
pretty (GuardedRhss guardList) = myVcat . map pretty $ guardList
instance Pretty GuardedRhs where
pretty (GuardedRhs _pos guards ppBody) =
myFsep $ [char '|'] ++ (punctuate comma . map pretty $ guards) ++ [equals, pretty ppBody]
instance Pretty Literal where
pretty (Int i) = integer i
pretty (Char c) = text (show c)
pretty (String s) = text (show s)
pretty (Frac r) = double (fromRational r)
-- GHC unboxed literals:
pretty (PrimChar c) = text (show c) <> char '#'
pretty (PrimString s) = text (show s) <> char '#'
pretty (PrimInt i) = integer i <> char '#'
pretty (PrimWord w) = integer w <> text "##"
pretty (PrimFloat r) = float (fromRational r) <> char '#'
pretty (PrimDouble r) = double (fromRational r) <> text "##"
instance Pretty Exp where
prettyPrec _ (Lit l) = pretty l
-- lambda stuff
prettyPrec p (InfixApp a op b) = parensIf (p > 0) $ myFsep [pretty a, pretty op, pretty b]
prettyPrec _ (NegApp e) = parens $ myFsep [char '-', pretty e]
prettyPrec p (App a b) = parensIf (p > 0) $ myFsep [pretty a, prettyPrec 1 b]
prettyPrec p (Lambda _loc expList ppBody) = parensIf (p > 0) $ myFsep $
char '\\' : map pretty expList ++ [text "->", pretty ppBody]
-- keywords
-- two cases for lets
prettyPrec p (Let (BDecls declList) letBody) =
parensIf (p > 0) $ ppLetExp declList letBody
prettyPrec p (Let (IPBinds bindList) letBody) =
parensIf (p > 0) $ ppLetExp bindList letBody
prettyPrec p (If cond thenexp elsexp) = parensIf (p > 0) $
myFsep [text "if", pretty cond,
text "then", pretty thenexp,
text "else", pretty elsexp]
prettyPrec p (Case cond altList) = parensIf (p > 0) $
myFsep [text "case", pretty cond, text "of"]
$$$ ppBody caseIndent (map pretty altList)
prettyPrec p (Do stmtList) = parensIf (p > 0) $
text "do" $$$ ppBody doIndent (map pretty stmtList)
prettyPrec p (MDo stmtList) = parensIf (p > 0) $
text "mdo" $$$ ppBody doIndent (map pretty stmtList)
-- Constructors & Vars
prettyPrec _ (Var name) = pretty name
prettyPrec _ (IPVar ipname) = pretty ipname
prettyPrec _ (Con name) = pretty name
prettyPrec _ (Tuple expList) = parenList . map pretty $ expList
prettyPrec _ (TupleSection mExpList) = parenList . map (maybePP pretty) $ mExpList
-- weird stuff
prettyPrec _ (Paren e) = case e of
                              SCCPragma _ _ -> pretty e
                              _             -> parens . pretty $ e
prettyPrec _ (LeftSection e op) = parens (pretty e <+> pretty op)
prettyPrec _ (RightSection op e) = parens (pretty op <+> pretty e)
prettyPrec _ (RecConstr c fieldList) =
pretty c <> (braceList . map pretty $ fieldList)
prettyPrec _ (RecUpdate e fieldList) =
pretty e <> (braceList . map pretty $ fieldList)
-- Lists
prettyPrec _ (List list) =
bracketList . punctuate comma . map pretty $ list
prettyPrec _ (EnumFrom e) =
bracketList [pretty e, text ".."]
prettyPrec _ (EnumFromTo from to) =
bracketList [pretty from, text "..", pretty to]
prettyPrec _ (EnumFromThen from thenE) =
bracketList [pretty from <> comma, pretty thenE, text ".."]
prettyPrec _ (EnumFromThenTo from thenE to) =
bracketList [pretty from <> comma, pretty thenE,
text "..", pretty to]
prettyPrec _ (ListComp e qualList) =
bracketList ([pretty e, char '|']
++ (punctuate comma . map pretty $ qualList))
prettyPrec _ (ParComp e qualLists) =
bracketList (intersperse (char '|') $
pretty e : (punctuate comma . concatMap (map pretty) $ qualLists))
prettyPrec p (ExpTypeSig _pos e ty) = parensIf (p > 0) $
myFsep [pretty e, text "::", pretty ty]
-- Template Haskell
prettyPrec _ (BracketExp b) = pretty b
prettyPrec _ (SpliceExp s) = pretty s
prettyPrec _ (TypQuote t) = text "\'\'" <> pretty t
prettyPrec _ (VarQuote x) = text "\'" <> pretty x
prettyPrec _ (QuasiQuote n qt) = text ("[$" ++ n ++ "|" ++ qt ++ "|]")
-- Hsx
prettyPrec _ (XTag _ n attrs mattr cs) =
let ax = maybe [] (return . pretty) mattr
in hcat $
(myFsep $ (char '<' <> pretty n): map pretty attrs ++ ax ++ [char '>']):
map pretty cs ++ [myFsep $ [text "</" <> pretty n, char '>']]
prettyPrec _ (XETag _ n attrs mattr) =
let ax = maybe [] (return . pretty) mattr
in myFsep $ (char '<' <> pretty n): map pretty attrs ++ ax ++ [text "/>"]
prettyPrec _ (XPcdata s) = text s
prettyPrec _ (XExpTag e) =
myFsep $ [text "<%", pretty e, text "%>"]
-- Pragmas
prettyPrec p (CorePragma s e) = myFsep $ map text ["{-# CORE", show s, "#-}"] ++ [pretty e]
prettyPrec _ (SCCPragma s e) = zeroWidthText "<a class=info href=\"#\"><span>" <> zeroWidthText s <> zeroWidthText "</span><font style=\"background-color: #" <> zeroWidthText s <> zeroWidthText "\">" <> pretty e <> zeroWidthText "</font></a>"
prettyPrec _ (GenPragma s (a,b) (c,d) e) =
myFsep $ [text "{-# GENERATED", text $ show s,
int a, char ':', int b, char '-',
int c, char ':', int d, text "#-}", pretty e]
-- Arrows
prettyPrec p (Proc _ pat e) = parensIf (p > 0) $ myFsep $ [text "proc", pretty pat, text "->", pretty e]
prettyPrec p (LeftArrApp l r) = parensIf (p > 0) $ myFsep $ [pretty l, text "-<", pretty r]
prettyPrec p (RightArrApp l r) = parensIf (p > 0) $ myFsep $ [pretty l, text ">-", pretty r]
prettyPrec p (LeftArrHighApp l r) = parensIf (p > 0) $ myFsep $ [pretty l, text "-<<", pretty r]
prettyPrec p (RightArrHighApp l r) = parensIf (p > 0) $ myFsep $ [pretty l, text ">>-", pretty r]
instance Pretty XAttr where
pretty (XAttr n v) =
myFsep [pretty n, char '=', pretty v]
instance Pretty XName where
pretty (XName n) = text n
pretty (XDomName d n) = text d <> char ':' <> text n
--ppLetExp :: [Decl] -> Exp -> Doc
ppLetExp l b = myFsep [text "let" <+> ppBody letIndent (map pretty l),
text "in", pretty b]
ppWith binds = nest 2 (text "with" $$$ ppBody withIndent (map pretty binds))
withIndent = whereIndent
--------------------- Template Haskell -------------------------
instance Pretty Bracket where
pretty (ExpBracket e) = ppBracket "[|" e
pretty (PatBracket p) = ppBracket "[p|" p
pretty (TypeBracket t) = ppBracket "[t|" t
pretty (DeclBracket d) =
myFsep $ text "[d|" : map pretty d ++ [text "|]"]
ppBracket o x = myFsep [text o, pretty x, text "|]"]
instance Pretty Splice where
pretty (IdSplice s) = char '$' <> text s
pretty (ParenSplice e) =
myFsep [text "$(", pretty e, char ')']
------------------------- Patterns -----------------------------
instance Pretty Pat where
prettyPrec _ (PVar name) = pretty name
prettyPrec _ (PLit lit) = pretty lit
prettyPrec _ (PNeg p) = myFsep [char '-', pretty p]
prettyPrec p (PInfixApp a op b) = parensIf (p > 0) $
myFsep [pretty a, pretty (QConOp op), pretty b]
prettyPrec p (PApp n ps) = parensIf (p > 1) $
myFsep (pretty n : map pretty ps)
prettyPrec _ (PTuple ps) = parenList . map pretty $ ps
prettyPrec _ (PList ps) =
bracketList . punctuate comma . map pretty $ ps
prettyPrec _ (PParen p) = parens . pretty $ p
prettyPrec _ (PRec c fields) =
pretty c <> (braceList . map pretty $ fields)
-- special case that would otherwise be buggy
prettyPrec _ (PAsPat name (PIrrPat pat)) =
myFsep [pretty name <> char '@', char '~' <> pretty pat]
prettyPrec _ (PAsPat name pat) =
hcat [pretty name, char '@', pretty pat]
prettyPrec _ PWildCard = char '_'
prettyPrec _ (PIrrPat pat) = char '~' <> pretty pat
prettyPrec _ (PatTypeSig _pos pat ty) =
myFsep [pretty pat, text "::", pretty ty]
prettyPrec p (PViewPat e pat) = parensIf (p > 0) $
myFsep [pretty e, text "->", pretty pat]
prettyPrec p (PNPlusK n k) = parensIf (p > 0) $
myFsep [pretty n, text "+", text $ show k]
-- HaRP
prettyPrec _ (PRPat rs) =
bracketList . punctuate comma . map pretty $ rs
-- Hsx
prettyPrec _ (PXTag _ n attrs mattr cp) =
let ap = maybe [] (return . pretty) mattr
in hcat $ -- TODO: should not introduce blanks
(myFsep $ (char '<' <> pretty n): map pretty attrs ++ ap ++ [char '>']):
map pretty cp ++ [myFsep $ [text "</" <> pretty n, char '>']]
prettyPrec _ (PXETag _ n attrs mattr) =
let ap = maybe [] (return . pretty) mattr
in myFsep $ (char '<' <> pretty n): map pretty attrs ++ ap ++ [text "/>"]
prettyPrec _ (PXPcdata s) = text s
prettyPrec _ (PXPatTag p) =
myFsep $ [text "<%", pretty p, text "%>"]
prettyPrec _ (PXRPats ps) =
myFsep $ text "<[" : map pretty ps ++ [text "%>"]
-- Generics
prettyPrec _ (PExplTypeArg qn t) =
myFsep [pretty qn, text "{|", pretty t, text "|}"]
-- BangPatterns
prettyPrec _ (PBangPat p) = text "!" <> pretty p
instance Pretty PXAttr where
pretty (PXAttr n p) =
myFsep [pretty n, char '=', pretty p]
instance Pretty PatField where
pretty (PFieldPat name pat) =
myFsep [pretty name, equals, pretty pat]
pretty (PFieldPun name) = pretty name
pretty (PFieldWildcard) = text ".."
--------------------- Regular Patterns -------------------------
instance Pretty RPat where
pretty (RPOp r op) = pretty r <> pretty op
pretty (RPEither r1 r2) = parens . myFsep $
[pretty r1, char '|', pretty r2]
pretty (RPSeq rs) =
myFsep $ text "(/" : map pretty rs ++ [text "/)"]
pretty (RPGuard r gs) =
myFsep $ text "(|" : pretty r : char '|' : map pretty gs ++ [text "|)"]
-- special case that would otherwise be buggy
pretty (RPCAs n (RPPat (PIrrPat p))) =
myFsep [pretty n <> text "@:", char '~' <> pretty p]
pretty (RPCAs n r) = hcat [pretty n, text "@:", pretty r]
-- special case that would otherwise be buggy
pretty (RPAs n (RPPat (PIrrPat p))) =
myFsep [pretty n <> text "@:", char '~' <> pretty p]
pretty (RPAs n r) = hcat [pretty n, char '@', pretty r]
pretty (RPPat p) = pretty p
pretty (RPParen rp) = parens . pretty $ rp
instance Pretty RPatOp where
pretty RPStar = char '*'
pretty RPStarG = text "*!"
pretty RPPlus = char '+'
pretty RPPlusG = text "+!"
pretty RPOpt = char '?'
pretty RPOptG = text "?!"
------------------------- Case bodies -------------------------
instance Pretty Alt where
pretty (Alt _pos e gAlts binds) =
pretty e <+> pretty gAlts $$$ ppWhere binds
instance Pretty GuardedAlts where
pretty (UnGuardedAlt e) = text "->" <+> pretty e
pretty (GuardedAlts altList) = myVcat . map pretty $ altList
instance Pretty GuardedAlt where
pretty (GuardedAlt _pos guards body) =
myFsep $ char '|': (punctuate comma . map pretty $ guards) ++ [text "->", pretty body]
------------------------- Statements in monads, guards & list comprehensions -----
instance Pretty Stmt where
pretty (Generator _loc e from) =
pretty e <+> text "<-" <+> pretty from
pretty (Qualifier e) = pretty e
-- two cases for lets
pretty (LetStmt (BDecls declList)) =
ppLetStmt declList
pretty (LetStmt (IPBinds bindList)) =
ppLetStmt bindList
pretty (RecStmt stmtList) =
text "rec" $$$ ppBody letIndent (map pretty stmtList)
ppLetStmt l = text "let" $$$ ppBody letIndent (map pretty l)
instance Pretty QualStmt where
pretty (QualStmt s) = pretty s
pretty (ThenTrans f) = myFsep $ [text "then", pretty f]
pretty (ThenBy f e) = myFsep $ [text "then", pretty f, text "by", pretty e]
pretty (GroupBy e) = myFsep $ [text "then", text "group", text "by", pretty e]
pretty (GroupUsing f) = myFsep $ [text "then", text "group", text "using", pretty f]
pretty (GroupByUsing e f) = myFsep $ [text "then", text "group", text "by",
pretty e, text "using", pretty f]
------------------------- Record updates
instance Pretty FieldUpdate where
pretty (FieldUpdate name e) =
myFsep [pretty name, equals, pretty e]
pretty (FieldPun name) = pretty name
pretty (FieldWildcard) = text ".."
------------------------- Names -------------------------
instance Pretty QOp where
pretty (QVarOp n) = ppQNameInfix n
pretty (QConOp n) = ppQNameInfix n
ppQNameInfix :: QName -> Doc
ppQNameInfix name
| isSymbolName (getName name) = ppQName name
| otherwise = char '`' <> ppQName name <> char '`'
instance Pretty QName where
pretty name = case name of
UnQual (Symbol ('#':_)) -> char '(' <+> ppQName name <+> char ')'
_ -> parensIf (isSymbolName (getName name)) (ppQName name)
ppQName :: QName -> Doc
ppQName (UnQual name) = ppName name
ppQName (Qual m name) = pretty m <> char '.' <> ppName name
ppQName (Special sym) = text (specialName sym)
instance Pretty Op where
pretty (VarOp n) = ppNameInfix n
pretty (ConOp n) = ppNameInfix n
ppNameInfix :: Name -> Doc
ppNameInfix name
| isSymbolName name = ppName name
| otherwise = char '`' <> ppName name <> char '`'
instance Pretty Name where
pretty name = case name of
Symbol ('#':_) -> char '(' <+> ppName name <+> char ')'
_ -> parensIf (isSymbolName name) (ppName name)
ppName :: Name -> Doc
ppName (Ident s) = text s
ppName (Symbol s) = text s
instance Pretty IPName where
pretty (IPDup s) = char '?' <> text s
pretty (IPLin s) = char '%' <> text s
instance Pretty IPBind where
pretty (IPBind _loc ipname exp) =
myFsep [pretty ipname, equals, pretty exp]
instance Pretty CName where
pretty (VarName n) = pretty n
pretty (ConName n) = pretty n
instance Pretty SpecialCon where
pretty sc = text $ specialName sc
isSymbolName :: Name -> Bool
isSymbolName (Symbol _) = True
isSymbolName _ = False
getName :: QName -> Name
getName (UnQual s) = s
getName (Qual _ s) = s
getName (Special Cons) = Symbol ":"
getName (Special FunCon) = Symbol "->"
getName (Special s) = Ident (specialName s)
specialName :: SpecialCon -> String
specialName UnitCon = "()"
specialName ListCon = "[]"
specialName FunCon = "->"
specialName (TupleCon b n) = "(" ++ hash ++ replicate (n-1) ',' ++ hash ++ ")"
where hash = if b == Unboxed then "#" else ""
specialName Cons = ":"
ppContext :: Context -> Doc
ppContext [] = empty
ppContext context = mySep [parenList (map pretty context), text "=>"]
-- hacked for multi-parameter type classes
instance Pretty Asst where
pretty (ClassA a ts) = myFsep $ ppQName a : map ppAType ts
pretty (InfixA a op b) = myFsep $ [pretty a, ppQNameInfix op, pretty b]
pretty (IParam i t) = myFsep $ [pretty i, text "::", pretty t]
pretty (EqualP t1 t2) = myFsep $ [pretty t1, text "~", pretty t2]
-- Pretty print a source location, useful for printing out error messages
instance Pretty SrcLoc where
pretty srcLoc =
return $ P.hsep [ colonFollow (P.text $ srcFilename srcLoc)
, colonFollow (P.int $ srcLine srcLoc)
, P.int $ srcColumn srcLoc
]
colonFollow p = P.hcat [ p, P.colon ]
instance Pretty SrcSpan where
pretty srcSpan =
return $ P.hsep [ colonFollow (P.text $ srcSpanFilename srcSpan)
, P.hcat [ P.text "("
, P.int $ srcSpanStartLine srcSpan
, P.colon
, P.int $ srcSpanStartColumn srcSpan
, P.text ")"
]
, P.text "-"
, P.hcat [ P.text "("
, P.int $ srcSpanEndLine srcSpan
, P.colon
, P.int $ srcSpanEndColumn srcSpan
, P.text ")"
]
]
---------------------------------------------------------------------
-- Annotated version
------------------------- Pretty-Print a Module --------------------
instance SrcInfo pos => Pretty (A.Module pos) where
pretty (A.Module pos mbHead os imp decls) =
markLine pos $
myVcat $ map pretty os ++
(case mbHead of
Nothing -> id
Just h -> \x -> [topLevel (pretty h) x])
(map pretty imp ++ map pretty decls)
pretty (A.XmlPage pos _mn os n attrs mattr cs) =
markLine pos $
myVcat $ map pretty os ++
[let ax = maybe [] (return . pretty) mattr
in hcat $
(myFsep $ (char '<' <> pretty n): map pretty attrs ++ ax ++ [char '>']):
map pretty cs ++ [myFsep $ [text "</" <> pretty n, char '>']]]
pretty (A.XmlHybrid pos mbHead os imp decls n attrs mattr cs) =
markLine pos $
myVcat $ map pretty os ++ [text "<%"] ++
(case mbHead of
Nothing -> id
Just h -> \x -> [topLevel (pretty h) x])
(map pretty imp ++ map pretty decls ++
[let ax = maybe [] (return . pretty) mattr
in hcat $
(myFsep $ (char '<' <> pretty n): map pretty attrs ++ ax ++ [char '>']):
map pretty cs ++ [myFsep $ [text "</" <> pretty n, char '>']]])
-------------------------- Module Header ------------------------------
instance Pretty (A.ModuleHead l) where
pretty (A.ModuleHead _ m mbWarn mbExportList) = mySep [
text "module",
pretty m,
maybePP pretty mbWarn,
maybePP pretty mbExportList,
text "where"]
instance Pretty (A.WarningText l) where
        pretty = ppWarnTxt . sWarningText
instance Pretty (A.ModuleName l) where
pretty = pretty . sModuleName
instance Pretty (A.ExportSpecList l) where
pretty (A.ExportSpecList _ especs) = parenList $ map pretty especs
instance Pretty (A.ExportSpec l) where
pretty = pretty . sExportSpec
instance SrcInfo pos => Pretty (A.ImportDecl pos) where
pretty = pretty . sImportDecl
instance Pretty (A.ImportSpecList l) where
pretty (A.ImportSpecList _ b ispecs) =
(if b then text "hiding" else empty)
<+> parenList (map pretty ispecs)
instance Pretty (A.ImportSpec l) where
pretty = pretty . sImportSpec
------------------------- Declarations ------------------------------
instance SrcInfo pos => Pretty (A.Decl pos) where
pretty = pretty . sDecl
instance Pretty (A.DeclHead l) where
pretty (A.DHead l n tvs) = mySep (pretty n : map pretty tvs)
pretty (A.DHInfix l tva n tvb) = mySep [pretty tva, pretty n, pretty tvb]
pretty (A.DHParen l dh) = parens (pretty dh)
instance Pretty (A.InstHead l) where
pretty (A.IHead l qn ts) = mySep (pretty qn : map pretty ts)
pretty (A.IHInfix l ta qn tb) = mySep [pretty ta, pretty qn, pretty tb]
pretty (A.IHParen l ih) = parens (pretty ih)
instance Pretty (A.DataOrNew l) where
pretty = pretty . sDataOrNew
instance Pretty (A.Assoc l) where
pretty = pretty . sAssoc
instance SrcInfo pos => Pretty (A.Match pos) where
pretty = pretty . sMatch
instance SrcInfo loc => Pretty (A.ClassDecl loc) where
pretty = pretty . sClassDecl
instance SrcInfo loc => Pretty (A.InstDecl loc) where
pretty = pretty . sInstDecl
------------------------- FFI stuff -------------------------------------
instance Pretty (A.Safety l) where
pretty = pretty . sSafety
instance Pretty (A.CallConv l) where
pretty = pretty . sCallConv
------------------------- Pragmas ---------------------------------------
instance SrcInfo loc => Pretty (A.Rule loc) where
pretty = pretty . sRule
instance Pretty (A.Activation l) where
pretty = pretty . sActivation
instance Pretty (A.RuleVar l) where
pretty = pretty . sRuleVar
instance SrcInfo loc => Pretty (A.ModulePragma loc) where
pretty (A.LanguagePragma _ ns) =
myFsep $ text "{-# LANGUAGE" : punctuate (char ',') (map pretty ns) ++ [text "#-}"]
pretty (A.OptionsPragma _ (Just tool) s) =
myFsep $ [text "{-# OPTIONS_" <> pretty tool, text s, text "#-}"]
pretty (A.OptionsPragma _ _ s) =
myFsep $ [text "{-# OPTIONS", text s, text "#-}"]
pretty (A.AnnModulePragma _ ann) =
myFsep $ [text "{-# ANN", pretty ann, text "#-}"]
instance SrcInfo loc => Pretty (A.Annotation loc) where
pretty = pretty . sAnnotation
------------------------- Data & Newtype Bodies -------------------------
instance Pretty (A.QualConDecl l) where
pretty (A.QualConDecl _pos mtvs ctxt con) =
myFsep [ppForall (fmap (map sTyVarBind) mtvs), ppContext $ maybe [] sContext ctxt, pretty con]
instance Pretty (A.GadtDecl l) where
pretty (A.GadtDecl _pos name ty) =
myFsep [pretty name, text "::", pretty ty]
instance Pretty (A.ConDecl l) where
pretty = pretty . sConDecl
instance Pretty (A.FieldDecl l) where
pretty (A.FieldDecl _ names ty) =
myFsepSimple $ (punctuate comma . map pretty $ names) ++
[text "::", pretty ty]
instance Pretty (A.BangType l) where
pretty = pretty . sBangType
instance Pretty (A.Deriving l) where
pretty (A.Deriving _ []) = text "deriving" <+> parenList []
pretty (A.Deriving _ [A.IHead _ d []]) = text "deriving" <+> pretty d
pretty (A.Deriving _ ihs) = text "deriving" <+> parenList (map pretty ihs)
------------------------- Types -------------------------
instance Pretty (A.Type l) where
pretty = pretty . sType
instance Pretty (A.TyVarBind l) where
pretty = pretty . sTyVarBind
---------------------------- Kinds ----------------------------
instance Pretty (A.Kind l) where
pretty = pretty . sKind
------------------- Functional Dependencies -------------------
instance Pretty (A.FunDep l) where
pretty = pretty . sFunDep
------------------------- Expressions -------------------------
instance SrcInfo loc => Pretty (A.Rhs loc) where
pretty = pretty . sRhs
instance SrcInfo loc => Pretty (A.GuardedRhs loc) where
pretty = pretty . sGuardedRhs
instance Pretty (A.Literal l) where
pretty = pretty . sLiteral
instance SrcInfo loc => Pretty (A.Exp loc) where
pretty = pretty . sExp
instance SrcInfo loc => Pretty (A.XAttr loc) where
pretty = pretty . sXAttr
instance Pretty (A.XName l) where
pretty = pretty . sXName
--------------------- Template Haskell -------------------------
instance SrcInfo loc => Pretty (A.Bracket loc) where
pretty = pretty . sBracket
instance SrcInfo loc => Pretty (A.Splice loc) where
pretty = pretty . sSplice
------------------------- Patterns -----------------------------
instance SrcInfo loc => Pretty (A.Pat loc) where
pretty = pretty . sPat
instance SrcInfo loc => Pretty (A.PXAttr loc) where
pretty = pretty . sPXAttr
instance SrcInfo loc => Pretty (A.PatField loc) where
pretty = pretty . sPatField
--------------------- Regular Patterns -------------------------
instance SrcInfo loc => Pretty (A.RPat loc) where
pretty = pretty . sRPat
instance Pretty (A.RPatOp l) where
pretty = pretty . sRPatOp
------------------------- Case bodies -------------------------
instance SrcInfo loc => Pretty (A.Alt loc) where
pretty = pretty . sAlt
instance SrcInfo loc => Pretty (A.GuardedAlts loc) where
pretty = pretty . sGuardedAlts
instance SrcInfo loc => Pretty (A.GuardedAlt loc) where
pretty = pretty . sGuardedAlt
------------------------- Statements in monads, guards & list comprehensions -----
instance SrcInfo loc => Pretty (A.Stmt loc) where
pretty = pretty . sStmt
instance SrcInfo loc => Pretty (A.QualStmt loc) where
pretty = pretty . sQualStmt
------------------------- Record updates
instance SrcInfo loc => Pretty (A.FieldUpdate loc) where
pretty = pretty . sFieldUpdate
------------------------- Names -------------------------
instance Pretty (A.QOp l) where
pretty = pretty . sQOp
instance Pretty (A.QName l) where
pretty = pretty . sQName
instance Pretty (A.Op l) where
pretty = pretty . sOp
instance Pretty (A.Name l) where
pretty = pretty . sName
instance Pretty (A.IPName l) where
pretty = pretty . sIPName
instance SrcInfo loc => Pretty (A.IPBind loc) where
pretty = pretty . sIPBind
instance Pretty (A.CName l) where
pretty = pretty . sCName
instance Pretty (A.Context l) where
pretty (A.CxEmpty _) = mySep [text "()", text "=>"]
pretty (A.CxSingle _ asst) = mySep [pretty asst, text "=>"]
pretty (A.CxTuple _ assts) = myFsep $ [parenList (map pretty assts), text "=>"]
pretty (A.CxParen _ asst) = parens (pretty asst)
-- hacked for multi-parameter type classes
instance Pretty (A.Asst l) where
pretty = pretty . sAsst
------------------------- pp utils -------------------------
maybePP :: (a -> Doc) -> Maybe a -> Doc
maybePP pp Nothing = empty
maybePP pp (Just a) = pp a
parenList :: [Doc] -> Doc
parenList = parens . myFsepSimple . punctuate comma
hashParenList :: [Doc] -> Doc
hashParenList = hashParens . myFsepSimple . punctuate comma
where hashParens = parens . hashes
hashes = \doc -> char '#' <> doc <> char '#'
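
-- Illustrative, not part of the original source:
--   hashParenList [text "a", text "b"] renders roughly as (#a, b#)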
braceList :: [Doc] -> Doc
braceList = braces . myFsepSimple . punctuate comma
bracketList :: [Doc] -> Doc
bracketList = brackets . myFsepSimple
-- Wrap in braces and semicolons, with an extra space at the start in
-- case the first doc begins with "-", which would be scanned as {-
flatBlock :: [Doc] -> Doc
flatBlock = braces . (space <>) . hsep . punctuate semi
-- Same, but put each thing on a separate line
prettyBlock :: [Doc] -> Doc
prettyBlock = braces . (space <>) . vcat . punctuate semi
-- Monadic PP Combinators -- these examine the env
blankline :: Doc -> Doc
blankline dl = do{e<-getPPEnv;if spacing e && layout e /= PPNoLayout
then space $$ dl else dl}
topLevel :: Doc -> [Doc] -> Doc
topLevel header dl = do
e <- fmap layout getPPEnv
case e of
PPOffsideRule -> header $$ vcat dl
PPSemiColon -> header $$ prettyBlock dl
PPInLine -> header $$ prettyBlock dl
PPNoLayout -> header <+> flatBlock dl
ppBody :: (PPHsMode -> Int) -> [Doc] -> Doc
ppBody f dl = do
e <- fmap layout getPPEnv
case e of PPOffsideRule -> indent
PPSemiColon -> indentExplicit
_ -> flatBlock dl
where
indent = do{i <-fmap f getPPEnv;nest i . vcat $ dl}
indentExplicit = do {i <- fmap f getPPEnv;
nest i . prettyBlock $ dl}
($$$) :: Doc -> Doc -> Doc
a $$$ b = layoutChoice (a $$) (a <+>) b
mySep :: [Doc] -> Doc
mySep = layoutChoice mySep' hsep
where
-- ensure paragraph fills with indentation.
mySep' [x] = x
mySep' (x:xs) = x <+> fsep xs
mySep' [] = error "Internal error: mySep"
myVcat :: [Doc] -> Doc
myVcat = layoutChoice vcat hsep
myFsepSimple :: [Doc] -> Doc
myFsepSimple = layoutChoice fsep hsep
-- same, except that continuation lines are indented,
-- which is necessary to avoid triggering the offside rule.
myFsep :: [Doc] -> Doc
myFsep = layoutChoice fsep' hsep
where fsep' [] = empty
fsep' (d:ds) = do
e <- getPPEnv
let n = onsideIndent e
nest n (fsep (nest (-n) d:ds))
myFsepSCC :: [Doc] -> Doc
myFsepSCC = layoutChoice fsep' hsep
where fsep' [] = empty
fsep' (d:ds) = do
e <- getPPEnv
let n = onsideIndent e
nest n (fsep (nest (-n) d:ds))
layoutChoice :: (a -> Doc) -> (a -> Doc) -> a -> Doc
layoutChoice a b dl = do e <- getPPEnv
if layout e == PPOffsideRule ||
layout e == PPSemiColon
then a dl else b dl
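
-- Illustrative note, not part of the original source: under PPOffsideRule and
-- PPSemiColon the first (vertical) combinator is chosen, so e.g. myVcat
-- behaves like vcat; under PPInLine and PPNoLayout it falls back to the
-- horizontal variant (hsep).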
-- Prefix something with a LINE pragma, if requested.
-- GHC's LINE pragma actually sets the current line number to n-1, so
-- that the following line is line n. But if there's no newline before
-- the line we're talking about, we need to compensate by adding 1.
markLine :: SrcInfo s => s -> Doc -> Doc
markLine loc doc = do
e <- getPPEnv
let y = startLine loc
let line l =
text ("{-# LINE " ++ show l ++ " \"" ++ fileName loc ++ "\" #-}")
if linePragmas e then layoutChoice (line y $$) (line (y+1) <+>) doc
else doc
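
-- Illustrative, not part of the original source: with linePragmas enabled, a
-- declaration starting at line 42 of "Foo.hs" is preceded by
--   {-# LINE 42 "Foo.hs" #-}
-- on its own line under the layout-preserving modes, or prefixed inline with
--   {-# LINE 43 "Foo.hs" #-}
-- otherwise, compensating for the missing newline as described above.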
| djv/VisualProf | src/Pretty.hs | bsd-2-clause | 59,244 | 0 | 23 | 19,652 | 18,973 | 9,489 | 9,484 | 1,077 | 4 |
{-# LANGUAGE DeriveDataTypeable #-}
module Main where
import System.Console.CmdArgs
import qualified Database.SqlServer.Generator as D
import Database.SqlServer.Create.Database
import Data.List.Split
data Arguments = Arguments
{
seed :: Int
, complexity :: Int
, excludeTypes :: String
, approximateObjectsPerType :: Int
} deriving (Show,Data,Typeable)
msg :: [String]
msg = ["More details on the github repo at " ++
" https://github.com/fffej/sql-server-gen"]
defaultArgs :: Arguments
defaultArgs = Arguments
{
seed = def &= help "Seed for random number generator" &= name "s"
, complexity = 100 &= help "Complexity of database (optional)" &= opt (500 :: Int) &= name "c"
, approximateObjectsPerType = 100 &= help "Approximate number of objects per type in rendered database" &= opt (20 :: Int) &= name "n"
, excludeTypes = "*" &= help "List of object types to exclude comma separated initials" &= name "e"
} &= summary "SQL Server Schema Generator"
&= help "Generate arbitrary SQL Server databases"
&= details msg
convert :: Arguments -> D.GenerateOptions
convert a = D.GenerateOptions
{
D.seed = seed a
, D.complexity = complexity a
, D.approximateObjectsPerType = approximateObjectsPerType a
, D.excludeTypes = parseRenderOptions $ excludeTypes a
}
parseRenderOptions :: String -> RenderOptions
parseRenderOptions xs = foldl setFlag defaultRenderOptions (splitOn "," xs)
setFlag :: RenderOptions -> String -> RenderOptions
setFlag ro s =
case s of
"T" -> ro {showTables = False}
"V" -> ro {showViews = False}
"S" -> ro {showSequences = False}
"P" -> ro {showProcedures = False}
"F" -> ro {showFunctions = False}
"U" -> ro {showUsers = False}
"R" -> ro {showRoles = False}
"FTC" -> ro {showFullTextCatalog = False}
"FTS" -> ro {showFullTextStopList = False}
"CRED" -> ro {showCredential = False}
"M" -> ro {showMessageType = False}
"B" -> ro {showBrokerPriority = False}
"PF" -> ro {showPartitionFunction = False}
_ -> ro
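
-- Illustrative, not part of the original source:
--   parseRenderOptions "T,V" disables tables and views (showTables = False,
--   showViews = False) and leaves every other flag at its default; initials
--   that match no case above are silently ignored.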
header :: Arguments -> String
header a = unlines
[
"-- This code was generated by sql-server-gen"
, "-- Arguments used: seed=" ++ show (seed a) ++ " complexity=" ++ show (complexity a) ++
" excludeTypes=" ++ show (excludeTypes a)
]
main :: IO ()
main = do
a <- cmdArgs defaultArgs
let renderOptions = (parseRenderOptions (excludeTypes a)) { objectsPerType = approximateObjectsPerType a }
putStrLn (header a)
print $ (renderDatabase renderOptions (D.generateEntity (convert a)))
return ()
| fffej/sql-server-gen | cli/Main.hs | bsd-2-clause | 2,607 | 0 | 14 | 564 | 737 | 396 | 341 | 62 | 14 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
module Network.GGS
( GGSControl
, startGGS
, stopGGS
) where
import Control.Applicative
import Control.Concurrent
import Control.Concurrent.STM
import Control.Lens
import Control.Logging
import Control.Monad
import Data.Aeson (FromJSON, decode)
import Data.ByteString.Lazy (ByteString)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import qualified Data.Text as T
import GHC.Generics
import Network.HTTP hiding (host, port)
import Network.URI
import Text.Printf
import Config
import Model.Game
import Network.RequestScheduling
import Storage
import Storage.Schema hiding (port)
import qualified Storage.Schema as Schema
data GGSControl = GGSControl
{ pollThread :: ThreadId
, killSwitch :: TMVar ()
}
data GGSGame = GGSGame
{ server :: String
, port :: Int
} deriving (Generic, Show)
instance FromJSON GGSGame
startGGS :: Config -> RequestScheduleControl -> StorageControl -> IO GGSControl
startGGS conf reqSched storage = do
killSwitch <- newEmptyTMVarIO
tid <- forkIO $ ggsThread conf killSwitch reqSched storage
return GGSControl { pollThread = tid
, killSwitch = killSwitch
}
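
-- Illustrative usage sketch, not part of the original source (names taken
-- from the signatures above):
--
-- > control <- startGGS conf reqSched storage
-- > -- ... run the rest of the application ...
-- > stopGGS control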
stopGGS :: GGSControl -> IO ()
stopGGS control = do
logS' "ggs" "Toggling GGS thread kill switch"
atomically $ putTMVar (killSwitch control) ()
ggsThread :: Config -> TMVar () -> RequestScheduleControl -> StorageControl -> IO ()
ggsThread conf killSwitch reqSched storage = do
logS' "ggs" "GGS poll thread starting"
  -- Heart with an initial beat already queued, so we do the first update right away
heart <- newTQueueIO
atomically $ writeTQueue heart ()
loop heart
where
beat heart = do
threadDelay $ conf ^. ggsPollInterval * 1000 * 1000
atomically $ writeTQueue heart ()
loop heart = do
      -- Note that we need to read both inputs in the same `atomically` block so
      -- that a kill switch toggle is noticed even if we're blocked on the queue.
(live, _ ) <- atomically $ do
live <- isEmptyTMVar killSwitch
beat <- if live then Just <$> readTQueue heart else return Nothing
return (live, beat)
when (not live) $ do
logS' "ggs" "GGS poll thread terminated"
when live $ do
pollGGS reqSched storage
-- Schedule next heartbeat
forkIO $ beat heart
loop heart
pollGGS :: RequestScheduleControl -> StorageControl -> IO ()
pollGGS reqSched storage = do
logS' "ggs" "Downloading current games list from GGS"
mgames <- loadFromGGS
when (isJust mgames) $ do
let ggsGames = fromJust mgames
tracked <- listTracked storage
    -- Determine which games GGS has that we aren't tracking yet, and which
    -- games we are tracking that are no longer listed by GGS.
let ggsByAddr = Map.fromList $ map (\g -> ((server g, port g), g)) ggsGames
let trackedByAddr = Map.fromList $ map (\g -> ((host g, Schema.port g), g)) tracked
let new = map snd $ Map.toList $ ggsByAddr `Map.difference` trackedByAddr
let gone = map snd $ Map.toList $ trackedByAddr `Map.difference` ggsByAddr
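    -- Illustrative, not part of the original source: if GGS lists games at
    -- ("a",1) and ("b",2) while we currently track ("b",2) and ("c",3), then
    -- `new` holds the ("a",1) game and `gone` holds the ("c",3) game.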
-- Process new games
handleNew reqSched storage new
-- Process removed games
handleRemoved storage gone
handleNew :: RequestScheduleControl -> StorageControl -> [GGSGame] -> IO ()
handleNew reqSched storage new = do
forM_ new $ \g -> do
logS' "ggs" $ T.pack $ printf "Found new game from GGS, tracking: %s:%d" (server g) (port g)
-- Try to get the game's current status
pollRes <- scheduleStatusRequest reqSched (server g, port g) >>= atomically . readTMVar
case pollRes of
Success {} -> do
debugS' "ggs" $ T.pack $ printf "Got initial status for %s:%d: %s" (server g) (port g) (show $ receivedStatus pollRes)
startTracking storage
(server g) (port g)
(TrackMeta { source = GGS, announce = True })
(requestSent pollRes) (receivedStatus pollRes)
return ()
-- Maybe it will be live later?
_ -> return ()
handleRemoved :: StorageControl -> [TrackedGame] -> IO ()
handleRemoved storage gone = do
forM_ gone $ \g -> do
-- Only remove if we originally got it from GGS, too. It could otherwise be a live
-- game that's just not tracked by GGS.
when ((source $ meta g) == GGS) $ do
logS' "ggs" $ T.pack $ printf "Game is gone from GGS, untracking: %s (%s:%d)" (name $ lastStatus g) (host g) (Schema.port g)
void $ stopTracking storage $ printf "%s:%d" (host g) (Schema.port g)
request :: Request ByteString
request = replaceHeader HdrAccept "application/json, text/javascript, */*; q=0.01" $
replaceHeader HdrAcceptEncoding "gzip, deflate" $
replaceHeader (HdrCustom "X-Requested-With") "XMLHttpRequest" $
mkRequest GET $ fromJust $ parseURI "http://www.brainwrinkle.net/"
loadFromGGS :: IO (Maybe [GGSGame])
loadFromGGS = do
resp <- simpleHTTP request
case resp of
Left err -> do
warnS' "ggs" $ T.pack $ printf "GGS download failed: " (show err)
return Nothing
Right _ -> do
body <- getResponseBody resp
case decode body of
Nothing -> do
warnS' "ggs" $ T.pack $ printf "Failed to decode games from: " (show resp)
return Nothing
Just games -> return $ Just games
| Ornedan/dom4statusbot | src/Network/GGS.hs | bsd-3-clause | 5,666 | 0 | 20 | 1,563 | 1,523 | 758 | 765 | 119 | 3 |
{-# LANGUAGE CPP, NamedFieldPuns, NondecreasingIndentation #-}
{-# OPTIONS_GHC -fno-cse #-}
-- -fno-cse is needed for GLOBAL_VAR's to behave properly
-----------------------------------------------------------------------------
--
-- GHC Driver
--
-- (c) The University of Glasgow 2005
--
-----------------------------------------------------------------------------
module DriverPipeline (
-- Run a series of compilation steps in a pipeline, for a
-- collection of source files.
oneShot, compileFile, mergeRequirement,
-- Interfaces for the batch-mode driver
linkBinary,
-- Interfaces for the compilation manager (interpreted/batch-mode)
preprocess,
compileOne, compileOne',
link,
-- Misc utility
makeMergeRequirementSummary,
-- Exports for hooks to override runPhase and link
PhasePlus(..), CompPipeline(..), PipeEnv(..), PipeState(..),
phaseOutputFilename, getPipeState, getPipeEnv,
hscPostBackendPhase, getLocation, setModLocation, setDynFlags,
runPhase, exeFileName,
mkExtraObjToLinkIntoBinary, mkNoteObjsToLinkIntoBinary,
maybeCreateManifest,
linkingNeeded, checkLinkInfo, writeInterfaceOnlyMode
) where
#include "HsVersions.h"
import PipelineMonad
import Packages
import HeaderInfo
import DriverPhases
import SysTools
import HscMain
import Finder
import HscTypes hiding ( Hsc )
import Outputable
import Module
import UniqFM ( eltsUFM )
import ErrUtils
import DynFlags
import Config
import Panic
import Util
import StringBuffer ( hGetStringBuffer )
import BasicTypes ( SuccessFlag(..) )
import Maybes ( expectJust )
import SrcLoc
import FastString
import LlvmCodeGen ( llvmFixupAsm )
import MonadUtils
import Platform
import TcRnTypes
import Hooks
import Exception
import System.Directory
import System.FilePath
import System.IO
import Control.Monad
import Data.List ( isSuffixOf )
import Data.Maybe
import Data.Char
import Data.Time
-- ---------------------------------------------------------------------------
-- Pre-process
-- | Just preprocess a file, put the result in a temp. file (used by the
-- compilation manager during the summary phase).
--
-- We return the augmented DynFlags, because they contain the result
-- of slurping in the OPTIONS pragmas
preprocess :: HscEnv
-> (FilePath, Maybe Phase) -- ^ filename and starting phase
-> IO (DynFlags, FilePath)
preprocess hsc_env (filename, mb_phase) =
ASSERT2(isJust mb_phase || isHaskellSrcFilename filename, text filename)
runPipeline anyHsc hsc_env (filename, fmap RealPhase mb_phase)
Nothing Temporary Nothing{-no ModLocation-} Nothing{-no stub-}
-- ---------------------------------------------------------------------------
-- | Compile
--
-- Compile a single module, under the control of the compilation manager.
--
-- This is the interface between the compilation manager and the
-- compiler proper (hsc), where we deal with tedious details like
-- reading the OPTIONS pragma from the source file, converting the
-- C or assembly that GHC produces into an object file, and compiling
-- FFI stub files.
--
-- NB. No old interface can also mean that the source has changed.
compileOne :: HscEnv
-> ModSummary -- ^ summary for module being compiled
-> Int -- ^ module N ...
-> Int -- ^ ... of M
-> Maybe ModIface -- ^ old interface, if we have one
-> Maybe Linkable -- ^ old linkable, if we have one
-> SourceModified
-> IO HomeModInfo -- ^ the complete HomeModInfo, if successful
compileOne = compileOne' Nothing (Just batchMsg)
compileOne' :: Maybe TcGblEnv
-> Maybe Messager
-> HscEnv
-> ModSummary -- ^ summary for module being compiled
-> Int -- ^ module N ...
-> Int -- ^ ... of M
-> Maybe ModIface -- ^ old interface, if we have one
-> Maybe Linkable -- ^ old linkable, if we have one
-> SourceModified
-> IO HomeModInfo -- ^ the complete HomeModInfo, if successful
compileOne' m_tc_result mHscMessage
hsc_env0 summary mod_index nmods mb_old_iface maybe_old_linkable
source_modified0
= do
debugTraceMsg dflags1 2 (text "compile: input file" <+> text input_fnpp)
(status, hmi0) <- hscIncrementalCompile
always_do_basic_recompilation_check
m_tc_result mHscMessage
hsc_env summary source_modified mb_old_iface (mod_index, nmods)
case (status, hsc_lang) of
(HscUpToDate, _) ->
ASSERT( isJust maybe_old_linkable || isNoLink (ghcLink dflags) )
return hmi0 { hm_linkable = maybe_old_linkable }
(HscNotGeneratingCode, HscNothing) ->
let mb_linkable = if isHsBoot src_flavour
then Nothing
-- TODO: Questionable.
else Just (LM (ms_hs_date summary) this_mod [])
in return hmi0 { hm_linkable = mb_linkable }
(HscNotGeneratingCode, _) -> panic "compileOne HscNotGeneratingCode"
(_, HscNothing) -> panic "compileOne HscNothing"
(HscUpdateBoot, HscInterpreted) -> do
return hmi0
(HscUpdateBoot, _) -> do
touchObjectFile dflags object_filename
return hmi0
(HscUpdateBootMerge, HscInterpreted) ->
let linkable = LM (ms_hs_date summary) this_mod []
in return hmi0 { hm_linkable = Just linkable }
(HscUpdateBootMerge, _) -> do
output_fn <- getOutputFilename next_phase
Temporary basename dflags next_phase (Just location)
-- #10660: Use the pipeline instead of calling
-- compileEmptyStub directly, so -dynamic-too gets
-- handled properly
_ <- runPipeline StopLn hsc_env
(output_fn,
Just (HscOut src_flavour
mod_name HscUpdateBootMerge))
(Just basename)
Persistent
(Just location)
Nothing
o_time <- getModificationUTCTime object_filename
let linkable = LM o_time this_mod [DotO object_filename]
return hmi0 { hm_linkable = Just linkable }
(HscRecomp cgguts summary, HscInterpreted) -> do
(hasStub, comp_bc, modBreaks) <- hscInteractive hsc_env cgguts summary
stub_o <- case hasStub of
Nothing -> return []
Just stub_c -> do
stub_o <- compileStub hsc_env stub_c
return [DotO stub_o]
let hs_unlinked = [BCOs comp_bc modBreaks]
unlinked_time = ms_hs_date summary
-- Why do we use the timestamp of the source file here,
-- rather than the current time? This works better in
-- the case where the local clock is out of sync
-- with the filesystem's clock. It's just as accurate:
-- if the source is modified, then the linkable will
-- be out of date.
let linkable = LM unlinked_time (ms_mod summary)
(hs_unlinked ++ stub_o)
return hmi0 { hm_linkable = Just linkable }
(HscRecomp cgguts summary, _) -> do
output_fn <- getOutputFilename next_phase
Temporary basename dflags next_phase (Just location)
-- We're in --make mode: finish the compilation pipeline.
_ <- runPipeline StopLn hsc_env
(output_fn,
Just (HscOut src_flavour mod_name (HscRecomp cgguts summary)))
(Just basename)
Persistent
(Just location)
Nothing
-- The object filename comes from the ModLocation
o_time <- getModificationUTCTime object_filename
let linkable = LM o_time this_mod [DotO object_filename]
return hmi0 { hm_linkable = Just linkable }
where dflags0 = ms_hspp_opts summary
location = ms_location summary
input_fn = expectJust "compile:hs" (ml_hs_file location)
input_fnpp = ms_hspp_file summary
mod_graph = hsc_mod_graph hsc_env0
needsTH = any (xopt Opt_TemplateHaskell . ms_hspp_opts) mod_graph
needsQQ = any (xopt Opt_QuasiQuotes . ms_hspp_opts) mod_graph
needsLinker = needsTH || needsQQ
isDynWay = any (== WayDyn) (ways dflags0)
isProfWay = any (== WayProf) (ways dflags0)
src_flavour = ms_hsc_src summary
this_mod = ms_mod summary
mod_name = ms_mod_name summary
next_phase = hscPostBackendPhase dflags src_flavour hsc_lang
object_filename = ml_obj_file location
-- #8180 - when using TemplateHaskell, switch on -dynamic-too so
-- the linker can correctly load the object files.
dflags1 = if needsLinker && dynamicGhc && not isDynWay && not isProfWay
then gopt_set dflags0 Opt_BuildDynamicToo
else dflags0
basename = dropExtension input_fn
        -- We add the directory in which the .hs file resides to the import
-- path. This is needed when we try to compile the .hc file later, if it
-- imports a _stub.h file that we created here.
current_dir = takeDirectory basename
old_paths = includePaths dflags1
dflags = dflags1 { includePaths = current_dir : old_paths }
hsc_env = hsc_env0 {hsc_dflags = dflags}
-- Figure out what lang we're generating
hsc_lang = hscTarget dflags
-- -fforce-recomp should also work with --make
force_recomp = gopt Opt_ForceRecomp dflags
source_modified
| force_recomp = SourceModified
| otherwise = source_modified0
always_do_basic_recompilation_check = case hsc_lang of
HscInterpreted -> True
_ -> False
-----------------------------------------------------------------------------
-- stub .h and .c files (for foreign export support)
-- The _stub.c file is derived from the haskell source file, possibly taking
-- into account the -stubdir option.
--
-- The object file created by compiling the _stub.c file is put into a
-- temporary file, which will be later combined with the main .o file
-- (see the MergeStubs phase).
compileStub :: HscEnv -> FilePath -> IO FilePath
compileStub hsc_env stub_c = do
(_, stub_o) <- runPipeline StopLn hsc_env (stub_c,Nothing) Nothing
Temporary Nothing{-no ModLocation-} Nothing
return stub_o
compileEmptyStub :: DynFlags -> HscEnv -> FilePath -> ModLocation -> IO ()
compileEmptyStub dflags hsc_env basename location = do
-- To maintain the invariant that every Haskell file
-- compiles to object code, we make an empty (but
-- valid) stub object file for signatures
empty_stub <- newTempName dflags "c"
writeFile empty_stub ""
_ <- runPipeline StopLn hsc_env
(empty_stub, Nothing)
(Just basename)
Persistent
(Just location)
Nothing
return ()
-- ---------------------------------------------------------------------------
-- Link
link :: GhcLink -- interactive or batch
-> DynFlags -- dynamic flags
-> Bool -- attempt linking in batch mode?
-> HomePackageTable -- what to link
-> IO SuccessFlag
-- For the moment, in the batch linker, we don't bother to tell doLink
-- which packages to link -- it just tries all that are available.
-- batch_attempt_linking should only be *looked at* in batch mode. It
-- should only be True if the upsweep was successful and someone
-- exports main, i.e., we have good reason to believe that linking
-- will succeed.
link ghcLink dflags
= lookupHook linkHook l dflags ghcLink dflags
where
l LinkInMemory _ _ _
= if cGhcWithInterpreter == "YES"
then -- Not Linking...(demand linker will do the job)
return Succeeded
else panicBadLink LinkInMemory
l NoLink _ _ _
= return Succeeded
l LinkBinary dflags batch_attempt_linking hpt
= link' dflags batch_attempt_linking hpt
l LinkStaticLib dflags batch_attempt_linking hpt
= link' dflags batch_attempt_linking hpt
l LinkDynLib dflags batch_attempt_linking hpt
= link' dflags batch_attempt_linking hpt
panicBadLink :: GhcLink -> a
panicBadLink other = panic ("link: GHC not built to link this way: " ++
show other)
link' :: DynFlags -- dynamic flags
-> Bool -- attempt linking in batch mode?
-> HomePackageTable -- what to link
-> IO SuccessFlag
link' dflags batch_attempt_linking hpt
| batch_attempt_linking
= do
let
staticLink = case ghcLink dflags of
LinkStaticLib -> True
_ -> platformBinariesAreStaticLibs (targetPlatform dflags)
home_mod_infos = eltsUFM hpt
-- the packages we depend on
pkg_deps = concatMap (map fst . dep_pkgs . mi_deps . hm_iface) home_mod_infos
-- the linkables to link
linkables = map (expectJust "link".hm_linkable) home_mod_infos
debugTraceMsg dflags 3 (text "link: linkables are ..." $$ vcat (map ppr linkables))
-- check for the -no-link flag
if isNoLink (ghcLink dflags)
then do debugTraceMsg dflags 3 (text "link(batch): linking omitted (-c flag given).")
return Succeeded
else do
let getOfiles (LM _ _ us) = map nameOfObject (filter isObject us)
obj_files = concatMap getOfiles linkables
exe_file = exeFileName staticLink dflags
linking_needed <- linkingNeeded dflags staticLink linkables pkg_deps
if not (gopt Opt_ForceRecomp dflags) && not linking_needed
then do debugTraceMsg dflags 2 (text exe_file <+> ptext (sLit "is up to date, linking not required."))
return Succeeded
else do
compilationProgressMsg dflags ("Linking " ++ exe_file ++ " ...")
-- Don't showPass in Batch mode; doLink will do that for us.
let link = case ghcLink dflags of
LinkBinary -> linkBinary
LinkStaticLib -> linkStaticLibCheck
LinkDynLib -> linkDynLibCheck
other -> panicBadLink other
link dflags obj_files pkg_deps
debugTraceMsg dflags 3 (text "link: done")
-- linkBinary only returns if it succeeds
return Succeeded
| otherwise
= do debugTraceMsg dflags 3 (text "link(batch): upsweep (partially) failed OR" $$
text " Main.main not exported; not linking.")
return Succeeded
linkingNeeded :: DynFlags -> Bool -> [Linkable] -> [UnitId] -> IO Bool
linkingNeeded dflags staticLink linkables pkg_deps = do
-- if the modification time on the executable is later than the
-- modification times on all of the objects and libraries, then omit
-- linking (unless the -fforce-recomp flag was given).
let exe_file = exeFileName staticLink dflags
e_exe_time <- tryIO $ getModificationUTCTime exe_file
case e_exe_time of
Left _ -> return True
Right t -> do
-- first check object files and extra_ld_inputs
let extra_ld_inputs = [ f | FileOption _ f <- ldInputs dflags ]
e_extra_times <- mapM (tryIO . getModificationUTCTime) extra_ld_inputs
let (errs,extra_times) = splitEithers e_extra_times
let obj_times = map linkableTime linkables ++ extra_times
if not (null errs) || any (t <) obj_times
then return True
else do
-- next, check libraries. XXX this only checks Haskell libraries,
-- not extra_libraries or -l things from the command line.
let pkg_hslibs = [ (libraryDirs c, lib)
| Just c <- map (lookupPackage dflags) pkg_deps,
lib <- packageHsLibs dflags c ]
pkg_libfiles <- mapM (uncurry (findHSLib dflags)) pkg_hslibs
if any isNothing pkg_libfiles then return True else do
e_lib_times <- mapM (tryIO . getModificationUTCTime)
(catMaybes pkg_libfiles)
let (lib_errs,lib_times) = splitEithers e_lib_times
if not (null lib_errs) || any (t <) lib_times
then return True
else checkLinkInfo dflags pkg_deps exe_file
-- Check whether the existing binary was linked with "the same options";
-- returns 'False' if it was, in which case we can avoid relinking.
checkLinkInfo :: DynFlags -> [UnitId] -> FilePath -> IO Bool
checkLinkInfo dflags pkg_deps exe_file
| not (platformSupportsSavingLinkOpts (platformOS (targetPlatform dflags)))
-- ToDo: Windows and OS X do not use the ELF binary format, so
-- readelf does not work there. We need to find another way to do
-- this.
= return False -- conservatively we should return True, but not
-- linking in this case was the behaviour for a long
-- time so we leave it as-is.
| otherwise
= do
link_info <- getLinkInfo dflags pkg_deps
debugTraceMsg dflags 3 $ text ("Link info: " ++ link_info)
m_exe_link_info <- readElfSection dflags ghcLinkInfoSectionName exe_file
debugTraceMsg dflags 3 $ text ("Exe link info: " ++ show m_exe_link_info)
return (Just link_info /= m_exe_link_info)
platformSupportsSavingLinkOpts :: OS -> Bool
platformSupportsSavingLinkOpts os
| os == OSSolaris2 = False -- see #5382
| otherwise = osElfTarget os
ghcLinkInfoSectionName :: String
ghcLinkInfoSectionName = ".debug-ghc-link-info"
-- if we use the ".debug" prefix, then strip will strip it by default
findHSLib :: DynFlags -> [String] -> String -> IO (Maybe FilePath)
findHSLib dflags dirs lib = do
let batch_lib_file = if gopt Opt_Static dflags
then "lib" ++ lib <.> "a"
else mkSOName (targetPlatform dflags) lib
found <- filterM doesFileExist (map (</> batch_lib_file) dirs)
case found of
[] -> return Nothing
(x:_) -> return (Just x)
-- -----------------------------------------------------------------------------
-- Compile files in one-shot mode.
oneShot :: HscEnv -> Phase -> [(String, Maybe Phase)] -> IO ()
oneShot hsc_env stop_phase srcs = do
o_files <- mapM (compileFile hsc_env stop_phase) srcs
doLink (hsc_dflags hsc_env) stop_phase o_files
-- | Constructs a 'ModSummary' for a "signature merge" node.
-- This is a simplified construction function which only checks
-- for a local hs-boot file.
makeMergeRequirementSummary :: HscEnv -> Bool -> ModuleName -> IO ModSummary
makeMergeRequirementSummary hsc_env obj_allowed mod_name = do
let dflags = hsc_dflags hsc_env
location <- liftIO $ mkHomeModLocation2 dflags mod_name
(moduleNameSlashes mod_name) (hiSuf dflags)
obj_timestamp <-
if isObjectTarget (hscTarget dflags) || obj_allowed -- bug #1205
then liftIO $ modificationTimeIfExists (ml_obj_file location)
else return Nothing
r <- findHomeModule hsc_env mod_name
let has_local_boot = case r of
Found _ _ -> True
_ -> False
src_timestamp <- case obj_timestamp of
Just date -> return date
Nothing -> getCurrentTime -- something fake
return ModSummary {
ms_mod = mkModule (thisPackage dflags) mod_name,
ms_hsc_src = HsBootMerge,
ms_location = location,
ms_hs_date = src_timestamp,
ms_obj_date = obj_timestamp,
ms_iface_date = Nothing,
-- TODO: fill this in with all the imports eventually
ms_srcimps = [],
ms_textual_imps = [],
ms_merge_imps = (has_local_boot, []),
ms_hspp_file = "FAKE",
ms_hspp_opts = dflags,
ms_hspp_buf = Nothing
}
-- | Top-level entry point for @ghc -merge-requirement ModName@.
mergeRequirement :: HscEnv -> ModuleName -> IO ()
mergeRequirement hsc_env mod_name = do
mod_summary <- makeMergeRequirementSummary hsc_env True mod_name
-- Based off of GhcMake handling
_ <- liftIO $ compileOne' Nothing Nothing hsc_env mod_summary 1 1 Nothing
Nothing SourceUnmodified
return ()
compileFile :: HscEnv -> Phase -> (FilePath, Maybe Phase) -> IO FilePath
compileFile hsc_env stop_phase (src, mb_phase) = do
exists <- doesFileExist src
when (not exists) $
throwGhcExceptionIO (CmdLineError ("does not exist: " ++ src))
let
dflags = hsc_dflags hsc_env
split = gopt Opt_SplitObjs dflags
mb_o_file = outputFile dflags
ghc_link = ghcLink dflags -- Set by -c or -no-link
-- When linking, the -o argument refers to the linker's output.
-- otherwise, we use it as the name for the pipeline's output.
output
       -- If we are doing -fno-code, then act as if the output is
-- 'Temporary'. This stops GHC trying to copy files to their
-- final location.
| HscNothing <- hscTarget dflags = Temporary
| StopLn <- stop_phase, not (isNoLink ghc_link) = Persistent
-- -o foo applies to linker
| isJust mb_o_file = SpecificFile
-- -o foo applies to the file we are compiling now
| otherwise = Persistent
stop_phase' = case stop_phase of
As _ | split -> SplitAs
_ -> stop_phase
( _, out_file) <- runPipeline stop_phase' hsc_env
(src, fmap RealPhase mb_phase) Nothing output
Nothing{-no ModLocation-} Nothing
return out_file
doLink :: DynFlags -> Phase -> [FilePath] -> IO ()
doLink dflags stop_phase o_files
| not (isStopLn stop_phase)
= return () -- We stopped before the linking phase
| otherwise
= case ghcLink dflags of
NoLink -> return ()
LinkBinary -> linkBinary dflags o_files []
LinkStaticLib -> linkStaticLibCheck dflags o_files []
LinkDynLib -> linkDynLibCheck dflags o_files []
other -> panicBadLink other
-- ---------------------------------------------------------------------------
-- | Run a compilation pipeline, consisting of multiple phases.
--
-- This is the interface to the compilation pipeline, which runs
-- a series of compilation steps on a single source file, specifying
-- at which stage to stop.
--
-- The DynFlags can be modified by phases in the pipeline (eg. by
-- OPTIONS_GHC pragmas), and the changes affect later phases in the
-- pipeline.
runPipeline
:: Phase -- ^ When to stop
-> HscEnv -- ^ Compilation environment
-> (FilePath,Maybe PhasePlus) -- ^ Input filename (and maybe -x suffix)
-> Maybe FilePath -- ^ original basename (if different from ^^^)
-> PipelineOutput -- ^ Output filename
-> Maybe ModLocation -- ^ A ModLocation, if this is a Haskell module
-> Maybe FilePath -- ^ stub object, if we have one
-> IO (DynFlags, FilePath) -- ^ (final flags, output filename)
runPipeline stop_phase hsc_env0 (input_fn, mb_phase)
mb_basename output maybe_loc maybe_stub_o
= do let
dflags0 = hsc_dflags hsc_env0
-- Decide where dump files should go based on the pipeline output
dflags = dflags0 { dumpPrefix = Just (basename ++ ".") }
hsc_env = hsc_env0 {hsc_dflags = dflags}
(input_basename, suffix) = splitExtension input_fn
suffix' = drop 1 suffix -- strip off the .
basename | Just b <- mb_basename = b
| otherwise = input_basename
-- If we were given a -x flag, then use that phase to start from
start_phase = fromMaybe (RealPhase (startPhase suffix')) mb_phase
isHaskell (RealPhase (Unlit _)) = True
isHaskell (RealPhase (Cpp _)) = True
isHaskell (RealPhase (HsPp _)) = True
isHaskell (RealPhase (Hsc _)) = True
isHaskell (HscOut {}) = True
isHaskell _ = False
isHaskellishFile = isHaskell start_phase
env = PipeEnv{ stop_phase,
src_filename = input_fn,
src_basename = basename,
src_suffix = suffix',
output_spec = output }
-- We want to catch cases of "you can't get there from here" before
-- we start the pipeline, because otherwise it will just run off the
-- end.
let happensBefore' = happensBefore dflags
case start_phase of
RealPhase start_phase' ->
-- See Note [Partial ordering on phases]
-- Not the same as: (stop_phase `happensBefore` start_phase')
when (not (start_phase' `happensBefore'` stop_phase ||
start_phase' `eqPhase` stop_phase)) $
throwGhcExceptionIO (UsageError
("cannot compile this file to desired target: "
++ input_fn))
HscOut {} -> return ()
debugTraceMsg dflags 4 (text "Running the pipeline")
r <- runPipeline' start_phase hsc_env env input_fn
maybe_loc maybe_stub_o
-- If we are compiling a Haskell module, and doing
-- -dynamic-too, but couldn't do the -dynamic-too fast
-- path, then rerun the pipeline for the dyn way
let dflags = extractDynFlags hsc_env
-- NB: Currently disabled on Windows (ref #7134, #8228, and #5987)
when (not $ platformOS (targetPlatform dflags) == OSMinGW32) $ do
when isHaskellishFile $ whenCannotGenerateDynamicToo dflags $ do
debugTraceMsg dflags 4
(text "Running the pipeline again for -dynamic-too")
let dflags' = dynamicTooMkDynamicDynFlags dflags
hsc_env' <- newHscEnv dflags'
_ <- runPipeline' start_phase hsc_env' env input_fn
maybe_loc maybe_stub_o
return ()
return r
runPipeline'
:: PhasePlus -- ^ When to start
-> HscEnv -- ^ Compilation environment
-> PipeEnv
-> FilePath -- ^ Input filename
-> Maybe ModLocation -- ^ A ModLocation, if this is a Haskell module
-> Maybe FilePath -- ^ stub object, if we have one
-> IO (DynFlags, FilePath) -- ^ (final flags, output filename)
runPipeline' start_phase hsc_env env input_fn
maybe_loc maybe_stub_o
= do
-- Execute the pipeline...
let state = PipeState{ hsc_env, maybe_loc, maybe_stub_o = maybe_stub_o }
evalP (pipeLoop start_phase input_fn) env state
-- ---------------------------------------------------------------------------
-- outer pipeline loop
-- | pipeLoop runs phases until we reach the stop phase
pipeLoop :: PhasePlus -> FilePath -> CompPipeline (DynFlags, FilePath)
pipeLoop phase input_fn = do
env <- getPipeEnv
dflags <- getDynFlags
-- See Note [Partial ordering on phases]
let happensBefore' = happensBefore dflags
stopPhase = stop_phase env
case phase of
RealPhase realPhase | realPhase `eqPhase` stopPhase -- All done
-> -- Sometimes, a compilation phase doesn't actually generate any output
-- (eg. the CPP phase when -fcpp is not turned on). If we end on this
-- stage, but we wanted to keep the output, then we have to explicitly
-- copy the file, remembering to prepend a {-# LINE #-} pragma so that
-- further compilation stages can tell what the original filename was.
case output_spec env of
Temporary ->
return (dflags, input_fn)
output ->
do pst <- getPipeState
final_fn <- liftIO $ getOutputFilename
stopPhase output (src_basename env)
dflags stopPhase (maybe_loc pst)
when (final_fn /= input_fn) $ do
let msg = ("Copying `" ++ input_fn ++"' to `" ++ final_fn ++ "'")
line_prag = Just ("{-# LINE 1 \"" ++ src_filename env ++ "\" #-}\n")
liftIO $ copyWithHeader dflags msg line_prag input_fn final_fn
return (dflags, final_fn)
| not (realPhase `happensBefore'` stopPhase)
-- Something has gone wrong. We'll try to cover all the cases when
-- this could happen, so if we reach here it is a panic.
-- eg. it might happen if the -C flag is used on a source file that
-- has {-# OPTIONS -fasm #-}.
-> panic ("pipeLoop: at phase " ++ show realPhase ++
" but I wanted to stop at phase " ++ show stopPhase)
_
-> do liftIO $ debugTraceMsg dflags 4
(ptext (sLit "Running phase") <+> ppr phase)
(next_phase, output_fn) <- runHookedPhase phase input_fn dflags
r <- pipeLoop next_phase output_fn
case phase of
HscOut {} ->
whenGeneratingDynamicToo dflags $ do
setDynFlags $ dynamicTooMkDynamicDynFlags dflags
-- TODO shouldn't ignore result:
_ <- pipeLoop phase input_fn
return ()
_ ->
return ()
return r
runHookedPhase :: PhasePlus -> FilePath -> DynFlags
-> CompPipeline (PhasePlus, FilePath)
runHookedPhase pp input dflags =
lookupHook runPhaseHook runPhase dflags pp input dflags
-- -----------------------------------------------------------------------------
-- In each phase, we need to know into what filename to generate the
-- output. All the logic about which filenames we generate output
-- into is embodied in the following function.
phaseOutputFilename :: Phase{-next phase-} -> CompPipeline FilePath
phaseOutputFilename next_phase = do
PipeEnv{stop_phase, src_basename, output_spec} <- getPipeEnv
PipeState{maybe_loc, hsc_env} <- getPipeState
let dflags = hsc_dflags hsc_env
liftIO $ getOutputFilename stop_phase output_spec
src_basename dflags next_phase maybe_loc
getOutputFilename
:: Phase -> PipelineOutput -> String
-> DynFlags -> Phase{-next phase-} -> Maybe ModLocation -> IO FilePath
getOutputFilename stop_phase output basename dflags next_phase maybe_location
| is_last_phase, Persistent <- output = persistent_fn
| is_last_phase, SpecificFile <- output = case outputFile dflags of
Just f -> return f
Nothing ->
panic "SpecificFile: No filename"
| keep_this_output = persistent_fn
| otherwise = newTempName dflags suffix
where
hcsuf = hcSuf dflags
odir = objectDir dflags
osuf = objectSuf dflags
keep_hc = gopt Opt_KeepHcFiles dflags
keep_s = gopt Opt_KeepSFiles dflags
keep_bc = gopt Opt_KeepLlvmFiles dflags
myPhaseInputExt HCc = hcsuf
myPhaseInputExt MergeStub = osuf
myPhaseInputExt StopLn = osuf
myPhaseInputExt other = phaseInputExt other
is_last_phase = next_phase `eqPhase` stop_phase
-- sometimes, we keep output from intermediate stages
keep_this_output =
case next_phase of
As _ | keep_s -> True
LlvmOpt | keep_bc -> True
HCc | keep_hc -> True
_other -> False
suffix = myPhaseInputExt next_phase
-- persistent object files get put in odir
persistent_fn
| StopLn <- next_phase = return odir_persistent
| otherwise = return persistent
persistent = basename <.> suffix
odir_persistent
| Just loc <- maybe_location = ml_obj_file loc
| Just d <- odir = d </> persistent
| otherwise = persistent
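
-- Illustrative, not part of the original source: when an intermediate output
-- is kept (e.g. the assembly for Foo.hs under -keep-s-files), it is written
-- to basename <.> suffix, i.e. Foo.s next to the source; a final object file
-- instead goes to the ModLocation's object file, or under -odir when set.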
-- -----------------------------------------------------------------------------
-- | Each phase in the pipeline returns the next phase to execute, and the
-- name of the file in which the output was placed.
--
-- We must do things dynamically this way, because we often don't know
-- what the rest of the phases will be until part-way through the
-- compilation: for example, an {-# OPTIONS -fasm #-} at the beginning
-- of a source file can change the latter stages of the pipeline from
-- taking the LLVM route to using the native code generator.
--
runPhase :: PhasePlus -- ^ Run this phase
-> FilePath -- ^ name of the input file
-> DynFlags -- ^ for convenience, we pass the current dflags in
-> CompPipeline (PhasePlus, -- next phase to run
FilePath) -- output filename
-- Invariant: the output filename always contains the output
-- Interesting case: Hsc when there is no recompilation to do
-- Then the output filename is still a .o file
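-- Illustrative, not part of the original source: for a plain Foo.hs compiled
-- via the native code generator the phases run roughly
--   Cpp -> HsPp -> Hsc -> HscOut -> As -> (MergeStub) -> StopLn
-- although the exact chain depends on file pragmas and the chosen backend.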
-------------------------------------------------------------------------------
-- Unlit phase
runPhase (RealPhase (Unlit sf)) input_fn dflags
= do
output_fn <- phaseOutputFilename (Cpp sf)
let flags = [ -- The -h option passes the file name for unlit to
-- put in a #line directive
SysTools.Option "-h"
-- See Note [Don't normalise input filenames].
, SysTools.Option $ escape input_fn
, SysTools.FileOption "" input_fn
, SysTools.FileOption "" output_fn
]
liftIO $ SysTools.runUnlit dflags flags
return (RealPhase (Cpp sf), output_fn)
where
-- escape the characters \, ", and ', but don't try to escape
-- Unicode or anything else (so we don't use Util.charToC
-- here). If we get this wrong, then in
-- Coverage.isGoodTickSrcSpan where we check that the filename in
        -- a SrcLoc is the same as the source filename, the two will
-- look bogusly different. See test:
-- libraries/hpc/tests/function/subdir/tough2.hs
escape ('\\':cs) = '\\':'\\': escape cs
escape ('\"':cs) = '\\':'\"': escape cs
escape ('\'':cs) = '\\':'\'': escape cs
escape (c:cs) = c : escape cs
escape [] = []
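        -- Illustrative, not part of the original source: each backslash,
        -- double quote and single quote in the file name gets a backslash
        -- prefix, so a path such as C:\src\Foo.hs is passed via -h as
        -- C:\\src\\Foo.hs.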
-------------------------------------------------------------------------------
-- Cpp phase : (a) gets OPTIONS out of file
-- (b) runs cpp if necessary
runPhase (RealPhase (Cpp sf)) input_fn dflags0
= do
src_opts <- liftIO $ getOptionsFromFile dflags0 input_fn
(dflags1, unhandled_flags, warns)
<- liftIO $ parseDynamicFilePragma dflags0 src_opts
setDynFlags dflags1
liftIO $ checkProcessArgsResult dflags1 unhandled_flags
if not (xopt Opt_Cpp dflags1) then do
-- we have to be careful to emit warnings only once.
unless (gopt Opt_Pp dflags1) $
liftIO $ handleFlagWarnings dflags1 warns
-- no need to preprocess CPP, just pass input file along
-- to the next phase of the pipeline.
return (RealPhase (HsPp sf), input_fn)
else do
output_fn <- phaseOutputFilename (HsPp sf)
liftIO $ doCpp dflags1 True{-raw-}
input_fn output_fn
-- re-read the pragmas now that we've preprocessed the file
-- See #2464,#3457
src_opts <- liftIO $ getOptionsFromFile dflags0 output_fn
(dflags2, unhandled_flags, warns)
<- liftIO $ parseDynamicFilePragma dflags0 src_opts
liftIO $ checkProcessArgsResult dflags2 unhandled_flags
unless (gopt Opt_Pp dflags2) $
liftIO $ handleFlagWarnings dflags2 warns
-- the HsPp pass below will emit warnings
setDynFlags dflags2
return (RealPhase (HsPp sf), output_fn)
-------------------------------------------------------------------------------
-- HsPp phase
runPhase (RealPhase (HsPp sf)) input_fn dflags
= do
if not (gopt Opt_Pp dflags) then
-- no need to preprocess, just pass input file along
-- to the next phase of the pipeline.
return (RealPhase (Hsc sf), input_fn)
else do
PipeEnv{src_basename, src_suffix} <- getPipeEnv
let orig_fn = src_basename <.> src_suffix
output_fn <- phaseOutputFilename (Hsc sf)
liftIO $ SysTools.runPp dflags
( [ SysTools.Option orig_fn
, SysTools.Option input_fn
, SysTools.FileOption "" output_fn
]
)
-- re-read pragmas now that we've parsed the file (see #3674)
src_opts <- liftIO $ getOptionsFromFile dflags output_fn
(dflags1, unhandled_flags, warns)
<- liftIO $ parseDynamicFilePragma dflags src_opts
setDynFlags dflags1
liftIO $ checkProcessArgsResult dflags1 unhandled_flags
liftIO $ handleFlagWarnings dflags1 warns
return (RealPhase (Hsc sf), output_fn)
-----------------------------------------------------------------------------
-- Hsc phase
-- Compilation of a single module, in "legacy" mode (_not_ under
-- the direction of the compilation manager).
runPhase (RealPhase (Hsc src_flavour)) input_fn dflags0
= do -- normal Hsc mode, not mkdependHS
PipeEnv{ stop_phase=stop,
src_basename=basename,
src_suffix=suff } <- getPipeEnv
-- we add the current directory (i.e. the directory in which
-- the .hs files resides) to the include path, since this is
-- what gcc does, and it's probably what you want.
let current_dir = takeDirectory basename
paths = includePaths dflags0
dflags = dflags0 { includePaths = current_dir : paths }
setDynFlags dflags
-- gather the imports and module name
(hspp_buf,mod_name,imps,src_imps) <- liftIO $ do
do
buf <- hGetStringBuffer input_fn
(src_imps,imps,L _ mod_name) <- getImports dflags buf input_fn (basename <.> suff)
return (Just buf, mod_name, imps, src_imps)
-- Take -o into account if present
-- Very like -ohi, but we must *only* do this if we aren't linking
-- (If we're linking then the -o applies to the linked thing, not to
-- the object file for one module.)
-- Note the nasty duplication with the same computation in compileFile above
location <- getLocation src_flavour mod_name
let o_file = ml_obj_file location -- The real object file
hi_file = ml_hi_file location
dest_file | writeInterfaceOnlyMode dflags
= hi_file
| otherwise
= o_file
-- Figure out if the source has changed, for recompilation avoidance.
--
-- Setting source_unchanged to True means that M.o seems
-- to be up to date wrt M.hs; so no need to recompile unless imports have
-- changed (which the compiler itself figures out).
-- Setting source_unchanged to False tells the compiler that M.o is out of
-- date wrt M.hs (or M.o doesn't exist) so we must recompile regardless.
src_timestamp <- liftIO $ getModificationUTCTime (basename <.> suff)
source_unchanged <- liftIO $
if not (isStopLn stop)
-- SourceModified unconditionally if
-- (a) recompilation checker is off, or
-- (b) we aren't going all the way to .o file (e.g. ghc -S)
then return SourceModified
-- Otherwise look at file modification dates
else do dest_file_exists <- doesFileExist dest_file
if not dest_file_exists
then return SourceModified -- Need to recompile
else do t2 <- getModificationUTCTime dest_file
if t2 > src_timestamp
then return SourceUnmodified
else return SourceModified
PipeState{hsc_env=hsc_env'} <- getPipeState
-- Tell the finder cache about this module
mod <- liftIO $ addHomeModuleToFinder hsc_env' mod_name location
-- Make the ModSummary to hand to hscMain
let
mod_summary = ModSummary { ms_mod = mod,
ms_hsc_src = src_flavour,
ms_hspp_file = input_fn,
ms_hspp_opts = dflags,
ms_hspp_buf = hspp_buf,
ms_location = location,
ms_hs_date = src_timestamp,
ms_obj_date = Nothing,
ms_iface_date = Nothing,
ms_textual_imps = imps,
ms_srcimps = src_imps,
ms_merge_imps = (False, []) }
-- run the compiler!
let msg hsc_env _ what _ = oneShotMsg hsc_env what
(result, _) <- liftIO $ hscIncrementalCompile True Nothing (Just msg) hsc_env'
mod_summary source_unchanged Nothing (1,1)
return (HscOut src_flavour mod_name result,
panic "HscOut doesn't have an input filename")
runPhase (HscOut src_flavour mod_name result) _ dflags = do
location <- getLocation src_flavour mod_name
setModLocation location
let o_file = ml_obj_file location -- The real object file
hsc_lang = hscTarget dflags
next_phase = hscPostBackendPhase dflags src_flavour hsc_lang
case result of
HscNotGeneratingCode ->
return (RealPhase StopLn,
panic "No output filename from Hsc when no-code")
HscUpToDate ->
do liftIO $ touchObjectFile dflags o_file
-- The .o file must have a later modification date
-- than the source file (else we wouldn't get Nothing)
-- but we touch it anyway, to keep 'make' happy (we think).
return (RealPhase StopLn, o_file)
HscUpdateBoot ->
do -- In the case of hs-boot files, generate a dummy .o-boot
-- stamp file for the benefit of Make
liftIO $ touchObjectFile dflags o_file
return (RealPhase StopLn, o_file)
HscUpdateBootMerge ->
do -- We need to create a REAL but empty .o file
-- because we are going to attempt to put it in a library
PipeState{hsc_env=hsc_env'} <- getPipeState
let input_fn = expectJust "runPhase" (ml_hs_file location)
basename = dropExtension input_fn
liftIO $ compileEmptyStub dflags hsc_env' basename location
return (RealPhase StopLn, o_file)
HscRecomp cgguts mod_summary
-> do output_fn <- phaseOutputFilename next_phase
PipeState{hsc_env=hsc_env'} <- getPipeState
(outputFilename, mStub) <- liftIO $ hscGenHardCode hsc_env' cgguts mod_summary output_fn
case mStub of
Nothing -> return ()
Just stub_c ->
do stub_o <- liftIO $ compileStub hsc_env' stub_c
setStubO stub_o
return (RealPhase next_phase, outputFilename)
-----------------------------------------------------------------------------
-- Cmm phase
runPhase (RealPhase CmmCpp) input_fn dflags
= do
output_fn <- phaseOutputFilename Cmm
liftIO $ doCpp dflags False{-not raw-}
input_fn output_fn
return (RealPhase Cmm, output_fn)
runPhase (RealPhase Cmm) input_fn dflags
= do
let hsc_lang = hscTarget dflags
let next_phase = hscPostBackendPhase dflags HsSrcFile hsc_lang
output_fn <- phaseOutputFilename next_phase
PipeState{hsc_env} <- getPipeState
liftIO $ hscCompileCmmFile hsc_env input_fn output_fn
return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- Cc phase
-- we don't support preprocessing .c files (with -E) now. Doing so introduces
-- way too many hacks, and I can't say I've ever used it anyway.
runPhase (RealPhase cc_phase) input_fn dflags
| any (cc_phase `eqPhase`) [Cc, Ccxx, HCc, Cobjc, Cobjcxx]
= do
let platform = targetPlatform dflags
hcc = cc_phase `eqPhase` HCc
let cmdline_include_paths = includePaths dflags
-- HC files have the dependent packages stamped into them
pkgs <- if hcc then liftIO $ getHCFilePackages input_fn else return []
-- add package include paths even if we're just compiling .c
-- files; this is the Value Add(TM) that using ghc instead of
-- gcc gives you :)
pkg_include_dirs <- liftIO $ getPackageIncludePath dflags pkgs
let include_paths = foldr (\ x xs -> ("-I" ++ x) : xs) []
(cmdline_include_paths ++ pkg_include_dirs)
let gcc_extra_viac_flags = extraGccViaCFlags dflags
let pic_c_flags = picCCOpts dflags
let verbFlags = getVerbFlags dflags
-- cc-options are not passed when compiling .hc files. Our
        -- hc code doesn't #include any header files anyway, so these
-- options aren't necessary.
pkg_extra_cc_opts <- liftIO $
if cc_phase `eqPhase` HCc
then return []
else getPackageExtraCcOpts dflags pkgs
framework_paths <-
if platformUsesFrameworks platform
then do pkgFrameworkPaths <- liftIO $ getPackageFrameworkPath dflags pkgs
let cmdlineFrameworkPaths = frameworkPaths dflags
return $ map ("-F"++)
(cmdlineFrameworkPaths ++ pkgFrameworkPaths)
else return []
let split_objs = gopt Opt_SplitObjs dflags
split_opt | hcc && split_objs = [ "-DUSE_SPLIT_MARKERS" ]
| otherwise = [ ]
let cc_opt | optLevel dflags >= 2 = [ "-O2" ]
| optLevel dflags >= 1 = [ "-O" ]
| otherwise = []
-- Decide next phase
let next_phase = As False
output_fn <- phaseOutputFilename next_phase
let
more_hcc_opts =
-- on x86 the floating point regs have greater precision
-- than a double, which leads to unpredictable results.
-- By default, we turn this off with -ffloat-store unless
-- the user specified -fexcess-precision.
(if platformArch platform == ArchX86 &&
not (gopt Opt_ExcessPrecision dflags)
then [ "-ffloat-store" ]
else []) ++
-- gcc's -fstrict-aliasing allows two accesses to memory
-- to be considered non-aliasing if they have different types.
-- This interacts badly with the C code we generate, which is
-- very weakly typed, being derived from C--.
["-fno-strict-aliasing"]
ghcVersionH <- liftIO $ getGhcVersionPathName dflags
let gcc_lang_opt | cc_phase `eqPhase` Ccxx = "c++"
| cc_phase `eqPhase` Cobjc = "objective-c"
| cc_phase `eqPhase` Cobjcxx = "objective-c++"
| otherwise = "c"
liftIO $ SysTools.runCc dflags (
-- force the C compiler to interpret this file as C when
-- compiling .hc files, by adding the -x c option.
-- Also useful for plain .c files, just in case GHC saw a
-- -x c option.
[ SysTools.Option "-x", SysTools.Option gcc_lang_opt
, SysTools.FileOption "" input_fn
, SysTools.Option "-o"
, SysTools.FileOption "" output_fn
]
++ map SysTools.Option (
pic_c_flags
-- Stub files generated for foreign exports reference the runIO_closure
-- and runNonIO_closure symbols, which are defined in the base package.
-- These symbols are imported into the stub.c file via RtsAPI.h, and the
-- way we do the import depends on whether we're currently compiling
-- the base package or not.
++ (if platformOS platform == OSMinGW32 &&
thisPackage dflags == baseUnitId
then [ "-DCOMPILING_BASE_PACKAGE" ]
else [])
-- We only support SparcV9 and better because V8 lacks an atomic CAS
-- instruction. Note that the user can still override this
-- (e.g., -mcpu=ultrasparc) as GCC picks the "best" -mcpu flag
-- regardless of the ordering.
--
-- This is a temporary hack. See #2872, commit
-- 5bd3072ac30216a505151601884ac88bf404c9f2
++ (if platformArch platform == ArchSPARC
then ["-mcpu=v9"]
else [])
-- GCC 4.6+ doesn't like -Wimplicit when compiling C++.
++ (if (cc_phase /= Ccxx && cc_phase /= Cobjcxx)
then ["-Wimplicit"]
else [])
++ (if hcc
then gcc_extra_viac_flags ++ more_hcc_opts
else [])
++ verbFlags
++ [ "-S" ]
++ cc_opt
++ [ "-D__GLASGOW_HASKELL__="++cProjectVersionInt
, "-include", ghcVersionH
]
++ framework_paths
++ split_opt
++ include_paths
++ pkg_extra_cc_opts
))
return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- Splitting phase
runPhase (RealPhase Splitter) input_fn dflags
= do -- tmp_pfx is the prefix used for the split .s files
split_s_prefix <- liftIO $ SysTools.newTempName dflags "split"
let n_files_fn = split_s_prefix
liftIO $ SysTools.runSplit dflags
[ SysTools.FileOption "" input_fn
, SysTools.FileOption "" split_s_prefix
, SysTools.FileOption "" n_files_fn
]
-- Save the number of split files for future reference
s <- liftIO $ readFile n_files_fn
let n_files = read s :: Int
dflags' = dflags { splitInfo = Just (split_s_prefix, n_files) }
setDynFlags dflags'
-- Remember to delete all these files
liftIO $ addFilesToClean dflags'
[ split_s_prefix ++ "__" ++ show n ++ ".s"
| n <- [1..n_files]]
return (RealPhase SplitAs,
"**splitter**") -- we don't use the filename in SplitAs
-----------------------------------------------------------------------------
-- As, SplitAs phase : Assembler
-- This is for calling the assembler on a regular assembly file (not split).
runPhase (RealPhase (As with_cpp)) input_fn dflags
= do
-- LLVM from version 3.0 onwards doesn't support the OS X system
-- assembler, so we use clang as the assembler instead. (#5636)
let whichAsProg | hscTarget dflags == HscLlvm &&
platformOS (targetPlatform dflags) == OSDarwin
= return SysTools.runClang
| otherwise = return SysTools.runAs
as_prog <- whichAsProg
let cmdline_include_paths = includePaths dflags
let pic_c_flags = picCCOpts dflags
next_phase <- maybeMergeStub
output_fn <- phaseOutputFilename next_phase
-- we create directories for the object file, because it
-- might be a hierarchical module.
liftIO $ createDirectoryIfMissing True (takeDirectory output_fn)
ccInfo <- liftIO $ getCompilerInfo dflags
let runAssembler inputFilename outputFilename
= liftIO $ as_prog dflags
([ SysTools.Option ("-I" ++ p) | p <- cmdline_include_paths ]
-- See Note [-fPIC for assembler]
++ map SysTools.Option pic_c_flags
-- We only support SparcV9 and better because V8 lacks an atomic CAS
-- instruction so we have to make sure that the assembler accepts the
-- instruction set. Note that the user can still override this
-- (e.g., -mcpu=ultrasparc). GCC picks the "best" -mcpu flag
-- regardless of the ordering.
--
-- This is a temporary hack.
++ (if platformArch (targetPlatform dflags) == ArchSPARC
then [SysTools.Option "-mcpu=v9"]
else [])
++ (if any (ccInfo ==) [Clang, AppleClang, AppleClang51]
then [SysTools.Option "-Qunused-arguments"]
else [])
++ [ SysTools.Option "-x"
, if with_cpp
then SysTools.Option "assembler-with-cpp"
else SysTools.Option "assembler"
, SysTools.Option "-c"
, SysTools.FileOption "" inputFilename
, SysTools.Option "-o"
, SysTools.FileOption "" outputFilename
])
liftIO $ debugTraceMsg dflags 4 (text "Running the assembler")
runAssembler input_fn output_fn
return (RealPhase next_phase, output_fn)
-- This is for calling the assembler on a split assembly file (so a collection
-- of assembly files)
runPhase (RealPhase SplitAs) _input_fn dflags
= do
-- we'll handle the stub_o file in this phase, so don't MergeStub,
-- just jump straight to StopLn afterwards.
let next_phase = StopLn
output_fn <- phaseOutputFilename next_phase
let base_o = dropExtension output_fn
osuf = objectSuf dflags
split_odir = base_o ++ "_" ++ osuf ++ "_split"
let pic_c_flags = picCCOpts dflags
-- this also creates the hierarchy
liftIO $ createDirectoryIfMissing True split_odir
-- remove M_split/ *.o, because we're going to archive M_split/ *.o
-- later and we don't want to pick up any old objects.
fs <- liftIO $ getDirectoryContents split_odir
liftIO $ mapM_ removeFile $
map (split_odir </>) $ filter (osuf `isSuffixOf`) fs
let (split_s_prefix, n) = case splitInfo dflags of
Nothing -> panic "No split info"
Just x -> x
let split_s n = split_s_prefix ++ "__" ++ show n <.> "s"
split_obj :: Int -> FilePath
split_obj n = split_odir </>
takeFileName base_o ++ "__" ++ show n <.> osuf
let assemble_file n
= SysTools.runAs dflags (
-- We only support SparcV9 and better because V8 lacks an atomic CAS
-- instruction so we have to make sure that the assembler accepts the
-- instruction set. Note that the user can still override this
-- (e.g., -mcpu=ultrasparc). GCC picks the "best" -mcpu flag
-- regardless of the ordering.
--
-- This is a temporary hack.
(if platformArch (targetPlatform dflags) == ArchSPARC
then [SysTools.Option "-mcpu=v9"]
else []) ++
-- See Note [-fPIC for assembler]
map SysTools.Option pic_c_flags ++
[ SysTools.Option "-c"
, SysTools.Option "-o"
, SysTools.FileOption "" (split_obj n)
, SysTools.FileOption "" (split_s n)
])
liftIO $ mapM_ assemble_file [1..n]
-- Note [pipeline-split-init]
-- If we have a stub file, it may contain constructor
-- functions for initialisation of this module. We can't
-- simply leave the stub as a separate object file, because it
-- will never be linked in: nothing refers to it. We need to
-- ensure that if we ever refer to the data in this module
-- that needs initialisation, then we also pull in the
-- initialisation routine.
--
-- To that end, we make a DANGEROUS ASSUMPTION here: the data
-- that needs to be initialised is all in the FIRST split
-- object. See Note [codegen-split-init].
PipeState{maybe_stub_o} <- getPipeState
case maybe_stub_o of
Nothing -> return ()
Just stub_o -> liftIO $ do
tmp_split_1 <- newTempName dflags osuf
let split_1 = split_obj 1
copyFile split_1 tmp_split_1
removeFile split_1
joinObjectFiles dflags [tmp_split_1, stub_o] split_1
-- join them into a single .o file
liftIO $ joinObjectFiles dflags (map split_obj [1..n]) output_fn
return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- LlvmOpt phase
runPhase (RealPhase LlvmOpt) input_fn dflags
= do
let opt_lvl = max 0 (min 2 $ optLevel dflags)
-- don't specify anything if the user has specified commands. We do this
-- for opt but not llc since opt is very specifically for optimisation
-- passes only, so if the user is passing us extra options we assume
-- they know what they are doing and don't get in the way.
optFlag = if null (getOpts dflags opt_lo)
then map SysTools.Option $ words (llvmOpts !! opt_lvl)
else []
tbaa | gopt Opt_LlvmTBAA dflags = "--enable-tbaa=true"
| otherwise = "--enable-tbaa=false"
output_fn <- phaseOutputFilename LlvmLlc
liftIO $ SysTools.runLlvmOpt dflags
([ SysTools.FileOption "" input_fn,
SysTools.Option "-o",
SysTools.FileOption "" output_fn]
++ optFlag
++ [SysTools.Option tbaa])
return (RealPhase LlvmLlc, output_fn)
where
-- we always (unless -optlo specified) run Opt since we rely on it to
-- fix up some pretty big deficiencies in the code we generate
llvmOpts = [ "-mem2reg -globalopt"
, "-O1 -globalopt"
, "-O2"
]
-----------------------------------------------------------------------------
-- LlvmLlc phase
runPhase (RealPhase LlvmLlc) input_fn dflags
= do
let opt_lvl = max 0 (min 2 $ optLevel dflags)
-- iOS requires external references to be loaded indirectly from the
-- DATA segment or dyld traps at runtime writing into TEXT: see #7722
rmodel | platformOS (targetPlatform dflags) == OSiOS = "dynamic-no-pic"
| gopt Opt_PIC dflags = "pic"
| not (gopt Opt_Static dflags) = "dynamic-no-pic"
| otherwise = "static"
tbaa | gopt Opt_LlvmTBAA dflags = "--enable-tbaa=true"
| otherwise = "--enable-tbaa=false"
-- hidden debugging flag '-dno-llvm-mangler' to skip mangling
let next_phase = case gopt Opt_NoLlvmMangler dflags of
False -> LlvmMangle
True | gopt Opt_SplitObjs dflags -> Splitter
True -> As False
output_fn <- phaseOutputFilename next_phase
liftIO $ SysTools.runLlvmLlc dflags
([ SysTools.Option (llvmOpts !! opt_lvl),
SysTools.Option $ "-relocation-model=" ++ rmodel,
SysTools.FileOption "" input_fn,
SysTools.Option "-o", SysTools.FileOption "" output_fn]
++ [SysTools.Option tbaa]
++ map SysTools.Option fpOpts
++ map SysTools.Option abiOpts
++ map SysTools.Option sseOpts
++ map SysTools.Option avxOpts
++ map SysTools.Option avx512Opts
++ map SysTools.Option stackAlignOpts)
return (RealPhase next_phase, output_fn)
where
-- Bug in LLVM at O3 on OSX.
llvmOpts = if platformOS (targetPlatform dflags) == OSDarwin
then ["-O1", "-O2", "-O2"]
else ["-O1", "-O2", "-O3"]
-- On ARMv7 using LLVM, LLVM fails to allocate floating point registers
-- while compiling GHC source code. It's probably due to the fact that it
-- does not enable VFP by default. Let's do this manually here
fpOpts = case platformArch (targetPlatform dflags) of
ArchARM ARMv7 ext _ -> if (elem VFPv3 ext)
then ["-mattr=+v7,+vfp3"]
else if (elem VFPv3D16 ext)
then ["-mattr=+v7,+vfp3,+d16"]
else []
ArchARM ARMv6 ext _ -> if (elem VFPv2 ext)
then ["-mattr=+v6,+vfp2"]
else ["-mattr=+v6"]
_ -> []
-- On Ubuntu/Debian with ARM hard float ABI, LLVM's llc still
-- compiles into soft-float ABI. We need to explicitly set the
-- ABI to hard.
abiOpts = case platformArch (targetPlatform dflags) of
ArchARM _ _ HARD -> ["-float-abi=hard"]
ArchARM _ _ _ -> []
_ -> []
sseOpts | isSse4_2Enabled dflags = ["-mattr=+sse42"]
| isSse2Enabled dflags = ["-mattr=+sse2"]
| isSseEnabled dflags = ["-mattr=+sse"]
| otherwise = []
avxOpts | isAvx512fEnabled dflags = ["-mattr=+avx512f"]
| isAvx2Enabled dflags = ["-mattr=+avx2"]
| isAvxEnabled dflags = ["-mattr=+avx"]
| otherwise = []
avx512Opts =
[ "-mattr=+avx512cd" | isAvx512cdEnabled dflags ] ++
[ "-mattr=+avx512er" | isAvx512erEnabled dflags ] ++
[ "-mattr=+avx512pf" | isAvx512pfEnabled dflags ]
stackAlignOpts =
case platformArch (targetPlatform dflags) of
ArchX86_64 | isAvxEnabled dflags -> ["-stack-alignment=32"]
_ -> []
-----------------------------------------------------------------------------
-- LlvmMangle phase
runPhase (RealPhase LlvmMangle) input_fn dflags
= do
let next_phase = if gopt Opt_SplitObjs dflags then Splitter else As False
output_fn <- phaseOutputFilename next_phase
liftIO $ llvmFixupAsm dflags input_fn output_fn
return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- merge in stub objects
runPhase (RealPhase MergeStub) input_fn dflags
= do
PipeState{maybe_stub_o} <- getPipeState
output_fn <- phaseOutputFilename StopLn
liftIO $ createDirectoryIfMissing True (takeDirectory output_fn)
case maybe_stub_o of
Nothing ->
panic "runPhase(MergeStub): no stub"
Just stub_o -> do
liftIO $ joinObjectFiles dflags [input_fn, stub_o] output_fn
return (RealPhase StopLn, output_fn)
-- warning suppression
runPhase (RealPhase other) _input_fn _dflags =
panic ("runPhase: don't know how to run phase " ++ show other)
maybeMergeStub :: CompPipeline Phase
maybeMergeStub
= do
PipeState{maybe_stub_o} <- getPipeState
if isJust maybe_stub_o then return MergeStub else return StopLn
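-- | Build the ModLocation (source, .hi and .o file paths) for the module
-- being compiled: boot-ify it for hs-boot files and honour -ohi and -o
-- (the latter only when we are not going on to link).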
getLocation :: HscSource -> ModuleName -> CompPipeline ModLocation
getLocation src_flavour mod_name = do
dflags <- getDynFlags
PipeEnv{ src_basename=basename,
src_suffix=suff } <- getPipeEnv
-- Build a ModLocation to pass to hscMain.
-- The source filename is rather irrelevant by now, but it's used
-- by hscMain for messages. hscMain also needs
-- the .hi and .o filenames, and this is as good a way
-- as any to generate them, and better than most. (e.g. takes
-- into account the -osuf flags)
location1 <- liftIO $ mkHomeModLocation2 dflags mod_name basename suff
-- Boot-ify it if necessary
let location2 | HsBootFile <- src_flavour = addBootSuffixLocn location1
| otherwise = location1
-- Take -ohi into account if present
-- This can't be done in mkHomeModuleLocation because
-- it only applies to the module being compiled
let ohi = outputHi dflags
location3 | Just fn <- ohi = location2{ ml_hi_file = fn }
| otherwise = location2
-- Take -o into account if present
-- Very like -ohi, but we must *only* do this if we aren't linking
-- (If we're linking then the -o applies to the linked thing, not to
-- the object file for one module.)
-- Note the nasty duplication with the same computation in compileFile above
let expl_o_file = outputFile dflags
location4 | Just ofile <- expl_o_file
, isNoLink (ghcLink dflags)
= location3 { ml_obj_file = ofile }
| otherwise = location3
return location4
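-- | Write the given text to a temporary file with suffix @extn@ and compile
-- it with the C compiler into a temporary object file, adding the RTS
-- package's include directories so that headers such as Rts.h can be found.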
mkExtraObj :: DynFlags -> Suffix -> String -> IO FilePath
mkExtraObj dflags extn xs
= do cFile <- newTempName dflags extn
oFile <- newTempName dflags "o"
writeFile cFile xs
let rtsDetails = getPackageDetails dflags rtsUnitId
pic_c_flags = picCCOpts dflags
SysTools.runCc dflags
([Option "-c",
FileOption "" cFile,
Option "-o",
FileOption "" oFile]
++ map (FileOption "-I") (includeDirs rtsDetails)
++ map Option pic_c_flags)
return oFile
-- When linking a binary, we need to create a C main() function that
-- starts everything off. This used to be compiled statically as part
-- of the RTS, but that made it hard to change the -rtsopts setting,
-- so now we generate and compile a main() stub as part of every
-- binary and pass the -rtsopts setting directly to the RTS (#5373)
--
mkExtraObjToLinkIntoBinary :: DynFlags -> IO FilePath
mkExtraObjToLinkIntoBinary dflags = do
when (gopt Opt_NoHsMain dflags && haveRtsOptsFlags dflags) $ do
log_action dflags dflags SevInfo noSrcSpan defaultUserStyle
(text "Warning: -rtsopts and -with-rtsopts have no effect with -no-hs-main." $$
text " Call hs_init_ghc() from your main() function to set these options.")
mkExtraObj dflags "c" (showSDoc dflags main)
where
main
| gopt Opt_NoHsMain dflags = Outputable.empty
| otherwise = vcat [
text "#include \"Rts.h\"",
text "extern StgClosure ZCMain_main_closure;",
text "int main(int argc, char *argv[])",
char '{',
text " RtsConfig __conf = defaultRtsConfig;",
text " __conf.rts_opts_enabled = "
<> text (show (rtsOptsEnabled dflags)) <> semi,
text " __conf.rts_opts_suggestions = "
<> text (if rtsOptsSuggestions dflags
then "rtsTrue"
else "rtsFalse") <> semi,
case rtsOpts dflags of
Nothing -> Outputable.empty
Just opts -> ptext (sLit " __conf.rts_opts= ") <>
text (show opts) <> semi,
text " __conf.rts_hs_main = rtsTrue;",
text " return hs_main(argc,argv,&ZCMain_main_closure,__conf);",
char '}',
char '\n' -- final newline, to keep gcc happy
]
-- Write out the link info section into a new assembly file. Previously
-- this was included as inline assembly in the main.c file but this
-- is pretty fragile. gas gets upset trying to calculate relative offsets
-- that span the .note section (notably .text) when debug info is present
mkNoteObjsToLinkIntoBinary :: DynFlags -> [UnitId] -> IO [FilePath]
mkNoteObjsToLinkIntoBinary dflags dep_packages = do
link_info <- getLinkInfo dflags dep_packages
if (platformSupportsSavingLinkOpts (platformOS (targetPlatform dflags)))
then fmap (:[]) $ mkExtraObj dflags "s" (showSDoc dflags (link_opts link_info))
else return []
where
link_opts info = hcat [
text "\t.section ", text ghcLinkInfoSectionName,
text ",\"\",",
text elfSectionNote,
text "\n",
text "\t.ascii \"", info', text "\"\n",
-- ALL generated assembly must have this section to disable
-- executable stacks. See also
-- compiler/nativeGen/AsmCodeGen.hs for another instance
-- where we need to do this.
(if platformHasGnuNonexecStack (targetPlatform dflags)
then text ".section .note.GNU-stack,\"\",@progbits\n"
else Outputable.empty)
]
where
info' = text $ escape info
escape :: String -> String
escape = concatMap (charToC.fromIntegral.ord)
elfSectionNote :: String
elfSectionNote = case platformArch (targetPlatform dflags) of
ArchARM _ _ _ -> "%note"
_ -> "@note"
-- The "link info" is a string representing the parameters of the
-- link. We save this information in the binary, and the next time we
-- link, if nothing else has changed, we use the link info stored in
-- the existing binary to decide whether to re-link or not.
getLinkInfo :: DynFlags -> [UnitId] -> IO String
getLinkInfo dflags dep_packages = do
package_link_opts <- getPackageLinkOpts dflags dep_packages
pkg_frameworks <- if platformUsesFrameworks (targetPlatform dflags)
then getPackageFrameworks dflags dep_packages
else return []
let extra_ld_inputs = ldInputs dflags
let
link_info = (package_link_opts,
pkg_frameworks,
rtsOpts dflags,
rtsOptsEnabled dflags,
gopt Opt_NoHsMain dflags,
map showOpt extra_ld_inputs,
getOpts dflags opt_l)
--
return (show link_info)
-----------------------------------------------------------------------------
-- Look for the /* GHC_PACKAGES ... */ comment at the top of a .hc file
getHCFilePackages :: FilePath -> IO [UnitId]
getHCFilePackages filename =
Exception.bracket (openFile filename ReadMode) hClose $ \h -> do
l <- hGetLine h
case l of
'/':'*':' ':'G':'H':'C':'_':'P':'A':'C':'K':'A':'G':'E':'S':rest ->
return (map stringToUnitId (words rest))
_other ->
return []
-----------------------------------------------------------------------------
-- Static linking, of .o files
-- The list of packages passed to link is the list of packages on
-- which this program depends, as discovered by the compilation
-- manager. It is combined with the list of packages that the user
-- specifies on the command line with -package flags.
--
-- In one-shot linking mode, we can't discover the package
-- dependencies (because we haven't actually done any compilation or
-- read any interface files), so the user must explicitly specify all
-- the packages.
linkBinary :: DynFlags -> [FilePath] -> [UnitId] -> IO ()
linkBinary = linkBinary' False
linkBinary' :: Bool -> DynFlags -> [FilePath] -> [UnitId] -> IO ()
linkBinary' staticLink dflags o_files dep_packages = do
let platform = targetPlatform dflags
mySettings = settings dflags
verbFlags = getVerbFlags dflags
output_fn = exeFileName staticLink dflags
-- get the full list of packages to link with, by combining the
-- explicit packages with the auto packages and all of their
-- dependencies, and eliminating duplicates.
full_output_fn <- if isAbsolute output_fn
then return output_fn
else do d <- getCurrentDirectory
return $ normalise (d </> output_fn)
pkg_lib_paths <- getPackageLibraryPath dflags dep_packages
let pkg_lib_path_opts = concatMap get_pkg_lib_path_opts pkg_lib_paths
get_pkg_lib_path_opts l
| osElfTarget (platformOS platform) &&
dynLibLoader dflags == SystemDependent &&
not (gopt Opt_Static dflags)
= let libpath = if gopt Opt_RelativeDynlibPaths dflags
then "$ORIGIN" </>
(l `makeRelativeTo` full_output_fn)
else l
rpath = if gopt Opt_RPath dflags
then ["-Wl,-rpath", "-Wl," ++ libpath]
else []
-- Solaris 11's linker does not support the -rpath-link option. It silently
-- ignores it and then complains about the next option, which is -l<some
-- dir>, as being a directory and not an expected object file, e.g.
-- ld: elf error: file
-- /tmp/ghc-src/libraries/base/dist-install/build:
-- elf_begin: I/O error: region read: Is a directory
rpathlink = if (platformOS platform) == OSSolaris2
then []
else ["-Wl,-rpath-link", "-Wl," ++ l]
in ["-L" ++ l] ++ rpathlink ++ rpath
| osMachOTarget (platformOS platform) &&
dynLibLoader dflags == SystemDependent &&
not (gopt Opt_Static dflags) &&
gopt Opt_RPath dflags
= let libpath = if gopt Opt_RelativeDynlibPaths dflags
then "@loader_path" </>
(l `makeRelativeTo` full_output_fn)
else l
in ["-L" ++ l] ++ ["-Wl,-rpath", "-Wl," ++ libpath]
| otherwise = ["-L" ++ l]
let lib_paths = libraryPaths dflags
let lib_path_opts = map ("-L"++) lib_paths
extraLinkObj <- mkExtraObjToLinkIntoBinary dflags
noteLinkObjs <- mkNoteObjsToLinkIntoBinary dflags dep_packages
pkg_link_opts <- do
(package_hs_libs, extra_libs, other_flags) <- getPackageLinkOpts dflags dep_packages
return $ if staticLink
then package_hs_libs -- If building an executable really means making a static
-- library (e.g. iOS), then we only keep the -l options for
-- HS packages, because libtool doesn't accept other options.
-- In the case of iOS these need to be added by hand to the
-- final link in Xcode.
else other_flags ++ package_hs_libs ++ extra_libs -- -Wl,-u,<sym> contained in other_flags
-- needs to be put before -l<package>,
-- otherwise Solaris linker fails linking
-- a binary with unresolved symbols in RTS
-- which are defined in base package
-- the reason for this is a note in ld(1) about
-- '-u' option: "The placement of this option
-- on the command line is significant.
-- This option must be placed before the library
-- that defines the symbol."
-- frameworks
pkg_framework_opts <- getPkgFrameworkOpts dflags platform dep_packages
let framework_opts = getFrameworkOpts dflags platform
-- probably _stub.o files
let extra_ld_inputs = ldInputs dflags
-- Here are some libs that need to be linked at the *end* of
-- the command line, because they contain symbols that are referred to
-- by the RTS. We can't therefore use the ordinary way opts for these.
let
debug_opts | WayDebug `elem` ways dflags = [
#if defined(HAVE_LIBBFD)
"-lbfd", "-liberty"
#endif
]
| otherwise = []
let thread_opts
| WayThreaded `elem` ways dflags =
let os = platformOS (targetPlatform dflags)
in if os == OSOsf3 then ["-lpthread", "-lexc"]
else if os `elem` [OSMinGW32, OSFreeBSD, OSOpenBSD,
OSNetBSD, OSHaiku, OSQNXNTO, OSiOS, OSDarwin]
then []
else ["-lpthread"]
| otherwise = []
rc_objs <- maybeCreateManifest dflags output_fn
let link = if staticLink
then SysTools.runLibtool
else SysTools.runLink
link dflags (
map SysTools.Option verbFlags
++ [ SysTools.Option "-o"
, SysTools.FileOption "" output_fn
]
++ map SysTools.Option (
[]
-- Permit the linker to auto link _symbol to _imp_symbol.
-- This lets us link against DLLs without needing an "import library".
++ (if platformOS platform == OSMinGW32
then ["-Wl,--enable-auto-import"]
else [])
-- '-no_compact_unwind'
-- C++/Objective-C exceptions cannot use optimised
-- stack unwinding code. The optimised form is the
-- default in Xcode 4 on at least x86_64, and
-- without this flag we're also seeing warnings
-- like
-- ld: warning: could not create compact unwind for .LFB3: non-standard register 5 being saved in prolog
-- on x86.
++ (if sLdSupportsCompactUnwind mySettings &&
not staticLink &&
(platformOS platform == OSDarwin || platformOS platform == OSiOS) &&
case platformArch platform of
ArchX86 -> True
ArchX86_64 -> True
ArchARM {} -> True
ArchARM64 -> True
_ -> False
then ["-Wl,-no_compact_unwind"]
else [])
-- '-no_pie'
-- iOS uses 'dynamic-no-pic', so we must pass this to ld to suppress a warning; see #7722
++ (if platformOS platform == OSiOS &&
not staticLink
then ["-Wl,-no_pie"]
else [])
-- '-Wl,-read_only_relocs,suppress'
-- ld gives loads of warnings like:
-- ld: warning: text reloc in _base_GHCziArr_unsafeArray_info to _base_GHCziArr_unsafeArray_closure
-- when linking any program. We're not sure
-- whether this is something we ought to fix, but
-- for now this flag silences them.
++ (if platformOS platform == OSDarwin &&
platformArch platform == ArchX86 &&
not staticLink
then ["-Wl,-read_only_relocs,suppress"]
else [])
++ o_files
++ lib_path_opts)
++ extra_ld_inputs
++ map SysTools.Option (
rc_objs
++ framework_opts
++ pkg_lib_path_opts
++ extraLinkObj:noteLinkObjs
++ pkg_link_opts
++ pkg_framework_opts
++ debug_opts
++ thread_opts
))
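-- | Compute the linker output filename, summarising the cases below: -o is
-- respected if given (gaining ".exe" on Windows or ".a" for a static
-- archive when it has no extension); otherwise the defaults are "main.exe",
-- "liba.a" or "a.out".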
exeFileName :: Bool -> DynFlags -> FilePath
exeFileName staticLink dflags
| Just s <- outputFile dflags =
case platformOS (targetPlatform dflags) of
OSMinGW32 -> s <?.> "exe"
_ -> if staticLink
then s <?.> "a"
else s
| otherwise =
if platformOS (targetPlatform dflags) == OSMinGW32
then "main.exe"
else if staticLink
then "liba.a"
else "a.out"
where s <?.> ext | null (takeExtension s) = s <.> ext
| otherwise = s
maybeCreateManifest
:: DynFlags
-> FilePath -- filename of executable
-> IO [FilePath] -- extra objects to embed, maybe
maybeCreateManifest dflags exe_filename
| platformOS (targetPlatform dflags) == OSMinGW32 &&
gopt Opt_GenManifest dflags
= do let manifest_filename = exe_filename <.> "manifest"
writeFile manifest_filename $
"<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n"++
" <assembly xmlns=\"urn:schemas-microsoft-com:asm.v1\" manifestVersion=\"1.0\">\n"++
" <assemblyIdentity version=\"1.0.0.0\"\n"++
" processorArchitecture=\"X86\"\n"++
" name=\"" ++ dropExtension exe_filename ++ "\"\n"++
" type=\"win32\"/>\n\n"++
" <trustInfo xmlns=\"urn:schemas-microsoft-com:asm.v3\">\n"++
" <security>\n"++
" <requestedPrivileges>\n"++
" <requestedExecutionLevel level=\"asInvoker\" uiAccess=\"false\"/>\n"++
" </requestedPrivileges>\n"++
" </security>\n"++
" </trustInfo>\n"++
"</assembly>\n"
-- Windows will find the manifest file if it is named
-- foo.exe.manifest. However, for extra robustness, and so that
-- we can move the binary around, we can embed the manifest in
-- the binary itself using windres:
if not (gopt Opt_EmbedManifest dflags) then return [] else do
rc_filename <- newTempName dflags "rc"
rc_obj_filename <- newTempName dflags (objectSuf dflags)
writeFile rc_filename $
"1 24 MOVEABLE PURE " ++ show manifest_filename ++ "\n"
-- magic numbers :-)
-- show is a bit hackish above, but we need to escape the
-- backslashes in the path.
runWindres dflags $ map SysTools.Option $
["--input="++rc_filename,
"--output="++rc_obj_filename,
"--output-format=coff"]
-- no FileOptions here: windres doesn't like seeing
-- backslashes, apparently
removeFile manifest_filename
return [rc_obj_filename]
| otherwise = return []
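-- | Link a shared library, first warning (as the message below says) that
-- -rtsopts and -with-rtsopts have no effect with -shared.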
linkDynLibCheck :: DynFlags -> [String] -> [UnitId] -> IO ()
linkDynLibCheck dflags o_files dep_packages
= do
when (haveRtsOptsFlags dflags) $ do
log_action dflags dflags SevInfo noSrcSpan defaultUserStyle
(text "Warning: -rtsopts and -with-rtsopts have no effect with -shared." $$
text " Call hs_init_ghc() from your main() function to set these options.")
linkDynLib dflags o_files dep_packages
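-- | Build a static archive via linkBinary'; the check below restricts this
-- to Darwin/OS X/iOS.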
linkStaticLibCheck :: DynFlags -> [String] -> [UnitId] -> IO ()
linkStaticLibCheck dflags o_files dep_packages
= do
when (platformOS (targetPlatform dflags) `notElem` [OSiOS, OSDarwin]) $
throwGhcExceptionIO (ProgramError "Static archive creation only supported on Darwin/OS X/iOS")
linkBinary' True dflags o_files dep_packages
-- -----------------------------------------------------------------------------
-- Running CPP
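-- | Run the C preprocessor over @input_fn@, writing to @output_fn@. When
-- @raw@ is True we call the cpp program directly; otherwise we go through
-- the C compiler with -E.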
doCpp :: DynFlags -> Bool -> FilePath -> FilePath -> IO ()
doCpp dflags raw input_fn output_fn = do
let hscpp_opts = picPOpts dflags
let cmdline_include_paths = includePaths dflags
pkg_include_dirs <- getPackageIncludePath dflags []
let include_paths = foldr (\ x xs -> "-I" : x : xs) []
(cmdline_include_paths ++ pkg_include_dirs)
let verbFlags = getVerbFlags dflags
let cpp_prog args | raw = SysTools.runCpp dflags args
| otherwise = SysTools.runCc dflags (SysTools.Option "-E" : args)
let target_defs =
[ "-D" ++ HOST_OS ++ "_BUILD_OS=1",
"-D" ++ HOST_ARCH ++ "_BUILD_ARCH=1",
"-D" ++ TARGET_OS ++ "_HOST_OS=1",
"-D" ++ TARGET_ARCH ++ "_HOST_ARCH=1" ]
-- remember, in code we *compile*, the HOST is the same as our TARGET,
-- and BUILD is the same as our HOST.
let sse_defs =
[ "-D__SSE__=1" | isSseEnabled dflags ] ++
[ "-D__SSE2__=1" | isSse2Enabled dflags ] ++
[ "-D__SSE4_2__=1" | isSse4_2Enabled dflags ]
let avx_defs =
[ "-D__AVX__=1" | isAvxEnabled dflags ] ++
[ "-D__AVX2__=1" | isAvx2Enabled dflags ] ++
[ "-D__AVX512CD__=1" | isAvx512cdEnabled dflags ] ++
[ "-D__AVX512ER__=1" | isAvx512erEnabled dflags ] ++
[ "-D__AVX512F__=1" | isAvx512fEnabled dflags ] ++
[ "-D__AVX512PF__=1" | isAvx512pfEnabled dflags ]
backend_defs <- getBackendDefs dflags
#ifdef GHCI
let th_defs = [ "-D__GLASGOW_HASKELL_TH__=YES" ]
#else
let th_defs = [ "-D__GLASGOW_HASKELL_TH__=NO" ]
#endif
-- Default CPP defines in Haskell source
ghcVersionH <- getGhcVersionPathName dflags
let hsSourceCppOpts =
[ "-D__GLASGOW_HASKELL__="++cProjectVersionInt
, "-include", ghcVersionH
]
cpp_prog ( map SysTools.Option verbFlags
++ map SysTools.Option include_paths
++ map SysTools.Option hsSourceCppOpts
++ map SysTools.Option target_defs
++ map SysTools.Option backend_defs
++ map SysTools.Option th_defs
++ map SysTools.Option hscpp_opts
++ map SysTools.Option sse_defs
++ map SysTools.Option avx_defs
-- Set the language mode to assembler-with-cpp when preprocessing. This
-- alleviates some of the C99 macro rules relating to whitespace and the hash
-- operator, which we tend to abuse. Clang in particular is not very happy
-- about this.
++ [ SysTools.Option "-x"
, SysTools.Option "assembler-with-cpp"
, SysTools.Option input_fn
-- We hackily use Option instead of FileOption here, so that the file
-- name is not back-slashed on Windows. cpp is capable of
-- dealing with / in filenames, so it works fine. Furthermore
-- if we put in backslashes, cpp outputs #line directives
-- with *double* backslashes. And that in turn means that
-- our error messages get double backslashes in them.
-- In due course we should arrange that the lexer deals
-- with these \\ escapes properly.
, SysTools.Option "-o"
, SysTools.FileOption "" output_fn
])
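-- | CPP defines that depend on the chosen backend; currently just
-- __GLASGOW_HASKELL_LLVM__, set to the detected LLVM version, when
-- compiling via LLVM.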
getBackendDefs :: DynFlags -> IO [String]
getBackendDefs dflags | hscTarget dflags == HscLlvm = do
llvmVer <- figureLlvmVersion dflags
return $ case llvmVer of
Just n -> [ "-D__GLASGOW_HASKELL_LLVM__="++show n ]
_ -> []
getBackendDefs _ =
return []
-- ---------------------------------------------------------------------------
-- join object files into a single relocatable object file, using ld -r
joinObjectFiles :: DynFlags -> [FilePath] -> FilePath -> IO ()
joinObjectFiles dflags o_files output_fn = do
let mySettings = settings dflags
ldIsGnuLd = sLdIsGnuLd mySettings
osInfo = platformOS (targetPlatform dflags)
ld_r args cc = SysTools.runLink dflags ([
SysTools.Option "-nostdlib",
SysTools.Option "-Wl,-r"
]
++ (if any (cc ==) [Clang, AppleClang, AppleClang51]
then []
else [SysTools.Option "-nodefaultlibs"])
++ (if osInfo == OSFreeBSD
then [SysTools.Option "-L/usr/lib"]
else [])
-- gcc on sparc sets -Wl,--relax implicitly, but
-- -r and --relax are incompatible for ld, so
-- disable --relax explicitly.
++ (if platformArch (targetPlatform dflags) == ArchSPARC
&& ldIsGnuLd
then [SysTools.Option "-Wl,-no-relax"]
else [])
++ map SysTools.Option ld_build_id
++ [ SysTools.Option "-o",
SysTools.FileOption "" output_fn ]
++ args)
-- suppress the generation of the .note.gnu.build-id section,
-- which we don't need and sometimes causes ld to emit a
-- warning:
ld_build_id | sLdSupportsBuildId mySettings = ["-Wl,--build-id=none"]
| otherwise = []
ccInfo <- getCompilerInfo dflags
if ldIsGnuLd
then do
script <- newTempName dflags "ldscript"
cwd <- getCurrentDirectory
let o_files_abs = map (cwd </>) o_files
writeFile script $ "INPUT(" ++ unwords o_files_abs ++ ")"
ld_r [SysTools.FileOption "" script] ccInfo
else if sLdSupportsFilelist mySettings
then do
filelist <- newTempName dflags "filelist"
writeFile filelist $ unlines o_files
ld_r [SysTools.Option "-Wl,-filelist",
SysTools.FileOption "-Wl," filelist] ccInfo
else do
ld_r (map (SysTools.FileOption "") o_files) ccInfo
-- -----------------------------------------------------------------------------
-- Misc.
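-- | True when we only want interface files: -fwrite-interface combined with
-- the HscNothing target (i.e. no code generation).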
writeInterfaceOnlyMode :: DynFlags -> Bool
writeInterfaceOnlyMode dflags =
gopt Opt_WriteInterface dflags &&
HscNothing == hscTarget dflags
-- | What phase to run after one of the backend code generators has run
hscPostBackendPhase :: DynFlags -> HscSource -> HscTarget -> Phase
hscPostBackendPhase _ HsBootFile _ = StopLn
hscPostBackendPhase _ HsBootMerge _ = StopLn
hscPostBackendPhase dflags _ hsc_lang =
case hsc_lang of
HscC -> HCc
HscAsm | gopt Opt_SplitObjs dflags -> Splitter
| otherwise -> As False
HscLlvm -> LlvmOpt
HscNothing -> StopLn
HscInterpreted -> StopLn
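-- | Update the timestamp on an object file, creating its directory first if
-- necessary.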
touchObjectFile :: DynFlags -> FilePath -> IO ()
touchObjectFile dflags path = do
createDirectoryIfMissing True $ takeDirectory path
SysTools.touch dflags "Touching object file" path
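-- | True if -with-rtsopts was given, or -rtsopts is set to something other
-- than safe-only; used when deciding whether to warn that these flags will
-- have no effect.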
haveRtsOptsFlags :: DynFlags -> Bool
haveRtsOptsFlags dflags =
isJust (rtsOpts dflags) || case rtsOptsEnabled dflags of
RtsOptsSafeOnly -> False
_ -> True
-- | Find out path to @ghcversion.h@ file
getGhcVersionPathName :: DynFlags -> IO FilePath
getGhcVersionPathName dflags = do
dirs <- getPackageIncludePath dflags [rtsUnitId]
found <- filterM doesFileExist (map (</> "ghcversion.h") dirs)
case found of
[] -> throwGhcExceptionIO (InstallationError ("ghcversion.h missing"))
(x:_) -> return x
-- Note [-fPIC for assembler]
-- When compiling a .c source file, GHC's driver pipeline basically
-- does the following two things:
-- 1. ${CC} -S 'PIC_CFLAGS' source.c
-- 2. ${CC} -x assembler -c 'PIC_CFLAGS' source.S
--
-- Why do we need to pass 'PIC_CFLAGS' both to the C compiler and the assembler?
-- Because on some architectures (at least sparc32) the assembler also chooses
-- the relocation type!
-- Consider the following C module:
--
-- /* pic-sample.c */
-- int v;
-- void set_v (int n) { v = n; }
-- int get_v (void) { return v; }
--
-- $ gcc -S -fPIC pic-sample.c
-- $ gcc -c pic-sample.s -o pic-sample.no-pic.o # incorrect binary
-- $ gcc -c -fPIC pic-sample.s -o pic-sample.pic.o # correct binary
--
-- $ objdump -r -d pic-sample.pic.o > pic-sample.pic.o.od
-- $ objdump -r -d pic-sample.no-pic.o > pic-sample.no-pic.o.od
-- $ diff -u pic-sample.pic.o.od pic-sample.no-pic.o.od
--
-- Most architectures won't show any difference in this test, but on sparc32
-- the following assembly snippet:
--
-- sethi %hi(_GLOBAL_OFFSET_TABLE_-8), %l7
--
-- generates two kinds of relocations, of which only 'R_SPARC_PC22' is correct:
--
-- 3c: 2f 00 00 00 sethi %hi(0), %l7
-- - 3c: R_SPARC_PC22 _GLOBAL_OFFSET_TABLE_-0x8
-- + 3c: R_SPARC_HI22 _GLOBAL_OFFSET_TABLE_-0x8
{- Note [Don't normalise input filenames]
Summary
We used to normalise input filenames when starting the unlit phase. This
broke hpc in `--make` mode with imported literate modules (#2991).
Introduction
1) --make
When compiling a module with --make, GHC scans its imports to find out which
other modules it needs to compile too. It turns out that there is a small
difference between saying `ghc --make A.hs`, when `A` imports `B`, and
specifying both modules on the command line with `ghc --make A.hs B.hs`. In
the former case, the filename for B is inferred to be './B.hs' instead of
'B.hs'.
2) unlit
When GHC compiles a literate haskell file, the source code first needs to go
through unlit, which turns it into normal Haskell source code. At the start
of the unlit phase, in `Driver.Pipeline.runPhase`, we call unlit with the
option `-h` and the name of the original file. We used to normalise this
filename using System.FilePath.normalise, which among other things removes
an initial './'. unlit then uses that filename in #line directives that it
inserts in the transformed source code.
3) SrcSpan
A SrcSpan represents a portion of a source code file. It has fields
linenumber, start column, end column, and also a reference to the file it
originated from. The SrcSpans for a literate haskell file refer to the
filename that was passed to unlit -h.
4) -fhpc
At some point during compilation with -fhpc, in the function
`deSugar.Coverage.isGoodTickSrcSpan`, we compare the filename that a
`SrcSpan` refers to with the name of the file we are currently compiling.
For some reason I don't yet understand, they can sometimes legitimately be
different, and then hpc ignores that SrcSpan.
Problem
When running `ghc --make -fhpc A.hs`, where `A.hs` imports the literate
module `B.lhs`, `B` is inferred to be in the file `./B.lhs` (1). At the
start of the unlit phase, the name `./B.lhs` is normalised to `B.lhs` (2).
Therefore the SrcSpans of `B` refer to the file `B.lhs` (3), but we are
still compiling `./B.lhs`. Hpc thinks these two filenames are different (4),
doesn't include ticks for B, and we have unhappy customers (#2991).
Solution
Do not normalise `input_fn` when starting the unlit phase.
Alternative solution
Another option would be to not compare the two filenames on equality, but to
use System.FilePath.equalFilePath. That function first normalises its
arguments. The problem is that by the time we need to do the comparison, the
filenames have been turned into FastStrings, probably for performance
reasons, so System.FilePath.equalFilePath can not be used directly.
Archeology
The call to `normalise` was added in a commit called "Fix slash
direction on Windows with the new filePath code" (c9b6b5e8). The problem
that commit was addressing has since been solved in a different manner, in a
commit called "Fix the filename passed to unlit" (1eedbc6b). So the
`normalise` is no longer necessary.
-}
| siddhanathan/ghc | compiler/main/DriverPipeline.hs | bsd-3-clause | 99,249 | 0 | 31 | 33,789 | 15,985 | 8,080 | 7,905 | 1,367 | 43 |