Dataset columns: code (string, length 5 to 1.03M) | repo_name (string, length 5 to 90) | path (string, length 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.Route53.GetHealthCheckLastFailureReason
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | If you want to learn why a health check is currently failing or why it failed
-- most recently (if at all), you can get the failure reason for the most recent
-- failure. Send a 'GET' request to the '2013-04-01/healthcheck//health check ID//lastfailurereason'
-- resource.
--
-- <http://docs.aws.amazon.com/Route53/latest/APIReference/API_GetHealthCheckLastFailureReason.html>
module Network.AWS.Route53.GetHealthCheckLastFailureReason
(
-- * Request
GetHealthCheckLastFailureReason
-- ** Request constructor
, getHealthCheckLastFailureReason
-- ** Request lenses
, ghclfrHealthCheckId
-- * Response
, GetHealthCheckLastFailureReasonResponse
-- ** Response constructor
, getHealthCheckLastFailureReasonResponse
-- ** Response lenses
, ghclfrrHealthCheckObservations
) where
import Network.AWS.Prelude
import Network.AWS.Request.RestXML
import Network.AWS.Route53.Types
import qualified GHC.Exts
newtype GetHealthCheckLastFailureReason = GetHealthCheckLastFailureReason
{ _ghclfrHealthCheckId :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'GetHealthCheckLastFailureReason' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ghclfrHealthCheckId' @::@ 'Text'
--
getHealthCheckLastFailureReason :: Text -- ^ 'ghclfrHealthCheckId'
-> GetHealthCheckLastFailureReason
getHealthCheckLastFailureReason p1 = GetHealthCheckLastFailureReason
{ _ghclfrHealthCheckId = p1
}
-- | The ID of the health check for which you want to retrieve the reason for the
-- most recent failure.
ghclfrHealthCheckId :: Lens' GetHealthCheckLastFailureReason Text
ghclfrHealthCheckId =
lens _ghclfrHealthCheckId (\s a -> s { _ghclfrHealthCheckId = a })
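-- A minimal usage sketch (illustrative only): it assumes an amazonka-style
-- 'send' running in a suitable AWS monad, and the health check ID shown is
-- made up.
--
-- @
-- lastFailure = do
--     rs <- send (getHealthCheckLastFailureReason "hc-1234")
--     return (rs ^. ghclfrrHealthCheckObservations)
-- @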
newtype GetHealthCheckLastFailureReasonResponse = GetHealthCheckLastFailureReasonResponse
{ _ghclfrrHealthCheckObservations :: List "HealthCheckObservation" HealthCheckObservation
} deriving (Eq, Read, Show, Monoid, Semigroup)
instance GHC.Exts.IsList GetHealthCheckLastFailureReasonResponse where
type Item GetHealthCheckLastFailureReasonResponse = HealthCheckObservation
fromList = GetHealthCheckLastFailureReasonResponse . GHC.Exts.fromList
toList = GHC.Exts.toList . _ghclfrrHealthCheckObservations
-- | 'GetHealthCheckLastFailureReasonResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ghclfrrHealthCheckObservations' @::@ ['HealthCheckObservation']
--
getHealthCheckLastFailureReasonResponse :: GetHealthCheckLastFailureReasonResponse
getHealthCheckLastFailureReasonResponse = GetHealthCheckLastFailureReasonResponse
{ _ghclfrrHealthCheckObservations = mempty
}
-- | A list that contains one 'HealthCheckObservation' element for each Route 53
-- health checker.
ghclfrrHealthCheckObservations :: Lens' GetHealthCheckLastFailureReasonResponse [HealthCheckObservation]
ghclfrrHealthCheckObservations =
lens _ghclfrrHealthCheckObservations
(\s a -> s { _ghclfrrHealthCheckObservations = a })
. _List
instance ToPath GetHealthCheckLastFailureReason where
toPath GetHealthCheckLastFailureReason{..} = mconcat
[ "/2013-04-01/healthcheck/"
, toText _ghclfrHealthCheckId
, "/lastfailurereason"
]
instance ToQuery GetHealthCheckLastFailureReason where
toQuery = const mempty
instance ToHeaders GetHealthCheckLastFailureReason
instance ToXMLRoot GetHealthCheckLastFailureReason where
toXMLRoot = const (namespaced ns "GetHealthCheckLastFailureReason" [])
instance ToXML GetHealthCheckLastFailureReason
instance AWSRequest GetHealthCheckLastFailureReason where
type Sv GetHealthCheckLastFailureReason = Route53
type Rs GetHealthCheckLastFailureReason = GetHealthCheckLastFailureReasonResponse
request = get
response = xmlResponse
instance FromXML GetHealthCheckLastFailureReasonResponse where
parseXML x = GetHealthCheckLastFailureReasonResponse
<$> x .@? "HealthCheckObservations" .!@ mempty
repo_name: romanb/amazonka | path: amazonka-route53/gen/Network/AWS/Route53/GetHealthCheckLastFailureReason.hs | license: mpl-2.0 | size: 5,114 | n_ast_errors: 0 | ast_max_depth: 10 | n_whitespaces: 909 | n_ast_nodes: 514 | n_ast_terminals: 310 | n_ast_nonterminals: 204 | loc: 66 | cycloplexity: 1
{-# language ScopedTypeVariables #-}
module Top.Initialisation where
import Prelude hiding (catch)
import Safe
import qualified Data.Indexable as I
import Data.Indexable (Indexable)
import Data.Indexable.Range (Range, calculateRange)
import Data.Initial
import Data.SelectTree
import Data.Maybe
import Data.List
import Text.Logging
import Control.Monad
import Control.Monad.CatchIO
import Control.Exception (IOException)
import Physics.Chipmunk
import Utils
import Base
import Object
import qualified Editor.Scene.RenderOrdering as RenderOrdering
import Sorts.Tiles (isTileSort)
import qualified Sorts.Nikki
import qualified Sorts.Nikki.Batteries
import qualified Sorts.Terminal
import qualified Sorts.Tiles
import qualified Sorts.Sign
import qualified Sorts.FallingTiles
import qualified Sorts.DeathStones
import qualified Sorts.Box
import qualified Sorts.Battery
import qualified Sorts.Grids
import qualified Sorts.Switch
import qualified Sorts.Background
import qualified Sorts.LowerLimit
import qualified Sorts.Robots.Jetpack
import qualified Sorts.Robots.PathRobots
import qualified Sorts.Robots.Cannon
import qualified Sorts.Robots.Laser
-- import qualified Sorts.DebugObject
-- sort loaders are given individually to be able
-- to surround every single loader with an exception
-- handler.
sortLoaders :: [RM (Maybe Sort_)]
sortLoaders =
Sorts.Tiles.sorts ++
Sorts.Robots.Jetpack.sorts ++
Sorts.Robots.PathRobots.sorts ++
Sorts.Robots.Cannon.sorts ++
Sorts.Robots.Laser.sorts ++
Sorts.Terminal.sorts ++
Sorts.Battery.sorts ++
Sorts.Switch.sorts ++
Sorts.Sign.sorts ++
Sorts.Box.sorts ++
Sorts.FallingTiles.sorts ++
Sorts.DeathStones.sorts ++
Sorts.LowerLimit.sorts ++
Sorts.Background.sorts ++
Sorts.Grids.sorts ++
Sorts.Nikki.sorts ++
-- Sorts.DebugObject.sorts :
[]
withAllSorts :: (SelectTree Sort_ -> RM a) -> RM a
withAllSorts cmd = do
sorts <- getAllSorts
cmd sorts `finally` (io $ freeAllSorts sorts)
-- | returns all sorts in a nicely sorted SelectTree
getAllSorts :: RM (SelectTree Sort_)
getAllSorts = do
sorts <- catMaybes <$> mapM catchExceptions sortLoaders
io $ checkUniqueSortIds sorts
return $ mkSortsSelectTree sorts
where
catchExceptions :: RM (Maybe a) -> RM (Maybe a)
catchExceptions action =
catch action $ \ (e :: IOException) -> io $ do
logg Error ("cannot load all sorts: " ++ show e)
return Nothing
checkUniqueSortIds :: [Sort_] -> IO ()
checkUniqueSortIds sorts =
when (not $ null $ ds) $
fail ("duplicate sort ids found: " ++ unwords ds)
where
ds = duplicates $ map (getSortId . sortId) sorts
freeAllSorts :: SelectTree Sort_ -> IO ()
freeAllSorts sorts = do
fmapM_ freeSort sorts
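-- Builds the initial scene from the editor objects: sorts the main layer's
-- render order, merges mergeable layers and tiles, initialises all objects
-- (the main layer inside the physics space), constructs the 'Scene', and
-- finally promotes the lower limit.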
initScene :: Application -> LevelFile -> Space -> CachedTiles
-> Grounds (EditorObject Sort_) -> RM (Scene Object_)
initScene app file space cachedTiles =
return . (mainLayer .> content ^: RenderOrdering.sortMainLayer) >=>
return . groundsMergeLayers >=>
return . groundsMergeTiles >=>
initializeObjects app file space cachedTiles >=>
io . mkScene file space >=>
return . Sorts.LowerLimit.promoteLowerLimit
initializeObjects :: Application -> LevelFile -> Space -> CachedTiles
-> Grounds (EditorObject Sort_) -> RM (Grounds Object_)
initializeObjects app file space cachedTiles (Grounds backgrounds mainLayer foregrounds) = do
bgs' <- fmapM (fmapM (editorObject2Object app file Nothing cachedTiles)) backgrounds
ml' <- fmapM (editorObject2Object app file (Just space) cachedTiles) mainLayer
fgs' <- fmapM (fmapM (editorObject2Object app file Nothing cachedTiles)) foregrounds
return $ Grounds bgs' ml' fgs'
editorObject2Object :: Application -> LevelFile -> Maybe Space -> CachedTiles
-> EditorObject Sort_ -> RM Object_
editorObject2Object app file mspace cachedTiles (EditorObject sort pos state) =
initialize app file mspace sort pos state cachedTiles
mkScene :: LevelFile -> Space -> Grounds Object_ -> IO (Scene Object_)
mkScene levelFile space objects = do
contactRef <- initContactRef space initial watchedContacts
let nikki = Sorts.Nikki.searchNikki objects
optObjects = mkGameGrounds objects (mainLayerUpdatingRange objects)
totalSwitches = Sorts.Switch.countSwitches (objects ^. mainLayer ^. content)
totalBatteries =
fromIntegral $
Sorts.Battery.countBatteries $
fmap sort_ $
(objects ^. mainLayer ^. content)
return $ Scene {
levelFile = levelFile,
spaceTime_ = 0,
objects_ = optObjects,
lowerLimit_ = Nothing,
batteryPower_ = 0 :!: totalBatteries,
batteryMap = Sorts.Nikki.Batteries.mkBatteryMap (optObjects ^. gameMainLayer),
switches_ = 0 :!: totalSwitches,
contactRef = contactRef,
contacts_ = initial,
mode_ = NikkiMode nikki
}
mainLayerUpdatingRange :: Grounds Object_ -> Range
mainLayerUpdatingRange gs =
calculateRange isUpdating (gs ^. mainLayer ^. content)
groundsMergeTiles :: Grounds (EditorObject Sort_) -> Grounds (EditorObject Sort_)
groundsMergeTiles =
(backgrounds ^: fmap (content ^: mergeObjects)) .
(mainLayer .> content ^: mergeObjects) .
(foregrounds ^: fmap (content ^: mergeObjects))
mergeObjects :: Indexable (EditorObject Sort_) -> Indexable (EditorObject Sort_)
mergeObjects = Sorts.Tiles.mergeTiles >>> Sorts.DeathStones.mergeDeathStones
-- | Merge consecutive foreground and background layers
-- when it wouldn't change the rendering. Allows for more baking.
-- Conditions:
-- Both layers must have the same layer distance.
-- The lower layer mustn't have non-tile objects above tile objects.
-- The upper layer mustn't have non-tile objects below tile objects.
-- Indexes of the multilayers will be lost.
groundsMergeLayers :: Grounds (EditorObject Sort_) -> Grounds (EditorObject Sort_)
groundsMergeLayers =
(backgrounds ^: mergeLayers) >>>
(foregrounds ^: mergeLayers)
where
mergeLayers :: Indexable (Layer (EditorObject Sort_))
-> Indexable (Layer (EditorObject Sort_))
mergeLayers = ftoList >>> it >>> I.fromList
it :: [Layer (EditorObject Sort_)] -> [Layer (EditorObject Sort_)]
it (a : b : r) = case merge a b of
Just n -> it (n : r)
Nothing -> a : it (b : r)
it x = x
merge :: Layer (EditorObject Sort_) -> Layer (EditorObject Sort_)
-> Maybe (Layer (EditorObject Sort_))
merge a b =
if sameDistance a b && tilesOnTop a && tilesOnBottom b
then Just $ concatLayers a b
else Nothing
concatLayers a b = Layer {
content_ = I.append (a ^. content) (ftoList (b ^. content)),
xDistance = xDistance a,
yDistance = yDistance a
}
sameDistance a b = xDistance a == xDistance b && yDistance a == yDistance b
tilesOnTop, tilesOnBottom :: Layer (EditorObject Sort_) -> Bool
tilesOnTop l = maybe False (isTileSort . editorSort) (lastMay $ ftoList (l ^. content))
tilesOnBottom l = maybe False (isTileSort . editorSort) (headMay $ ftoList (l ^. content))
repo_name: changlinli/nikki | path: src/Top/Initialisation.hs | license: lgpl-3.0 | size: 7,198 | n_ast_errors: 0 | ast_max_depth: 21 | n_whitespaces: 1,474 | n_ast_nodes: 1,978 | n_ast_terminals: 1,046 | n_ast_nonterminals: 932 | loc: -1 | cycloplexity: -1
{-# LANGUAGE RecordWildCards, BangPatterns, TypeFamilies, StandaloneDeriving, FlexibleContexts, FlexibleInstances, UndecidableInstances, TypeSynonymInstances #-}
-- | This module declares (low-level) data types for Java .class files
-- structures, and Binary instances to read/write them.
module JVM.ClassFile
(-- * About
-- $about
--
--
-- * Internal class file structures
Attribute (..),
FieldType (..),
-- * Signatures
FieldSignature, MethodSignature (..), ReturnSignature (..),
ArgumentSignature (..),
-- * Stage types
File, Direct,
-- * Staged structures
Pool, Link,
Method (..), Field (..), Class (..),
Constant (..),
AccessFlag (..), AccessFlags,
Attributes (..),
defaultClass,
-- * Misc
HasSignature (..), HasAttributes (..),
NameType (..),
fieldNameType, methodNameType,
lookupField, lookupMethod,
long,
toString,
className,
apsize, arsize, arlist
)
where
import Control.Monad
import Control.Monad.Trans (lift)
import Control.Applicative
import qualified Control.Monad.State as St
import Data.Binary
import Data.Binary.IEEE754
import Data.Binary.Get
import Data.Binary.Put
import Data.Char
import Data.List
import Data.Default
import qualified Data.Set as S
import qualified Data.Map as M
import qualified Data.ByteString.Lazy as B
import Codec.Binary.UTF8.String hiding (encode, decode)
-- $about
--
-- A Java .class file uses a constant pool, which stores almost all source-code-level
-- constants (strings, integer literals etc.) as well as all identifiers (class,
-- method, field names etc.). All other structures contain indexes of constants in
-- the pool instead of the constants themselves.
--
-- It is not convenient to use these indexes programmatically, so a .class file is
-- represented at two stages: File and Direct. At the File stage, all data structures
-- contain only indexes, not the constants themselves. When we read a class from a
-- file, we get a structure at the File stage; only File stage structures can be
-- written back to a file.
--
-- At the Direct stage, structures contain constants, not indexes. Conversion
-- functions (File <-> Direct) are located in the JVM.Converter module.
--
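-- As a small illustration of the two stages (a sketch; the type instances
-- below are the authoritative definitions):
--
-- @
-- Link File   B.ByteString  ~  Word16        -- an index into the constant pool
-- Link Direct B.ByteString  ~  B.ByteString  -- the constant itself
-- @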
-- | Read one-byte Char
getChar8 :: Get Char
getChar8 = do
x <- getWord8
return $ chr (fromIntegral x)
toString :: B.ByteString -> String
toString bstr = decodeString $ map (chr . fromIntegral) $ B.unpack bstr
-- | File stage
data File = File
-- | Direct representation stage
data Direct = Direct
-- | Link to some object
type family Link stage a
-- | At the File stage, a Link contains the index of the object in the constants pool.
type instance Link File a = Word16
-- | At the Direct stage, a Link contains the object itself.
type instance Link Direct a = a
-- | Object (class, method, field …) access flags
type family AccessFlags stage
-- | At File stage, access flags are represented as Word16
type instance AccessFlags File = Word16
-- | At Direct stage, access flags are represented as set of flags.
type instance AccessFlags Direct = S.Set AccessFlag
-- | Object (class, method, field) attributes
data family Attributes stage
-- | At File stage, attributes are represented as list of Attribute structures.
data instance Attributes File = AP {attributesList :: [Attribute]}
deriving (Eq, Show)
instance Default (Attributes File) where
def = AP []
-- | At Direct stage, attributes are represented as a Map.
data instance Attributes Direct = AR (M.Map B.ByteString B.ByteString)
deriving (Eq, Show)
instance Default (Attributes Direct) where
def = AR M.empty
-- | Size of attributes set at Direct stage
arsize :: Attributes Direct -> Int
arsize (AR m) = M.size m
-- | Associative list of attributes at Direct stage
arlist :: Attributes Direct -> [(B.ByteString, B.ByteString)]
arlist (AR m) = M.assocs m
-- | Size of attributes set at File stage
apsize :: Attributes File -> Int
apsize (AP list) = length list
-- | Access flags. Used for classes, methods, variables.
data AccessFlag =
ACC_PUBLIC -- ^ 0x0001 Visible for all
| ACC_PRIVATE -- ^ 0x0002 Visible only for defined class
| ACC_PROTECTED -- ^ 0x0004 Visible only for subclasses
| ACC_STATIC -- ^ 0x0008 Static method or variable
| ACC_FINAL -- ^ 0x0010 No further subclassing or assignments
| ACC_SYNCHRONIZED -- ^ 0x0020 Uses monitors
| ACC_VOLATILE -- ^ 0x0040 Cannot be cached
| ACC_TRANSIENT -- ^ 0x0080
| ACC_NATIVE -- ^ 0x0100 Implemented in other language
| ACC_INTERFACE -- ^ 0x0200 Class is interface
| ACC_ABSTRACT -- ^ 0x0400
deriving (Eq, Show, Ord, Enum)
-- | Fields and methods have signatures.
class (Binary (Signature a), Show (Signature a), Eq (Signature a))
=> HasSignature a where
type Signature a
instance HasSignature (Field Direct) where
type Signature (Field Direct) = FieldSignature
instance HasSignature (Method Direct) where
type Signature (Method Direct) = MethodSignature
-- | Name and signature pair. Used for methods and fields.
data NameType a = NameType {
ntName :: B.ByteString,
ntSignature :: Signature a }
instance (HasSignature a) => Show (NameType a) where
show (NameType n t) = toString n ++ ": " ++ show t
deriving instance HasSignature a => Eq (NameType a)
instance HasSignature a => Binary (NameType a) where
put (NameType n t) = putLazyByteString n >> put t
get = NameType <$> get <*> get
-- | Constant pool item
data Constant stage =
CClass (Link stage B.ByteString)
| CField (Link stage B.ByteString) (Link stage (NameType (Field stage)))
| CMethod (Link stage B.ByteString) (Link stage (NameType (Method stage)))
| CIfaceMethod (Link stage B.ByteString) (Link stage (NameType (Method stage)))
| CString (Link stage B.ByteString)
| CInteger Word32
| CFloat Float
| CLong Word64
| CDouble Double
| CNameType (Link stage B.ByteString) (Link stage B.ByteString)
| CUTF8 {getString :: B.ByteString}
| CUnicode {getString :: B.ByteString}
-- | Name of the CClass. Error on any other constant.
className :: Constant Direct -> B.ByteString
className (CClass s) = s
className x = error $ "Not a class: " ++ show x
instance Show (Constant Direct) where
show (CClass name) = "class " ++ toString name
show (CField cls nt) = "field " ++ toString cls ++ "." ++ show nt
show (CMethod cls nt) = "method " ++ toString cls ++ "." ++ show nt
show (CIfaceMethod cls nt) = "interface method " ++ toString cls ++ "." ++ show nt
show (CString s) = "String \"" ++ toString s ++ "\""
show (CInteger x) = show x
show (CFloat x) = show x
show (CLong x) = show x
show (CDouble x) = show x
show (CNameType name tp) = toString name ++ ": " ++ toString tp
show (CUTF8 s) = "UTF8 \"" ++ toString s ++ "\""
show (CUnicode s) = "Unicode \"" ++ toString s ++ "\""
-- | Constant pool
type Pool stage = M.Map Word16 (Constant stage)
-- | Generic .class file format
data Class stage = Class {
magic :: Word32, -- ^ Magic value: 0xCAFEBABE
minorVersion :: Word16,
majorVersion :: Word16,
constsPoolSize :: Word16, -- ^ Number of items in constants pool
constsPool :: Pool stage, -- ^ Constants pool itself
accessFlags :: AccessFlags stage, -- ^ See @JVM.Types.AccessFlag@
thisClass :: Link stage B.ByteString, -- ^ Constants pool item index for this class
superClass :: Link stage B.ByteString, -- ^ Constants pool item index for the super class; zero for java.lang.Object
interfacesCount :: Word16, -- ^ Number of implemented interfaces
interfaces :: [Link stage B.ByteString], -- ^ Constants pool item indexes for implemented interfaces
classFieldsCount :: Word16, -- ^ Number of class fields
classFields :: [Field stage], -- ^ Class fields
classMethodsCount :: Word16, -- ^ Number of class methods
classMethods :: [Method stage], -- ^ Class methods
classAttributesCount :: Word16, -- ^ Number of class attributes
classAttributes :: Attributes stage -- ^ Class attributes
}
deriving instance Eq (Class File)
deriving instance Eq (Class Direct)
deriving instance Show (Class File)
deriving instance Show (Class Direct)
deriving instance Eq (Constant File)
deriving instance Eq (Constant Direct)
deriving instance Show (Constant File)
-- | Default (empty) class file definition.
defaultClass :: (Default (AccessFlags stage), Default (Link stage B.ByteString), Default (Attributes stage))
=> Class stage
defaultClass = Class {
magic = 0xCAFEBABE,
minorVersion = 0,
majorVersion = 50,
constsPoolSize = 0,
constsPool = def,
accessFlags = def,
thisClass = def,
superClass = def,
interfacesCount = 0,
interfaces = [],
classFieldsCount = 0,
classFields = [],
classMethodsCount = 0,
classMethods = [],
classAttributesCount = 0,
classAttributes = def }
instance Binary (Class File) where
put (Class {..}) = do
put magic
put minorVersion
put majorVersion
putPool constsPool
put accessFlags
put thisClass
put superClass
put interfacesCount
forM_ interfaces put
put classFieldsCount
forM_ classFields put
put classMethodsCount
forM_ classMethods put
put classAttributesCount
forM_ (attributesList classAttributes) put
get = do
magic <- get
when (magic /= 0xCAFEBABE) $
fail $ "Invalid .class file MAGIC value: " ++ show magic
minor <- get
major <- get
when (major > 51) $
fail $ "Too new .class file format: " ++ show major
poolsize <- getWord16be
pool <- getPool (poolsize - 1)
af <- get
this <- get
super <- get
interfacesCount <- get
ifaces <- replicateM (fromIntegral interfacesCount) get
classFieldsCount <- getWord16be
classFields <- replicateM (fromIntegral classFieldsCount) get
classMethodsCount <- get
classMethods <- replicateM (fromIntegral classMethodsCount) get
asCount <- get
as <- replicateM (fromIntegral $ asCount) get
return $ Class magic minor major poolsize pool af this super
interfacesCount ifaces classFieldsCount classFields
classMethodsCount classMethods asCount (AP as)
-- | Field signature format
data FieldType =
SignedByte -- ^ B
| CharByte -- ^ C
| DoubleType -- ^ D
| FloatType -- ^ F
| IntType -- ^ I
| LongInt -- ^ J
| ShortInt -- ^ S
| BoolType -- ^ Z
| ObjectType String -- ^ L @{class name}@
| Array (Maybe Int) FieldType -- ^ @[{type}@
deriving (Eq, Ord)
instance Show FieldType where
show SignedByte = "byte"
show CharByte = "char"
show DoubleType = "double"
show FloatType = "float"
show IntType = "int"
show LongInt = "long"
show ShortInt = "short"
show BoolType = "bool"
show (ObjectType s) = "Object " ++ s
show (Array Nothing t) = show t ++ "[]"
show (Array (Just n) t) = show t ++ "[" ++ show n ++ "]"
-- | Class field signature
type FieldSignature = FieldType
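-- As an example of the descriptor encoding handled by the 'Binary' instance
-- below (a sketch): @I@ decodes to 'IntType', @Ljava/lang/String;@ to
-- @ObjectType "java/lang/String"@, and @[I@ to @Array Nothing IntType@.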
-- | Try to read integer value from decimal representation
getInt :: Get (Maybe Int)
getInt = do
s <- getDigits
if null s
then return Nothing
else return $ Just (read s)
where
getDigits :: Get [Char]
getDigits = do
c <- lookAhead getChar8
if isDigit c
then do
skip 1
next <- getDigits
return (c: next)
else return []
putString :: String -> Put
putString str = forM_ str put
instance Binary FieldType where
put SignedByte = put 'B'
put CharByte = put 'C'
put DoubleType = put 'D'
put FloatType = put 'F'
put IntType = put 'I'
put LongInt = put 'J'
put ShortInt = put 'S'
put BoolType = put 'Z'
put (ObjectType name) = put 'L' >> putString name >> put ';'
put (Array Nothing sig) = put '[' >> put sig
put (Array (Just n) sig) = put '[' >> put (show n) >> put sig
get = do
b <- getChar8
case b of
'B' -> return SignedByte
'C' -> return CharByte
'D' -> return DoubleType
'F' -> return FloatType
'I' -> return IntType
'J' -> return LongInt
'S' -> return ShortInt
'Z' -> return BoolType
'L' -> do
name <- getToSemicolon
return (ObjectType name)
'[' -> do
mbSize <- getInt
sig <- get
return (Array mbSize sig)
_ -> fail $ "Unknown signature opening symbol: " ++ [b]
-- | Read string up to `;'
getToSemicolon :: Get String
getToSemicolon = do
x <- get
if x == ';'
then return []
else do
next <- getToSemicolon
return (x: next)
-- | Return value signature
data ReturnSignature =
Returns FieldType
| ReturnsVoid
deriving (Eq, Ord)
instance Show ReturnSignature where
show (Returns t) = show t
show ReturnsVoid = "Void"
instance Binary ReturnSignature where
put (Returns sig) = put sig
put ReturnsVoid = put 'V'
get = do
x <- lookAhead getChar8
case x of
'V' -> skip 1 >> return ReturnsVoid
_ -> Returns <$> get
-- | Method argument signature
type ArgumentSignature = FieldType
-- | Class method signature
data MethodSignature =
MethodSignature [ArgumentSignature] ReturnSignature
deriving (Eq, Ord)
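-- For example, the descriptor @(I[JLjava/lang/Object;)V@ corresponds to
-- @MethodSignature [IntType, Array Nothing LongInt, ObjectType "java/lang/Object"] ReturnsVoid@
-- (a sketch; the 'Binary' instance below defines the actual encoding).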
instance Show MethodSignature where
show (MethodSignature args ret) = "(" ++ intercalate ", " (map show args) ++ ") returns " ++ show ret
instance Binary MethodSignature where
put (MethodSignature args ret) = do
put '('
forM_ args put
put ')'
put ret
get = do
x <- getChar8
when (x /= '(') $
fail "Cannot parse method signature: no starting `(' !"
args <- getArgs
y <- getChar8
when (y /= ')') $
fail "Internal error: method signature without `)' !?"
ret <- get
return (MethodSignature args ret)
-- | Read arguments signatures (up to `)')
getArgs :: Get [ArgumentSignature]
getArgs = whileJust getArg
where
getArg :: Get (Maybe ArgumentSignature)
getArg = do
x <- lookAhead getChar8
if x == ')'
then return Nothing
else Just <$> get
whileJust :: (Monad m) => m (Maybe a) -> m [a]
whileJust m = do
r <- m
case r of
Just x -> do
next <- whileJust m
return (x: next)
Nothing -> return []
long :: Constant stage -> Bool
long (CLong _) = True
long (CDouble _) = True
long _ = False
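-- In the class file format, 'CLong' and 'CDouble' constants occupy two
-- consecutive constant pool slots; 'putPool' and 'getPool' below use 'long'
-- to account for this when computing the pool size and entry indices.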
putPool :: Pool File -> Put
putPool pool = do
let list = M.elems pool
d = length $ filter long list
putWord16be $ fromIntegral (M.size pool + d + 1)
forM_ list putC
where
putC (CClass i) = putWord8 7 >> put i
putC (CField i j) = putWord8 9 >> put i >> put j
putC (CMethod i j) = putWord8 10 >> put i >> put j
putC (CIfaceMethod i j) = putWord8 11 >> put i >> put j
putC (CString i) = putWord8 8 >> put i
putC (CInteger x) = putWord8 3 >> put x
putC (CFloat x) = putWord8 4 >> putFloat32be x
putC (CLong x) = putWord8 5 >> put x
putC (CDouble x) = putWord8 6 >> putFloat64be x
putC (CNameType i j) = putWord8 12 >> put i >> put j
putC (CUTF8 bs) = do
putWord8 1
put (fromIntegral (B.length bs) :: Word16)
putLazyByteString bs
putC (CUnicode bs) = do
putWord8 2
put (fromIntegral (B.length bs) :: Word16)
putLazyByteString bs
getPool :: Word16 -> Get (Pool File)
getPool n = do
items <- St.evalStateT go 1
return $ M.fromList items
where
go :: St.StateT Word16 Get [(Word16, Constant File)]
go = do
i <- St.get
if i > n
then return []
else do
c <- lift getC
let i' = if long c
then i+2
else i+1
St.put i'
next <- go
return $ (i,c): next
getC = do
!offset <- bytesRead
tag <- getWord8
case tag of
1 -> do
l <- get
bs <- getLazyByteString (fromIntegral (l :: Word16))
return $ CUTF8 bs
2 -> do
l <- get
bs <- getLazyByteString (fromIntegral (l :: Word16))
return $ CUnicode bs
3 -> CInteger <$> get
4 -> CFloat <$> getFloat32be
5 -> CLong <$> get
6 -> CDouble <$> getFloat64be
7 -> CClass <$> get
8 -> CString <$> get
9 -> CField <$> get <*> get
10 -> CMethod <$> get <*> get
11 -> CIfaceMethod <$> get <*> get
12 -> CNameType <$> get <*> get
_ -> fail $ "Unknown constants pool entry tag: " ++ show tag
-- _ -> return $ CInteger 0
-- | Class field format
data Field stage = Field {
fieldAccessFlags :: AccessFlags stage,
fieldName :: Link stage B.ByteString,
fieldSignature :: Link stage FieldSignature,
fieldAttributesCount :: Word16,
fieldAttributes :: Attributes stage }
deriving instance Eq (Field File)
deriving instance Eq (Field Direct)
deriving instance Show (Field File)
deriving instance Show (Field Direct)
lookupField :: B.ByteString -> Class Direct -> Maybe (Field Direct)
lookupField name cls = look (classFields cls)
where
look [] = Nothing
look (f:fs)
| fieldName f == name = Just f
| otherwise = look fs
fieldNameType :: Field Direct -> NameType (Field Direct)
fieldNameType f = NameType (fieldName f) (fieldSignature f)
instance Binary (Field File) where
put (Field {..}) = do
put fieldAccessFlags
put fieldName
put fieldSignature
put fieldAttributesCount
forM_ (attributesList fieldAttributes) put
get = do
af <- get
ni <- getWord16be
si <- get
n <- getWord16be
as <- replicateM (fromIntegral n) get
return $ Field af ni si n (AP as)
-- | Class method format
data Method stage = Method {
methodAccessFlags :: AccessFlags stage,
methodName :: Link stage B.ByteString,
methodSignature :: Link stage MethodSignature,
methodAttributesCount :: Word16,
methodAttributes :: Attributes stage }
deriving instance Eq (Method File)
deriving instance Eq (Method Direct)
deriving instance Show (Method File)
deriving instance Show (Method Direct)
methodNameType :: Method Direct -> NameType (Method Direct)
methodNameType m = NameType (methodName m) (methodSignature m)
lookupMethod :: B.ByteString -> Class Direct -> Maybe (Method Direct)
lookupMethod name cls = look (classMethods cls)
where
look [] = Nothing
look (f:fs)
| methodName f == name = Just f
| otherwise = look fs
instance Binary (Method File) where
put (Method {..}) = do
put methodAccessFlags
put methodName
put methodSignature
put methodAttributesCount
forM_ (attributesList methodAttributes) put
get = do
offset <- bytesRead
af <- get
ni <- get
si <- get
n <- get
as <- replicateM (fromIntegral n) get
return $ Method {
methodAccessFlags = af,
methodName = ni,
methodSignature = si,
methodAttributesCount = n,
methodAttributes = AP as }
-- | Any (class / field / method / ...) attribute format.
-- Some attribute kinds specify a special format for @attributeValue@.
data Attribute = Attribute {
attributeName :: Word16,
attributeLength :: Word32,
attributeValue :: B.ByteString }
deriving (Eq, Show)
instance Binary Attribute where
put (Attribute {..}) = do
put attributeName
putWord32be attributeLength
putLazyByteString attributeValue
get = do
offset <- bytesRead
name <- getWord16be
len <- getWord32be
value <- getLazyByteString (fromIntegral len)
return $ Attribute name len value
class HasAttributes a where
attributes :: a stage -> Attributes stage
instance HasAttributes Class where
attributes = classAttributes
instance HasAttributes Field where
attributes = fieldAttributes
instance HasAttributes Method where
attributes = methodAttributes
repo_name: ledyba/hs-java | path: JVM/ClassFile.hs | license: lgpl-3.0 | size: 19,963 | n_ast_errors: 0 | ast_max_depth: 18 | n_whitespaces: 5,141 | n_ast_nodes: 5,788 | n_ast_terminals: 2,949 | n_ast_nonterminals: 2,839 | loc: 505 | cycloplexity: 15
{- |
Module : $Header$
Description : Parser for CspCASL specifications
Copyright : (c) Uni Bremen 2007
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : experimental
Portability : portable
Parser for CSP-CASL specifications.
-}
module CspCASL.Parse_CspCASL (cspBasicExt) where
import Text.ParserCombinators.Parsec
import Common.AnnoState
import Common.Lexer
import CspCASL.AS_CspCASL
import CspCASL.CspCASL_Keywords
import CspCASL.Parse_CspCASL_Process
instance AParsable CspBasicExt where
aparser = cspBasicExt
cspBasicExt :: AParser st CspBasicExt
cspBasicExt =
itemList cspKeywords channelS (const chanDecl) Channels
<|> do
p <- asKey processS
auxItemList cspStartKeys [p] procItem ProcItems
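-- A channel declaration written roughly as @c1, c2 : s@ is parsed by
-- 'chanDecl' below into @ChannelDecl [c1, c2] s@ (a sketch of the concrete
-- syntax; the exact tokens follow the CspCASL keywords).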
chanDecl :: AParser st CHANNEL_DECL
chanDecl = do
vs <- commaSep1 channel_name
colonT
es <- cspSortId
return (ChannelDecl vs es)
procItem :: AParser st PROC_ITEM
procItem = do
ep <- procDeclOrDefn
case ep of
Left (fpn, vs) -> do
p <- csp_casl_process
return $ Proc_Eq (ParmProcname fpn vs) p
Right (pn, eas, al) -> case eas of
Left ss -> return $ Proc_Decl pn ss al
Right vds -> do
p <- csp_casl_process
return $ Proc_Defn pn vds al p
repo_name: mariefarrell/Hets | path: CspCASL/Parse_CspCASL.hs | license: gpl-2.0 | size: 1,276 | n_ast_errors: 0 | ast_max_depth: 16 | n_whitespaces: 269 | n_ast_nodes: 309 | n_ast_terminals: 153 | n_ast_nonterminals: 156 | loc: 33 | cycloplexity: 3
{-# OPTIONS -fno-warn-tabs #-}
-- The above warning suppression flag is a temporary kludge.
-- While working on this module you are encouraged to remove it and
-- detab the module (please do the detabbing in a separate patch). See
-- http://ghc.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces
-- for details
-- | Expand out synthetic instructions into single machine instrs.
module SPARC.CodeGen.Expand (
expandTop
)
where
import SPARC.Instr
import SPARC.Imm
import SPARC.AddrMode
import SPARC.Regs
import SPARC.Ppr ()
import Instruction
import Reg
import Size
import Cmm
import Outputable
import OrdList
-- | Expand out synthetic instructions in this top level thing
expandTop :: NatCmmDecl CmmStatics Instr -> NatCmmDecl CmmStatics Instr
expandTop top@(CmmData{})
= top
expandTop (CmmProc info lbl live (ListGraph blocks))
= CmmProc info lbl live (ListGraph $ map expandBlock blocks)
-- | Expand out synthetic instructions in this block
expandBlock :: NatBasicBlock Instr -> NatBasicBlock Instr
expandBlock (BasicBlock label instrs)
= let instrs_ol = expandBlockInstrs instrs
instrs' = fromOL instrs_ol
in BasicBlock label instrs'
-- | Expand out some instructions
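-- Each instruction is first rewritten to refer to the low register of any
-- double-register pair ('remapRegPair') and, if it is a 64-bit float load or
-- store, expanded into 32-bit accesses ('expandMisalignedDoubles').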
expandBlockInstrs :: [Instr] -> OrdList Instr
expandBlockInstrs [] = nilOL
expandBlockInstrs (ii:is)
= let ii_doubleRegs = remapRegPair ii
is_misaligned = expandMisalignedDoubles ii_doubleRegs
in is_misaligned `appOL` expandBlockInstrs is
-- | In the SPARC instruction set the FP register pairs that are used
-- to hold 64 bit floats are referred to by just the first reg
-- of the pair. Remap our internal reg pairs to the appropriate reg.
--
-- For example:
-- ldd [%l1], (%f0 | %f1)
--
-- gets mapped to
-- ldd [%l1], %f0
--
remapRegPair :: Instr -> Instr
remapRegPair instr
= let patchF reg
= case reg of
RegReal (RealRegSingle _)
-> reg
RegReal (RealRegPair r1 r2)
-- sanity checking
| r1 >= 32
, r1 <= 63
, r1 `mod` 2 == 0
, r2 == r1 + 1
-> RegReal (RealRegSingle r1)
| otherwise
-> pprPanic "SPARC.CodeGen.Expand: not remapping dodgy looking reg pair " (ppr reg)
RegVirtual _
-> pprPanic "SPARC.CodeGen.Expand: not remapping virtual reg " (ppr reg)
in patchRegsOfInstr instr patchF
-- Expand out 64 bit load/stores into individual instructions to handle
-- possible double alignment problems.
--
-- TODO: It'd be better to use a scratch reg instead of the add/sub thing.
-- We might be able to do this faster if we use the UA2007 instr set
-- instead of restricting ourselves to SPARC V9.
--
expandMisalignedDoubles :: Instr -> OrdList Instr
expandMisalignedDoubles instr
-- Translate to:
-- add g1,g2,g1
-- ld [g1],%fn
-- ld [g1+4],%f(n+1)
-- sub g1,g2,g1 -- to restore g1
| LD FF64 (AddrRegReg r1 r2) fReg <- instr
= toOL [ ADD False False r1 (RIReg r2) r1
, LD FF32 (AddrRegReg r1 g0) fReg
, LD FF32 (AddrRegImm r1 (ImmInt 4)) (fRegHi fReg)
, SUB False False r1 (RIReg r2) r1 ]
-- Translate to
-- ld [addr],%fn
-- ld [addr+4],%f(n+1)
| LD FF64 addr fReg <- instr
= let Just addr' = addrOffset addr 4
in toOL [ LD FF32 addr fReg
, LD FF32 addr' (fRegHi fReg) ]
-- Translate to:
-- add g1,g2,g1
-- st %fn,[g1]
-- st %f(n+1),[g1+4]
-- sub g1,g2,g1 -- to restore g1
| ST FF64 fReg (AddrRegReg r1 r2) <- instr
= toOL [ ADD False False r1 (RIReg r2) r1
, ST FF32 fReg (AddrRegReg r1 g0)
, ST FF32 (fRegHi fReg) (AddrRegImm r1 (ImmInt 4))
, SUB False False r1 (RIReg r2) r1 ]
-- Translate to
-- ld [addr],%fn
-- ld [addr+4],%f(n+1)
| ST FF64 fReg addr <- instr
= let Just addr' = addrOffset addr 4
in toOL [ ST FF32 fReg addr
, ST FF32 (fRegHi fReg) addr' ]
-- some other instr
| otherwise
= unitOL instr
-- | The high partner for this float reg.
fRegHi :: Reg -> Reg
fRegHi (RegReal (RealRegSingle r1))
| r1 >= 32
, r1 <= 63
, r1 `mod` 2 == 0
= (RegReal $ RealRegSingle (r1 + 1))
-- Can't take high partner for non-low reg.
fRegHi reg
= pprPanic "SPARC.CodeGen.Expand: can't take fRegHi from " (ppr reg)
repo_name: lukexi/ghc-7.8-arm64 | path: compiler/nativeGen/SPARC/CodeGen/Expand.hs | license: bsd-3-clause | size: 4,236 | n_ast_errors: 108 | ast_max_depth: 15 | n_whitespaces: 1,015 | n_ast_nodes: 1,041 | n_ast_terminals: 557 | n_ast_nonterminals: 484 | loc: 77 | cycloplexity: 3
{-# OPTIONS -fno-warn-tabs #-}
-- The above warning suppression flag is a temporary kludge.
-- While working on this module you are encouraged to remove it and
-- detab the module (please do the detabbing in a separate patch). See
-- http://ghc.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces
-- for details
-- | One ounce of sanity checking is worth 10000000000000000 ounces
-- of staring blindly at assembly code trying to find the problem..
--
module SPARC.CodeGen.Sanity (
checkBlock
)
where
import SPARC.Instr
import SPARC.Ppr ()
import Instruction
import Cmm
import Outputable
-- | Enforce intra-block invariants.
--
checkBlock :: CmmBlock
-> NatBasicBlock Instr
-> NatBasicBlock Instr
checkBlock cmm block@(BasicBlock _ instrs)
| checkBlockInstrs instrs
= block
| otherwise
= pprPanic
("SPARC.CodeGen: bad block\n")
( vcat [ text " -- cmm -----------------\n"
, ppr cmm
, text " -- native code ---------\n"
, ppr block ])
checkBlockInstrs :: [Instr] -> Bool
checkBlockInstrs ii
-- An unconditional jump ends the block.
-- There must be an unconditional jump in the block, otherwise
-- the register liveness determinator will get the liveness
-- information wrong.
--
-- If the block ends with a cmm call that never returns
-- then there can be unreachable instructions after the jump,
-- but we don't mind here.
--
| instr : NOP : _ <- ii
, isUnconditionalJump instr
= True
-- All jumps must have a NOP in their branch delay slot.
-- The liveness determinator and register allocators aren't smart
-- enough to handle branch delay slots.
--
| instr : NOP : is <- ii
, isJumpishInstr instr
= checkBlockInstrs is
-- keep checking
| _:i2:is <- ii
= checkBlockInstrs (i2:is)
-- this block is no good
| otherwise
= False
repo_name: lukexi/ghc-7.8-arm64 | path: compiler/nativeGen/SPARC/CodeGen/Sanity.hs | license: bsd-3-clause | size: 1,830 | n_ast_errors: 31 | ast_max_depth: 10 | n_whitespaces: 378 | n_ast_nodes: 303 | n_ast_terminals: 165 | n_ast_nonterminals: 138 | loc: 33 | cycloplexity: 1
module Main where
import Data.Foldable
import Data.List
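-- Reads trace output from stdin and prints each line indented by a running
-- nesting count: a line containing ">>" increments the count, any other line
-- decrements it, and each line is indented by the smaller of the count
-- before and after it.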
main = do
x <- getContents
let f s = if isInfixOf ">>" s then 1 else (-1)
g n (s:ss) = let n' = n + f s in (min n' n, s) : g n' ss
g _ [] = []
traverse_ (\(n, s) -> putStrLn (replicate n ' ' ++ s)) . g 0 $ lines x
repo_name: alexander-at-github/eta | path: utils/trace/Filter.hs | license: bsd-3-clause | size: 290 | n_ast_errors: 0 | ast_max_depth: 15 | n_whitespaces: 87 | n_ast_nodes: 174 | n_ast_terminals: 88 | n_ast_nonterminals: 86 | loc: 9 | cycloplexity: 3
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="el-GR">
<title>Retire.js Add-on</title>
<maps>
<homeID>retire</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
repo_name: kingthorin/zap-extensions | path: addOns/retire/src/main/javahelp/org/zaproxy/addon/retire/resources/help_el_GR/helpset_el_GR.hs | license: apache-2.0 | size: 964 | n_ast_errors: 86 | ast_max_depth: 29 | n_whitespaces: 156 | n_ast_nodes: 392 | n_ast_terminals: 212 | n_ast_nonterminals: 180 | loc: -1 | cycloplexity: -1
{-
This module handles generation of position independent code and
dynamic-linking related issues for the native code generator.
This depends both the architecture and OS, so we define it here
instead of in one of the architecture specific modules.
Things outside this module which are related to this:
+ module CLabel
- PIC base label (pretty printed as local label 1)
- DynamicLinkerLabels - several kinds:
CodeStub, SymbolPtr, GotSymbolPtr, GotSymbolOffset
- labelDynamic predicate
+ module Cmm
- The GlobalReg datatype has a PicBaseReg constructor
- The CmmLit datatype has a CmmLabelDiffOff constructor
+ codeGen & RTS
- When tablesNextToCode, no absolute addresses are stored in info tables
any more. Instead, offsets from the info label are used.
- For Win32 only, SRTs might contain addresses of __imp_ symbol pointers
because Win32 doesn't support external references in data sections.
TODO: make sure this still works, it might be bitrotted
+ NCG
- The cmmToCmm pass in AsmCodeGen calls cmmMakeDynamicReference for all
labels.
- nativeCodeGen calls pprImportedSymbol and pprGotDeclaration to output
all the necessary stuff for imported symbols.
- The NCG monad keeps track of a list of imported symbols.
- MachCodeGen invokes initializePicBase to generate code to initialize
the PIC base register when needed.
- MachCodeGen calls cmmMakeDynamicReference whenever it uses a CLabel
that wasn't in the original Cmm code (e.g. floating point literals).
-}
module PIC (
cmmMakeDynamicReference,
CmmMakeDynamicReferenceM(..),
ReferenceKind(..),
needImportedSymbols,
pprImportedSymbol,
pprGotDeclaration,
initializePicBase_ppc,
initializePicBase_x86
)
where
import qualified PPC.Instr as PPC
import qualified PPC.Regs as PPC
import qualified X86.Instr as X86
import Platform
import Instruction
import Reg
import NCGMonad
import Hoopl
import Cmm
import CLabel ( CLabel, ForeignLabelSource(..), pprCLabel,
mkDynamicLinkerLabel, DynamicLinkerLabelInfo(..),
dynamicLinkerLabelInfo, mkPicBaseLabel,
labelDynamic, externallyVisibleCLabel )
import CLabel ( mkForeignLabel )
import BasicTypes
import Module
import Outputable
import DynFlags
import FastString
--------------------------------------------------------------------------------
-- It gets called by the cmmToCmm pass for every CmmLabel in the Cmm
-- code. It does The Right Thing(tm) to convert the CmmLabel into a
-- position-independent, dynamic-linking-aware reference to the thing
-- in question.
-- Note that this also has to be called from MachCodeGen in order to
-- access static data like floating point literals (labels that were
-- created after the cmmToCmm pass).
-- The function must run in a monad that can keep track of imported symbols
-- A function for recording an imported symbol must be passed in:
-- - addImportCmmOpt for the CmmOptM monad
-- - addImportNat for the NatM monad.
data ReferenceKind
= DataReference
| CallReference
| JumpReference
deriving(Eq)
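-- The monad needs just two capabilities: recording an imported symbol and
-- asking which module is being compiled. 'NatM' (the native code generator
-- monad) is the instance used below.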
class Monad m => CmmMakeDynamicReferenceM m where
addImport :: CLabel -> m ()
getThisModule :: m Module
instance CmmMakeDynamicReferenceM NatM where
addImport = addImportNat
getThisModule = getThisModuleNat
cmmMakeDynamicReference
:: CmmMakeDynamicReferenceM m
=> DynFlags
-> ReferenceKind -- whether this is the target of a jump
-> CLabel -- the label
-> m CmmExpr
cmmMakeDynamicReference dflags referenceKind lbl
| Just _ <- dynamicLinkerLabelInfo lbl
= return $ CmmLit $ CmmLabel lbl -- already processed it, pass through
| otherwise
= do this_mod <- getThisModule
case howToAccessLabel
dflags
(platformArch $ targetPlatform dflags)
(platformOS $ targetPlatform dflags)
this_mod
referenceKind lbl of
AccessViaStub -> do
let stub = mkDynamicLinkerLabel CodeStub lbl
addImport stub
return $ CmmLit $ CmmLabel stub
AccessViaSymbolPtr -> do
let symbolPtr = mkDynamicLinkerLabel SymbolPtr lbl
addImport symbolPtr
return $ CmmLoad (cmmMakePicReference dflags symbolPtr) (bWord dflags)
AccessDirectly -> case referenceKind of
-- for data, we might have to make some calculations:
DataReference -> return $ cmmMakePicReference dflags lbl
-- all currently supported processors support
-- PC-relative branch and call instructions,
-- so just jump there if it's a call or a jump
_ -> return $ CmmLit $ CmmLabel lbl
-- -----------------------------------------------------------------------------
-- Create a position independent reference to a label.
-- (but do not bother with dynamic linking).
-- We calculate the label's address by adding some (platform-dependent)
-- offset to our base register; this offset is calculated by
-- the function picRelative in the platform-dependent part below.
cmmMakePicReference :: DynFlags -> CLabel -> CmmExpr
cmmMakePicReference dflags lbl
-- Windows doesn't need PIC,
-- everything gets relocated at runtime
| OSMinGW32 <- platformOS $ targetPlatform dflags
= CmmLit $ CmmLabel lbl
| OSAIX <- platformOS $ targetPlatform dflags
= CmmMachOp (MO_Add W32)
[ CmmReg (CmmGlobal PicBaseReg)
, CmmLit $ picRelative
(platformArch $ targetPlatform dflags)
(platformOS $ targetPlatform dflags)
lbl ]
-- both ABI versions default to medium code model
| ArchPPC_64 _ <- platformArch $ targetPlatform dflags
= CmmMachOp (MO_Add W32) -- code model medium
[ CmmReg (CmmGlobal PicBaseReg)
, CmmLit $ picRelative
(platformArch $ targetPlatform dflags)
(platformOS $ targetPlatform dflags)
lbl ]
| (gopt Opt_PIC dflags || WayDyn `elem` ways dflags) && absoluteLabel lbl
= CmmMachOp (MO_Add (wordWidth dflags))
[ CmmReg (CmmGlobal PicBaseReg)
, CmmLit $ picRelative
(platformArch $ targetPlatform dflags)
(platformOS $ targetPlatform dflags)
lbl ]
| otherwise
= CmmLit $ CmmLabel lbl
absoluteLabel :: CLabel -> Bool
absoluteLabel lbl
= case dynamicLinkerLabelInfo lbl of
Just (GotSymbolPtr, _) -> False
Just (GotSymbolOffset, _) -> False
_ -> True
--------------------------------------------------------------------------------
-- Knowledge about how special dynamic linker labels like symbol
-- pointers, code stubs and GOT offsets look like is located in the
-- module CLabel.
-- We have to decide which labels need to be accessed
-- indirectly or via a piece of stub code.
data LabelAccessStyle
= AccessViaStub
| AccessViaSymbolPtr
| AccessDirectly
howToAccessLabel
:: DynFlags -> Arch -> OS -> Module -> ReferenceKind -> CLabel -> LabelAccessStyle
-- Windows
-- In Windows speak, a "module" is a set of objects linked into the
-- same Portable Executable (PE) file (both .exe and .dll files are PEs).
--
-- If we're compiling a multi-module program then symbols from other modules
-- are accessed by a symbol pointer named __imp_SYMBOL. At runtime we have the
-- following.
--
-- (in the local module)
-- __imp_SYMBOL: addr of SYMBOL
--
-- (in the other module)
-- SYMBOL: the real function / data.
--
-- To access the function at SYMBOL from our local module, we just need to
-- dereference the local __imp_SYMBOL.
--
-- If not compiling with -dynamic we assume that all our code will be linked
-- into the same .exe file. In this case we always access symbols directly,
-- and never use __imp_SYMBOL.
--
howToAccessLabel dflags _ OSMinGW32 this_mod _ lbl
-- Assume all symbols will be in the same PE, so just access them directly.
| WayDyn `notElem` ways dflags
= AccessDirectly
-- If the target symbol is in another PE we need to access it via the
-- appropriate __imp_SYMBOL pointer.
| labelDynamic dflags (thisPackage dflags) this_mod lbl
= AccessViaSymbolPtr
-- Target symbol is in the same PE as the caller, so just access it directly.
| otherwise
= AccessDirectly
-- Mach-O (Darwin, Mac OS X)
--
-- Indirect access is required in the following cases:
-- * things imported from a dynamic library
-- * (not on x86_64) data from a different module, if we're generating PIC code
-- It is always possible to access something indirectly,
-- even when it's not necessary.
--
howToAccessLabel dflags arch OSDarwin this_mod DataReference lbl
-- data access to a dynamic library goes via a symbol pointer
| labelDynamic dflags (thisPackage dflags) this_mod lbl
= AccessViaSymbolPtr
-- when generating PIC code, all cross-module data references
-- must go via a symbol pointer, too, because the assembler
-- cannot generate code for a label difference where one
-- label is undefined. This doesn't apply to x86_64.
-- Unfortunately, we don't know whether it's cross-module,
-- so we do it for all externally visible labels.
-- This is a slight waste of time and space, but otherwise
-- we'd need to pass the current Module all the way in to
-- this function.
| arch /= ArchX86_64
, gopt Opt_PIC dflags && externallyVisibleCLabel lbl
= AccessViaSymbolPtr
| otherwise
= AccessDirectly
howToAccessLabel dflags arch OSDarwin this_mod JumpReference lbl
-- dyld code stubs don't work for tailcalls because the
-- stack alignment is only right for regular calls.
-- Therefore, we have to go via a symbol pointer:
| arch == ArchX86 || arch == ArchX86_64
, labelDynamic dflags (thisPackage dflags) this_mod lbl
= AccessViaSymbolPtr
howToAccessLabel dflags arch OSDarwin this_mod _ lbl
-- Code stubs are the usual method of choice for imported code;
-- not needed on x86_64 because Apple's new linker, ld64, generates
-- them automatically.
| arch /= ArchX86_64
, labelDynamic dflags (thisPackage dflags) this_mod lbl
= AccessViaStub
| otherwise
= AccessDirectly
----------------------------------------------------------------------------
-- AIX
-- quite simple (for now)
howToAccessLabel _dflags _arch OSAIX _this_mod kind _lbl
= case kind of
DataReference -> AccessViaSymbolPtr
CallReference -> AccessDirectly
JumpReference -> AccessDirectly
-- ELF (Linux)
--
-- ELF tries to pretend to the main application code that dynamic linking does
-- not exist. While this may sound convenient, it tends to mess things up in
-- very bad ways, so we have to be careful when we generate code for the main
-- program (-dynamic but no -fPIC).
--
-- Indirect access is required for references to imported symbols
-- from position independent code. It is also required from the main program
-- when dynamic libraries containing Haskell code are used.
howToAccessLabel _ (ArchPPC_64 _) os _ kind _
| osElfTarget os
= case kind of
-- ELF PPC64 (powerpc64-linux), AIX, MacOS 9, BeOS/PPC
DataReference -> AccessViaSymbolPtr
-- RTLD does not generate stubs for function descriptors
-- in tail calls. Create a symbol pointer and generate
-- the code to load the function descriptor at the call site.
JumpReference -> AccessViaSymbolPtr
-- regular calls are handled by the runtime linker
_ -> AccessDirectly
howToAccessLabel dflags _ os _ _ _
-- no PIC -> the dynamic linker does everything for us;
-- if we don't dynamically link to Haskell code,
-- it actually manages to do so without messing things up.
| osElfTarget os
, not (gopt Opt_PIC dflags) && WayDyn `notElem` ways dflags
= AccessDirectly
howToAccessLabel dflags arch os this_mod DataReference lbl
| osElfTarget os
= case () of
-- A dynamic label needs to be accessed via a symbol pointer.
_ | labelDynamic dflags (thisPackage dflags) this_mod lbl
-> AccessViaSymbolPtr
-- For PowerPC32 -fPIC, we have to access even static data
-- via a symbol pointer (see below for an explanation why
-- PowerPC32 Linux is especially broken).
| arch == ArchPPC
, gopt Opt_PIC dflags
-> AccessViaSymbolPtr
| otherwise
-> AccessDirectly
-- In most cases, we have to avoid symbol stubs on ELF, for the following reasons:
-- on i386, the position-independent symbol stubs in the Procedure Linkage Table
-- require the address of the GOT to be loaded into register %ebx on entry.
-- The linker will take any reference to the symbol stub as a hint that
-- the label in question is a code label. When linking executables, this
-- will cause the linker to replace even data references to the label with
-- references to the symbol stub.
-- This leaves calling a (foreign) function from non-PIC code
-- (AccessDirectly, because we get an implicit symbol stub)
-- and calling functions from PIC code on non-i386 platforms (via a symbol stub)
howToAccessLabel dflags arch os this_mod CallReference lbl
| osElfTarget os
, labelDynamic dflags (thisPackage dflags) this_mod lbl && not (gopt Opt_PIC dflags)
= AccessDirectly
| osElfTarget os
, arch /= ArchX86
, labelDynamic dflags (thisPackage dflags) this_mod lbl && gopt Opt_PIC dflags
= AccessViaStub
howToAccessLabel dflags _ os this_mod _ lbl
| osElfTarget os
= if labelDynamic dflags (thisPackage dflags) this_mod lbl
then AccessViaSymbolPtr
else AccessDirectly
-- all other platforms
howToAccessLabel dflags _ _ _ _ _
| not (gopt Opt_PIC dflags)
= AccessDirectly
| otherwise
= panic "howToAccessLabel: PIC not defined for this platform"
-- -------------------------------------------------------------------
-- | Says what we have to add to our 'PIC base register' in order to
-- get the address of a label.
picRelative :: Arch -> OS -> CLabel -> CmmLit
-- Darwin, but not x86_64:
-- The PIC base register points to the PIC base label at the beginning
-- of the current CmmDecl. We just have to use a label difference to
-- get the offset.
-- We have already made sure that all labels that are not from the current
-- module are accessed indirectly ('as' can't calculate differences between
-- undefined labels).
picRelative arch OSDarwin lbl
| arch /= ArchX86_64
= CmmLabelDiffOff lbl mkPicBaseLabel 0
-- On AIX we use an indirect local TOC anchored by 'gotLabel'.
-- This way we use up only one global TOC entry per compilation-unit
-- (this is quite similar to GCC's @-mminimal-toc@ compilation mode)
picRelative _ OSAIX lbl
= CmmLabelDiffOff lbl gotLabel 0
-- PowerPC Linux:
-- The PIC base register points to our fake GOT. Use a label difference
-- to get the offset.
-- We have made sure that *everything* is accessed indirectly, so this
-- is only used for offsets from the GOT to symbol pointers inside the
-- GOT.
picRelative ArchPPC os lbl
| osElfTarget os
= CmmLabelDiffOff lbl gotLabel 0
-- Most Linux versions:
-- The PIC base register points to the GOT. Use foo@got for symbol
-- pointers, and foo@gotoff for everything else.
-- Linux and Darwin on x86_64:
-- The PIC base register is %rip, we use foo@gotpcrel for symbol pointers,
-- and a GotSymbolOffset label for other things.
-- For reasons of tradition, the symbol offset label is written as a plain label.
picRelative arch os lbl
| osElfTarget os || (os == OSDarwin && arch == ArchX86_64)
= let result
| Just (SymbolPtr, lbl') <- dynamicLinkerLabelInfo lbl
= CmmLabel $ mkDynamicLinkerLabel GotSymbolPtr lbl'
| otherwise
= CmmLabel $ mkDynamicLinkerLabel GotSymbolOffset lbl
in result
picRelative _ _ _
= panic "PositionIndependentCode.picRelative undefined for this platform"
--------------------------------------------------------------------------------
needImportedSymbols :: DynFlags -> Arch -> OS -> Bool
needImportedSymbols dflags arch os
| os == OSDarwin
, arch /= ArchX86_64
= True
| os == OSAIX
= True
-- PowerPC Linux: -fPIC or -dynamic
| osElfTarget os
, arch == ArchPPC
= gopt Opt_PIC dflags || WayDyn `elem` ways dflags
-- PowerPC 64 Linux: always
| osElfTarget os
, arch == ArchPPC_64 ELF_V1 || arch == ArchPPC_64 ELF_V2
= True
-- i386 (and others?): -dynamic but not -fPIC
| osElfTarget os
, arch /= ArchPPC_64 ELF_V1 && arch /= ArchPPC_64 ELF_V2
= WayDyn `elem` ways dflags && not (gopt Opt_PIC dflags)
| otherwise
= False
-- gotLabel
-- The label used to refer to our "fake GOT" from
-- position-independent code.
gotLabel :: CLabel
gotLabel
-- HACK: this label isn't really foreign
= mkForeignLabel
(fsLit ".LCTOC1")
Nothing ForeignLabelInThisPackage IsData
--------------------------------------------------------------------------------
-- We don't need to declare any offset tables.
-- However, for PIC on x86, we need a small helper function.
pprGotDeclaration :: DynFlags -> Arch -> OS -> SDoc
pprGotDeclaration dflags ArchX86 OSDarwin
| gopt Opt_PIC dflags
= vcat [
text ".section __TEXT,__textcoal_nt,coalesced,no_toc",
text ".weak_definition ___i686.get_pc_thunk.ax",
text ".private_extern ___i686.get_pc_thunk.ax",
text "___i686.get_pc_thunk.ax:",
text "\tmovl (%esp), %eax",
text "\tret" ]
pprGotDeclaration _ _ OSDarwin
= empty
-- Emit XCOFF TOC section
pprGotDeclaration _ _ OSAIX
= vcat $ [ text ".toc"
, text ".tc ghc_toc_table[TC],.LCTOC1"
, text ".csect ghc_toc_table[RW]"
-- See Note [.LCTOC1 in PPC PIC code]
, text ".set .LCTOC1,$+0x8000"
]
-- PPC 64 ELF v1 needs a Table Of Contents (TOC) on Linux
pprGotDeclaration _ (ArchPPC_64 ELF_V1) OSLinux
= text ".section \".toc\",\"aw\""
-- In ELF v2 we also need to tell the assembler that we want ABI
-- version 2. This would normally be done at the top of the file
-- right after a file directive, but I could not figure out how
-- to do that.
pprGotDeclaration _ (ArchPPC_64 ELF_V2) OSLinux
= vcat [ text ".abiversion 2",
text ".section \".toc\",\"aw\""
]
pprGotDeclaration _ (ArchPPC_64 _) _
= panic "pprGotDeclaration: ArchPPC_64 only Linux supported"
-- Emit GOT declaration
-- Output whatever needs to be output once per .s file.
pprGotDeclaration dflags arch os
| osElfTarget os
, arch /= ArchPPC_64 ELF_V1 && arch /= ArchPPC_64 ELF_V2
, not (gopt Opt_PIC dflags)
= empty
| osElfTarget os
, arch /= ArchPPC_64 ELF_V1 && arch /= ArchPPC_64 ELF_V2
= vcat [
-- See Note [.LCTOC1 in PPC PIC code]
text ".section \".got2\",\"aw\"",
text ".LCTOC1 = .+32768" ]
pprGotDeclaration _ _ _
= panic "pprGotDeclaration: no match"
--------------------------------------------------------------------------------
-- On Darwin, we have to generate our own stub code for lazy binding..
-- For each processor architecture, there are two versions, one for PIC
-- and one for non-PIC.
--
-- Whenever you change something in this assembler output, make sure
-- the splitter in driver/split/ghc-split.lprl recognizes the new output
pprImportedSymbol :: DynFlags -> Platform -> CLabel -> SDoc
pprImportedSymbol dflags platform@(Platform { platformArch = ArchPPC, platformOS = OSDarwin }) importedLbl
| Just (CodeStub, lbl) <- dynamicLinkerLabelInfo importedLbl
= case gopt Opt_PIC dflags of
False ->
vcat [
text ".symbol_stub",
text "L" <> pprCLabel platform lbl <> ptext (sLit "$stub:"),
text "\t.indirect_symbol" <+> pprCLabel platform lbl,
text "\tlis r11,ha16(L" <> pprCLabel platform lbl
<> text "$lazy_ptr)",
text "\tlwz r12,lo16(L" <> pprCLabel platform lbl
<> text "$lazy_ptr)(r11)",
text "\tmtctr r12",
text "\taddi r11,r11,lo16(L" <> pprCLabel platform lbl
<> text "$lazy_ptr)",
text "\tbctr"
]
True ->
vcat [
text ".section __TEXT,__picsymbolstub1,"
<> text "symbol_stubs,pure_instructions,32",
text "\t.align 2",
text "L" <> pprCLabel platform lbl <> ptext (sLit "$stub:"),
text "\t.indirect_symbol" <+> pprCLabel platform lbl,
text "\tmflr r0",
text "\tbcl 20,31,L0$" <> pprCLabel platform lbl,
text "L0$" <> pprCLabel platform lbl <> char ':',
text "\tmflr r11",
text "\taddis r11,r11,ha16(L" <> pprCLabel platform lbl
<> text "$lazy_ptr-L0$" <> pprCLabel platform lbl <> char ')',
text "\tmtlr r0",
text "\tlwzu r12,lo16(L" <> pprCLabel platform lbl
<> text "$lazy_ptr-L0$" <> pprCLabel platform lbl
<> text ")(r11)",
text "\tmtctr r12",
text "\tbctr"
]
$+$ vcat [
text ".lazy_symbol_pointer",
text "L" <> pprCLabel platform lbl <> ptext (sLit "$lazy_ptr:"),
text "\t.indirect_symbol" <+> pprCLabel platform lbl,
text "\t.long dyld_stub_binding_helper"]
| Just (SymbolPtr, lbl) <- dynamicLinkerLabelInfo importedLbl
= vcat [
text ".non_lazy_symbol_pointer",
char 'L' <> pprCLabel platform lbl <> text "$non_lazy_ptr:",
text "\t.indirect_symbol" <+> pprCLabel platform lbl,
text "\t.long\t0"]
| otherwise
= empty
pprImportedSymbol dflags platform@(Platform { platformArch = ArchX86, platformOS = OSDarwin }) importedLbl
| Just (CodeStub, lbl) <- dynamicLinkerLabelInfo importedLbl
= case gopt Opt_PIC dflags of
False ->
vcat [
text ".symbol_stub",
text "L" <> pprCLabel platform lbl <> ptext (sLit "$stub:"),
text "\t.indirect_symbol" <+> pprCLabel platform lbl,
text "\tjmp *L" <> pprCLabel platform lbl
<> text "$lazy_ptr",
text "L" <> pprCLabel platform lbl
<> text "$stub_binder:",
text "\tpushl $L" <> pprCLabel platform lbl
<> text "$lazy_ptr",
text "\tjmp dyld_stub_binding_helper"
]
True ->
vcat [
text ".section __TEXT,__picsymbolstub2,"
<> text "symbol_stubs,pure_instructions,25",
text "L" <> pprCLabel platform lbl <> ptext (sLit "$stub:"),
text "\t.indirect_symbol" <+> pprCLabel platform lbl,
text "\tcall ___i686.get_pc_thunk.ax",
text "1:",
text "\tmovl L" <> pprCLabel platform lbl
<> text "$lazy_ptr-1b(%eax),%edx",
text "\tjmp *%edx",
text "L" <> pprCLabel platform lbl
<> text "$stub_binder:",
text "\tlea L" <> pprCLabel platform lbl
<> text "$lazy_ptr-1b(%eax),%eax",
text "\tpushl %eax",
text "\tjmp dyld_stub_binding_helper"
]
$+$ vcat [ text ".section __DATA, __la_sym_ptr"
<> (if gopt Opt_PIC dflags then int 2 else int 3)
<> text ",lazy_symbol_pointers",
text "L" <> pprCLabel platform lbl <> ptext (sLit "$lazy_ptr:"),
text "\t.indirect_symbol" <+> pprCLabel platform lbl,
text "\t.long L" <> pprCLabel platform lbl
<> text "$stub_binder"]
| Just (SymbolPtr, lbl) <- dynamicLinkerLabelInfo importedLbl
= vcat [
text ".non_lazy_symbol_pointer",
char 'L' <> pprCLabel platform lbl <> text "$non_lazy_ptr:",
text "\t.indirect_symbol" <+> pprCLabel platform lbl,
text "\t.long\t0"]
| otherwise
= empty
pprImportedSymbol _ (Platform { platformOS = OSDarwin }) _
= empty
-- XCOFF / AIX
--
-- Similar to PPC64 ELF v1, there's a dedicated TOC register (r2). To
-- work around the limitation of a global TOC we use an indirect TOC
-- with the label `ghc_toc_table`.
--
-- See also GCC's `-mminimal-toc` compilation mode or
-- http://www.ibm.com/developerworks/rational/library/overview-toc-aix/
--
-- NB: No DSO-support yet
pprImportedSymbol _ platform@(Platform { platformOS = OSAIX }) importedLbl
= case dynamicLinkerLabelInfo importedLbl of
Just (SymbolPtr, lbl)
-> vcat [
text "LC.." <> pprCLabel platform lbl <> char ':',
text "\t.long" <+> pprCLabel platform lbl ]
_ -> empty
-- ELF / Linux
--
-- In theory, we don't need to generate any stubs or symbol pointers
-- by hand for Linux.
--
-- Reality differs from this in two areas.
--
-- 1) If we just use a dynamically imported symbol directly in a read-only
-- section of the main executable (as GCC does), ld generates R_*_COPY
-- relocations, which are fundamentally incompatible with reversed info
-- tables. Therefore, we need a table of imported addresses in a writable
-- section.
-- The "official" GOT mechanism (label@got) isn't intended to be used
-- in position dependent code, so we have to create our own "fake GOT"
-- when not Opt_PIC && WayDyn `elem` ways dflags.
--
-- 2) PowerPC Linux is just plain broken.
-- While it's theoretically possible to use GOT offsets larger
-- than 16 bit, the standard crt*.o files don't, which leads to
-- linker errors as soon as the GOT size exceeds 16 bit.
-- Also, the assembler doesn't support @gotoff labels.
-- In order to be able to use a larger GOT, we have to circumvent the
-- entire GOT mechanism and do it ourselves (this is also what GCC does).
-- When needImportedSymbols is defined,
-- the NCG will keep track of all DynamicLinkerLabels it uses
-- and output each of them using pprImportedSymbol.
pprImportedSymbol _ platform@(Platform { platformArch = ArchPPC_64 _ })
importedLbl
| osElfTarget (platformOS platform)
= case dynamicLinkerLabelInfo importedLbl of
Just (SymbolPtr, lbl)
-> vcat [
text ".section \".toc\", \"aw\"",
text ".LC_" <> pprCLabel platform lbl <> char ':',
text "\t.quad" <+> pprCLabel platform lbl ]
_ -> empty
pprImportedSymbol dflags platform importedLbl
| osElfTarget (platformOS platform)
= case dynamicLinkerLabelInfo importedLbl of
Just (SymbolPtr, lbl)
-> let symbolSize = case wordWidth dflags of
W32 -> sLit "\t.long"
W64 -> sLit "\t.quad"
_ -> panic "Unknown wordRep in pprImportedSymbol"
in vcat [
text ".section \".got2\", \"aw\"",
text ".LC_" <> pprCLabel platform lbl <> char ':',
ptext symbolSize <+> pprCLabel platform lbl ]
-- PLT code stubs are generated automatically by the dynamic linker.
_ -> empty
pprImportedSymbol _ _ _
= panic "PIC.pprImportedSymbol: no match"
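-- As an illustration (added note, not from the original source): for an
-- imported symbol @foo@ the SymbolPtr cases above emit roughly
--
--      .section ".got2", "aw"          -- 32-bit ELF
--      .LC_foo:
--              .long foo
--
-- or, on PPC64 ELF, the corresponding entry in the ".toc" section using .quad.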
--------------------------------------------------------------------------------
-- Generate code to calculate the address that should be put in the
-- PIC base register.
-- This is called by MachCodeGen for every CmmProc that accessed the
-- PIC base register. It adds the appropriate instructions to the
-- top of the CmmProc.
-- It is assumed that the first NatCmmDecl in the input list is a Proc
-- and the rest are CmmDatas.
-- Darwin is simple: just fetch the address of a local label.
-- The FETCHPC pseudo-instruction is expanded to multiple instructions
-- during pretty-printing so that we don't have to deal with the
-- local label:
-- PowerPC version:
-- bcl 20,31,1f.
-- 1: mflr picReg
-- i386 version:
-- call 1f
-- 1: popl %picReg
-- Get a pointer to our own fake GOT, which is defined on a per-module basis.
-- This is exactly how GCC does it in linux.
initializePicBase_ppc
:: Arch -> OS -> Reg
-> [NatCmmDecl CmmStatics PPC.Instr]
-> NatM [NatCmmDecl CmmStatics PPC.Instr]
initializePicBase_ppc ArchPPC os picReg
(CmmProc info lab live (ListGraph blocks) : statics)
| osElfTarget os
= do
let
gotOffset = PPC.ImmConstantDiff
(PPC.ImmCLbl gotLabel)
(PPC.ImmCLbl mkPicBaseLabel)
blocks' = case blocks of
[] -> []
(b:bs) -> fetchPC b : map maybeFetchPC bs
maybeFetchPC b@(BasicBlock bID _)
| bID `mapMember` info = fetchPC b
| otherwise = b
-- GCC does PIC prologs thusly:
-- bcl 20,31,.L1
-- .L1:
-- mflr 30
-- addis 30,30,.LCTOC1-.L1@ha
-- addi 30,30,.LCTOC1-.L1@l
              -- TODO: below we use picReg as a temporary and copy the result
              --       into r30; it can and should be optimised by picking
              --       the correct PIC reg directly.
fetchPC (BasicBlock bID insns) =
BasicBlock bID (PPC.FETCHPC picReg
: PPC.ADDIS picReg picReg (PPC.HA gotOffset)
: PPC.ADDI picReg picReg (PPC.LO gotOffset)
: PPC.MR PPC.r30 picReg
: insns)
return (CmmProc info lab live (ListGraph blocks') : statics)
initializePicBase_ppc ArchPPC OSDarwin picReg
(CmmProc info lab live (ListGraph (entry:blocks)) : statics) -- just one entry because of splitting
= return (CmmProc info lab live (ListGraph (b':blocks)) : statics)
where BasicBlock bID insns = entry
b' = BasicBlock bID (PPC.FETCHPC picReg : insns)
-------------------------------------------------------------------------
-- Load TOC into register 2
-- PowerPC 64-bit ELF ABI 2.0 requires the address of the callee
-- in register 12.
-- We pass the label to FETCHTOC and create a .localentry too.
-- TODO: Explain this better and refer to ABI spec!
{-
We would like to do approximately this, but spill slot allocation
might be added before the first BasicBlock. That violates the ABI.
For now we will emit the prologue code in the pretty printer,
which is also what we do for ELF v1.
initializePicBase_ppc (ArchPPC_64 ELF_V2) OSLinux picReg
(CmmProc info lab live (ListGraph (entry:blocks)) : statics)
= do
       bID <- getUniqueM
return (CmmProc info lab live (ListGraph (b':entry:blocks))
: statics)
where BasicBlock entryID _ = entry
b' = BasicBlock bID [PPC.FETCHTOC picReg lab,
PPC.BCC PPC.ALWAYS entryID]
-}
initializePicBase_ppc _ _ _ _
= panic "initializePicBase_ppc: not needed"
-- We cheat a bit here by defining a pseudo-instruction named FETCHGOT
-- which pretty-prints as:
-- call 1f
-- 1: popl %picReg
-- addl __GLOBAL_OFFSET_TABLE__+.-1b, %picReg
-- (See PprMach.hs)
initializePicBase_x86
:: Arch -> OS -> Reg
-> [NatCmmDecl (Alignment, CmmStatics) X86.Instr]
-> NatM [NatCmmDecl (Alignment, CmmStatics) X86.Instr]
initializePicBase_x86 ArchX86 os picReg
(CmmProc info lab live (ListGraph blocks) : statics)
| osElfTarget os
= return (CmmProc info lab live (ListGraph blocks') : statics)
where blocks' = case blocks of
[] -> []
(b:bs) -> fetchGOT b : map maybeFetchGOT bs
-- we want to add a FETCHGOT instruction to the beginning of
-- every block that is an entry point, which corresponds to
-- the blocks that have entries in the info-table mapping.
maybeFetchGOT b@(BasicBlock bID _)
| bID `mapMember` info = fetchGOT b
| otherwise = b
fetchGOT (BasicBlock bID insns) =
BasicBlock bID (X86.FETCHGOT picReg : insns)
initializePicBase_x86 ArchX86 OSDarwin picReg
(CmmProc info lab live (ListGraph (entry:blocks)) : statics)
= return (CmmProc info lab live (ListGraph (block':blocks)) : statics)
where BasicBlock bID insns = entry
block' = BasicBlock bID (X86.FETCHPC picReg : insns)
initializePicBase_x86 _ _ _ _
= panic "initializePicBase_x86: not needed"
|
tjakway/ghcjvm
|
compiler/nativeGen/PIC.hs
|
bsd-3-clause
| 34,688 | 0 | 20 | 10,560 | 4,972 | 2,531 | 2,441 | 437 | 9 |
module PPC.Cond (
Cond(..),
condNegate,
condUnsigned,
condToSigned,
condToUnsigned,
)
where
import GhcPrelude
import Panic
data Cond
= ALWAYS
| EQQ
| GE
| GEU
| GTT
| GU
| LE
| LEU
| LTT
| LU
| NE
deriving Eq
condNegate :: Cond -> Cond
condNegate ALWAYS = panic "condNegate: ALWAYS"
condNegate EQQ = NE
condNegate GE = LTT
condNegate GEU = LU
condNegate GTT = LE
condNegate GU = LEU
condNegate LE = GTT
condNegate LEU = GU
condNegate LTT = GE
condNegate LU = GEU
condNegate NE = EQQ
-- Condition utils
condUnsigned :: Cond -> Bool
condUnsigned GU = True
condUnsigned LU = True
condUnsigned GEU = True
condUnsigned LEU = True
condUnsigned _ = False
condToSigned :: Cond -> Cond
condToSigned GU = GTT
condToSigned LU = LTT
condToSigned GEU = GE
condToSigned LEU = LE
condToSigned x = x
condToUnsigned :: Cond -> Cond
condToUnsigned GTT = GU
condToUnsigned LTT = LU
condToUnsigned GE = GEU
condToUnsigned LE = LEU
condToUnsigned x = x
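-- Illustrative sanity check (not part of the original module): apart from
-- ALWAYS, negating a condition twice gives back the original condition.
--
-- >>> all (\c -> condNegate (condNegate c) == c) [EQQ,GE,GEU,GTT,GU,LE,LEU,LTT,LU,NE]
-- True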
|
shlevy/ghc
|
compiler/nativeGen/PPC/Cond.hs
|
bsd-3-clause
| 1,135 | 0 | 5 | 372 | 322 | 175 | 147 | 51 | 1 |
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveDataTypeable #-}
-----------------------------------------------------------------------------
-- |
-- Copyright : (C) 2014 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : PatternSynonyms
--
-- Half-precision floating-point values. These arise commonly in GPU work
-- and it is useful to be able to compute them and compute with them on the
-- CPU as well.
----------------------------------------------------------------------------
module T9857
( Half(..)
, isZero
, fromHalf
, toHalf
, pattern POS_INF
, pattern NEG_INF
, pattern QNaN
, pattern SNaN
, pattern HALF_MIN
, pattern HALF_NRM_MIN
, pattern HALF_MAX
, pattern HALF_EPSILON
, pattern HALF_DIG
, pattern HALF_MIN_10_EXP
, pattern HALF_MAX_10_EXP
) where
import Data.Bits
import Data.Function (on)
import Data.Typeable
import Foreign.C.Types
import Foreign.Storable
import Text.Read
-- | Convert a 'Float' to a 'Half' with proper rounding, while preserving NaN and dealing appropriately with infinity
foreign import ccall unsafe "hs_floatToHalf" toHalf :: Float -> Half
{-# RULES "toHalf" realToFrac = toHalf #-}
-- | Convert a 'Half' to a 'Float' while preserving NaN
foreign import ccall unsafe "hs_halfToFloat" fromHalf :: Half -> Float
{-# RULES "fromHalf" realToFrac = fromHalf #-}
newtype {-# CTYPE "unsigned short" #-} Half = Half { getHalf :: CUShort } deriving (Storable, Typeable)
instance Show Half where
showsPrec d h = showsPrec d (fromHalf h)
instance Read Half where
readPrec = fmap toHalf readPrec
instance Eq Half where
(==) = (==) `on` fromHalf
instance Ord Half where
compare = compare `on` fromHalf
instance Real Half where
toRational = toRational . fromHalf
instance Fractional Half where
fromRational = toHalf . fromRational
recip = toHalf . recip . fromHalf
a / b = toHalf $ fromHalf a / fromHalf b
instance RealFrac Half where
properFraction a = case properFraction (fromHalf a) of
(b, c) -> (b, toHalf c)
truncate = truncate . fromHalf
round = round . fromHalf
ceiling = ceiling . fromHalf
floor = floor . fromHalf
instance Floating Half where
pi = toHalf pi
exp = toHalf . exp . fromHalf
sqrt = toHalf . sqrt . fromHalf
log = toHalf . log . fromHalf
a ** b = toHalf $ fromHalf a ** fromHalf b
logBase a b = toHalf $ logBase (fromHalf a) (fromHalf b)
sin = toHalf . sin . fromHalf
tan = toHalf . tan . fromHalf
cos = toHalf . cos . fromHalf
asin = toHalf . asin . fromHalf
atan = toHalf . atan . fromHalf
acos = toHalf . acos . fromHalf
sinh = toHalf . sinh . fromHalf
tanh = toHalf . tanh . fromHalf
cosh = toHalf . cosh . fromHalf
asinh = toHalf . asinh . fromHalf
atanh = toHalf . atanh . fromHalf
acosh = toHalf . acosh . fromHalf
instance RealFloat Half where
floatRadix _ = 2
floatDigits _ = 11
decodeFloat = decodeFloat . fromHalf
  isInfinite (Half h) = unsafeShiftR h 10 .&. 0x1f == 0x1f && h .&. 0x3ff == 0
isIEEE _ = isIEEE (undefined :: Float)
atan2 a b = toHalf $ atan2 (fromHalf a) (fromHalf b)
isDenormalized (Half h) = unsafeShiftR h 10 .&. 0x1f == 0 && h .&. 0x3ff /= 0
isNaN (Half h) = unsafeShiftR h 10 .&. 0x1f == 0x1f && h .&. 0x3ff /= 0
isNegativeZero (Half h) = h == 0x8000
  floatRange _ = (-13,16)
encodeFloat i j = toHalf $ encodeFloat i j
exponent = exponent . fromHalf
significand = toHalf . significand . fromHalf
scaleFloat n = toHalf . scaleFloat n . fromHalf
-- | Is this 'Half' equal to 0?
isZero :: Half -> Bool
isZero (Half h) = h .&. 0x7fff == 0
-- | Positive infinity
pattern POS_INF = Half 0x7c00
-- | Negative infinity
pattern NEG_INF = Half 0xfc00
-- | Quiet NaN
pattern QNaN = Half 0x7fff
-- | Signalling NaN
pattern SNaN = Half 0x7dff
-- | Smallest positive half
pattern HALF_MIN = 5.96046448e-08 :: Half
-- | Smallest positive normalized half
pattern HALF_NRM_MIN = 6.10351562e-05 :: Half
-- | Largest positive half
pattern HALF_MAX = 65504.0 :: Half
-- | Smallest positive e for which half (1.0 + e) != half (1.0)
pattern HALF_EPSILON = 0.00097656 :: Half
-- | Number of base 10 digits that can be represented without change
pattern HALF_DIG = 2
-- | Minimum negative integer such that 10 raised to that power is a normalized half
pattern HALF_MIN_10_EXP = -4
-- | Maximum positive integer such that 10 raised to that power is a normalized half
pattern HALF_MAX_10_EXP = 4
instance Num Half where
a * b = toHalf (fromHalf a * fromHalf b)
a - b = toHalf (fromHalf a - fromHalf b)
a + b = toHalf (fromHalf a + fromHalf b)
negate (Half a) = Half (xor 0x8000 a)
abs = toHalf . abs . fromHalf
signum = toHalf . signum . fromHalf
fromInteger a = toHalf (fromInteger a)
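-- Illustrative helper (not part of the original test): a 'Half' carries only
-- an 11-bit significand, so round-tripping a 'Float' through 'Half' is lossy.
halfRoundTripError :: Float -> Float
halfRoundTripError x = abs (x - fromHalf (toHalf x))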
|
urbanslug/ghc
|
testsuite/tests/patsyn/should_compile/T9857.hs
|
bsd-3-clause
| 4,934 | 2 | 11 | 1,014 | 1,282 | 692 | 590 | 108 | 1 |
-- demonstrates a bug in mulIntMayOflo in GHC 6.5 on 64-bit arches
-- (trac #867).
-- It thought it could represent 3049800625 * 3049800625 in an I#.
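-- Indeed, 3049800625^2 is roughly 9.30e18, which exceeds
-- maxBound :: Int (about 9.22e18 on a 64-bit machine), so the product
-- below must be computed as an Integer rather than a machine Int#.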
i :: Integer
i = 3049800625
main :: IO ()
main = print (i * i)
|
hferreiro/replay
|
testsuite/tests/numeric/should_run/arith019.hs
|
bsd-3-clause
| 216 | 1 | 7 | 46 | 48 | 23 | 25 | 4 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Clingo
import Control.Monad.Except
import Data.List.NonEmpty (NonEmpty)
import Data.Text (Text)
type SymbolInjectionM
= Location -> Text -> [PureSymbol] -> ExceptT Text IO [PureSymbol]
groundCallback :: SymbolInjectionM
groundCallback _ "empty" _ = pure []
groundCallback _ "gcd" [PureNumber a, PureNumber b] =
pure [PureNumber $ gcd a b]
groundCallback _ _ _ = throwError "function not found"
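-- Illustrative note (not in the original example): for the program text in
-- 'main' below, this callback evaluates @gcd(210,213) to 3 and
-- @gcd(1365,385) to 35, while @empty contributes no symbols at all.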
printModel :: Model s -> IOSym s ()
printModel m = do
syms <-
map prettySymbol <$> modelSymbols m (selectNone {selectShown = True})
liftIO (putStr "Model: " >> print syms)
main :: IO ()
main =
withDefaultClingo $ do
addProgram
"base"
[]
"p(210,213). p(1365,385). gcd(X,Y,@gcd(X,Y)) :- p(X,Y). foo :- a(@empty(3))."
ground
[Part "base" []]
(Just $ \l t s -> runExceptT (groundCallback l t s))
withSolver [] (withModel printModel)
|
tsahyt/clingo-haskell
|
examples/GroundCallback.hs
|
mit
| 1,002 | 0 | 14 | 251 | 312 | 158 | 154 | 29 | 1 |
module Main where -- Cabal won't allow Spec module, it needs Main to start tests
import Data.List
import Data.Char
import Test.Hspec
import Test.QuickCheck
import Preprocessing (preprocessEmail, vectorizeMail, commonWords)
createMail :: [Char] -> [Char]
createMail s = "Subject: " ++ s ++ " Thanks."
allDifferent :: (Eq a) => [a] -> Bool
allDifferent [] = True
allDifferent (x:xs) = x `notElem` xs && allDifferent xs
isSorted :: Ord a => [a] -> Bool
isSorted [] = True
isSorted xs = foldr (\a f b -> (a >= b) && f a) (const True) (tail xs) (head xs)
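-- For example (illustrative): isSorted [1,2,2,5] == True and isSorted [2,1] == False;
-- the fold simply checks that every element is >= its predecessor.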
main :: IO ()
main = hspec $ do
describe "preprocessEmail" $ do
it "removes the first word" $ do
preprocessEmail "Subject:" `shouldBe` []
preprocessEmail "Subject: test" `shouldBe` ["test"]
it "lowercases all words" $ do
preprocessEmail "Subject: Johny Bravo and his girl"
`shouldBe` ["johny", "bravo", "and", "his", "girl"]
it "removes numbers" $ do
preprocessEmail "Subject: 1234" `shouldBe` []
preprocessEmail "Subject: 1234 135 123" `shouldBe` []
preprocessEmail "Subject: 1234 johny 135 bravo 123"
`shouldBe` ["johny", "bravo"]
it "removes punctuation marks" $ do
preprocessEmail "Subject: wow, johny bravo!"
`shouldBe` ["wow", "johny", "bravo"]
preprocessEmail "Subject: 1? That's not enough."
`shouldBe` ["thats", "not", "enough"]
it "removes short words (length <=2)" $ do
preprocessEmail "Subject: a bc" `shouldBe` []
preprocessEmail "Subject: johny is an important man"
`shouldBe` ["johny", "important", "man"]
it "returns [] on empty string or string with whitespace only" $ do
preprocessEmail "" `shouldBe` []
preprocessEmail " " `shouldBe` []
it "works like tail on already preprocessed" $
property $
\mail -> (preprocessEmail . unwords . preprocessEmail $
(createMail $ mail :: String)
) == (tail . preprocessEmail $ createMail mail)
it "returns lowercased words" $
property $
\mail -> all (\word -> all isLower word) $
preprocessEmail $ show (createMail $ mail :: String)
it "returns words longer than 2" $
property $
\mail -> all (\word -> length word > 2) $
preprocessEmail $ show (createMail $ mail :: String)
describe "vectorizeMail" $ do
-- example dictionary used for vectorization
let dict = [(1, "johny"), (2, "bravo"), (3, "really"), (4, "handsome")]
let dictLength = length dict
it "preprocesses mail and vectorizes it to a double array" $ do
vectorizeMail dict "Subject: johny bravo is really handsome"
`shouldBe` [1.0, 1.0, 1.0, 1.0]
vectorizeMail dict "Subject: james delta is quite ugly"
`shouldBe` [0.0, 0.0, 0.0, 0.0]
vectorizeMail dict "Subject: james bravo is quite handsome"
`shouldBe` [0.0, 1.0, 0.0, 1.0]
vectorizeMail dict "Subject: johny delta is really ugly"
`shouldBe` [1.0, 0.0, 1.0, 0.0]
it "returns double array of length equal to dictionary's length" $
property $
\mail -> (length $
vectorizeMail dict (createMail $ mail :: String)
) == dictLength
it "returns double array which contains 1.0 and 0.0 only" $
property $
\mail -> all (\d -> d == 1.0 || d == 0.0) $
vectorizeMail dict (createMail $ mail :: String)
describe "commonWords" $ do
    it "removes rare words (occurrences < 10)" $ do
let words = replicate 4 "is" ++ ["bravo"] ++ replicate 7 "johny"
commonWords words `shouldBe` []
it "does not return duplicates" $ do
let words = replicate 20 "a"
commonWords words `shouldBe` ["a"]
it "returns unique set of words" $ do
property $
\words -> allDifferent $ commonWords (words :: [String])
it "returns sorted set of words" $ do
property $
\words -> isSorted $ commonWords (words :: [String])
|
apisarek/malus
|
test/Spec.hs
|
mit
| 4,431 | 0 | 19 | 1,501 | 1,158 | 600 | 558 | 87 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
-- InfRuleShare.hs ---
--
-- Filename: InfRuleShare.hs
-- Description:
-- Author: Manuel Schneckenreither
-- Maintainer:
-- Created: Sun Sep 14 17:35:09 2014 (+0200)
-- Version:
-- Package-Requires: ()
-- Last-Updated: Mon Oct 9 16:16:08 2017 (+0200)
-- By: Manuel Schneckenreither
-- Update #: 482
-- URL:
-- Doc URL:
-- Keywords:
-- Compatibility:
--
--
-- Commentary:
--
--
--
--
-- Change Log:
--
--
--
--
--
-- Code:
{-# LANGUAGE CPP #-}
#define DEBUG
-- | TODO: comment this module
module Data.Rewriting.ARA.ByInferenceRules.InferenceRules.InfRuleShare
( share
)
where
import Data.Rewriting.ARA.ByInferenceRules.AnalyzerCondition
import Data.Rewriting.ARA.ByInferenceRules.AnalyzerCost
import Data.Rewriting.ARA.ByInferenceRules.AnalyzerDatatype
import Data.Rewriting.ARA.ByInferenceRules.AnalyzerSignature
import Data.Rewriting.ARA.ByInferenceRules.CmdLineArguments
import Data.Rewriting.ARA.ByInferenceRules.HelperFunctions
import Data.Rewriting.ARA.ByInferenceRules.InferenceRules.InfRuleMisc
import Data.Rewriting.ARA.ByInferenceRules.Operator
import Data.Rewriting.ARA.ByInferenceRules.Prove
import Data.Rewriting.ARA.ByInferenceRules.TypeSignatures
import Data.Rewriting.ARA.Constants
import Data.Rewriting.ARA.Exception
import Data.Rewriting.Typed.Datatype
import Data.Rewriting.Typed.Problem
import qualified Data.Rewriting.Typed.Term as T
import Control.Arrow
import Control.Exception (throw)
import Data.Function (on)
import Data.List (delete,
find,
foldl',
group,
groupBy,
sort,
sortBy,
(\\))
import Data.Maybe (fromMaybe,
isJust)
import Data.Ord (compare)
import Text.PrettyPrint
#ifdef DEBUG
import Debug.Trace (trace)
#endif
share :: forall f v dt . (Eq v, Eq dt, Read v, Ord v, Show v, Show dt, Show f) =>
ArgumentOptions
-> (ProblemSig f v f dt dt f, CfSigs dt f, ASigs dt f, Int,
ACondition f v Int Int, InfTreeNode f v dt)
-> [(ProblemSig f v f dt dt f, CfSigs dt f, ASigs dt f, Int,
ACondition f v Int Int, [InfTreeNode f v dt])]
share args (prob, cfsigs, asigs, nr, conds, InfTreeNode pre cst (Just (Fun f fc, dt))
i@(_,_,isCtrDeriv,_,_,_) his) =
-- trace ("share")
-- trace ("pre:" ++ show groupedPre)
-- trace ("post:" ++ show groupedPostVars)
-- trace ("pre':" ++ show pre')
-- trace ("pre'':" ++ show pre'')
-- trace ("zipped: " ++ show (zip groupedPre pre'))
-- trace ("shareConds:" ++ show shareConds)
-- trace ("post':" ++ show post')
-- trace ("nr':" ++ show nr')
-- trace ("help: " ++ show (pretty (InfTreeNode pre cst (Just (Fun f fc, dt)) fn [])))
-- trace ("\n\nfunctionName: " ++ show f)
-- -- trace ("varPostGroups: " ++ show varPostGroups)
-- -- trace ("any ((>1) . length) varPostGroups: " ++ show (any ((>1) . length) varPostGroups))
-- trace ("before: " ++ show (InfTreeNode pre cst (Just $ (Fun f fc, dt)) fn []))
-- trace ("after: " ++ show (InfTreeNode (concat pre') cst (Just (post', dt)) fn []))
-- trace ("varsPost: " ++ show varsPost)
-- trace ("varsToReplace: " ++ show varsToReplace)
-- trace ("subs: " ++ show subs)
-- trace ("groupedPre: " ++ show groupedPre )
-- trace ("groupedPostVars: " ++ show groupedPostVars)
-- trace ("SharePre: " ++ show pre)
-- trace ("SharePre'': " ++ show pre'')
-- trace ("share..:" ++ show ((zip groupedPre groupedPostVars)))
-- trace ("varsPost: " ++ show groupedPostVars)
-- trace ("varspre: " ++ show groupedPre)
[ (prob, cfsigs, asigs, nr', conds', [InfTreeNode pre'' cst (Just (post', dt)) i his'])
| any ((>1) . length) varPostGroups
]
where varsPost :: [v]
varsPost = map (\(Var x) -> x) (concatMap getTermVars fc)
varPostGroups = group $ sort varsPost
pre'' = concat pre' ++ filter ((`notElem` varsPost) . fst) pre
groupedPre :: [(v, ADatatype dt Int)]
groupedPre =
-- groupBy ((==) `on` fst) $
sortBy (compare `on` fst) $ -- grouped pre vars-
filter ((`elem` varsPost) . fst) pre
groupedPostVars :: [[v]]
groupedPostVars = group (sort varsPost)
hisNr | null his = 0
| otherwise = fst3 (last his) + 1
his' = his ++ [(hisNr, "share",
InfTreeNodeView
(map (show *** toADatatypeVectorString) (concat pre'))
(map toACostConditionVector cst)
(T.map show show post', toADatatypeVectorString dt))]
conds' = conds { shareConditions = shareConditions conds ++ shareConds }
geq | isJust (lowerboundArg args) || lowerbound args = Leq
| otherwise = Geq
shareConds = foldl shareConds' [] (zip groupedPre pre')
shareConds' acc ((pre,_), [(post,_)])
| pre == post = acc
| otherwise = error "BUG IN SHARE RULE"
shareConds' acc (preDt, postDts) =
acc ++ [(removeDt (snd preDt)
,if isCtrDeriv then Eq else geq
,map (removeDt . snd) postDts)]
origPreOrd ((a,_),_) =
snd $
fromMaybe (error "should not happen")
(find ((== a) . fst . fst) (zip pre [0..]))
(pre', nr') = -- first (sortBy (compare `on` fst . head)) $
-- trace ("(zip groupedPre groupedPostVars): " ++ show (zip groupedPre groupedPostVars)) $
foldl createPre' ([], nr) $
-- sortBy (compare `on` fst) -- revert original order
(zip groupedPre groupedPostVars)
createPre' :: ([[(v, ADatatype dt Int)]], Int)
-> ((v, ADatatype dt Int), [v])
-> ([[(v, ADatatype dt Int)]], Int)
createPre' (p', nrTmp) (pres, [_]) = (p' ++ [[pres]], nrTmp)
createPre' (p', nrTmp) (pres, posts) = (p' ++ [p''], nrTmp')
where (p'', nrTmp') = foldl fun ([], nrTmp) posts
fun :: ([(v, ADatatype dt Int)], Int) -> t -> ([(v, ADatatype dt Int)], Int)
fun (pres', nr'') _ =
(pres' ++ [(read (show varName), SigRefVar dtVar varName)], nr''+1)
where varName = varPrefix ++ show nr''
dtVar = actCostDt $ fetchSigValue asigs cfsigs (toADatatypeVector $ snd pres)
actCostDt (ActualCost _ dt' _) = dt'
actCostDt (SigRefVar dt' _) = dt'
actCostDt _ = error "should not be possible"
subs =
concat $
zipWith (\a b -> if length b == 1
then []
else [(fst a, map fst b)]
) (filter ((`elem` varsPost) . fst) pre) pre'
post' :: Term f v
post' = Fun f (snd $ foldl putVarsIntoTerm (subs, []) fc)
putVarsIntoTerm :: ([(v, [v])], [Term f v])
-> Term f v
-> ([(v, [v])], [Term f v])
putVarsIntoTerm (vs, acc) (Var v) =
case find (\x -> v == fst x) vs of
Nothing -> (vs, acc ++ [Var v])
Just (x,ls) -> if length ls == 1
then (delete (x,ls) vs, acc ++ [Var (head ls)])
else ((x, tail ls) : delete (x,ls) vs, acc ++ [Var (head ls)])
putVarsIntoTerm (vs, acc) (Fun f ch) = (vs', acc ++ [Fun f ch'])
where (vs', ch') = foldl putVarsIntoTerm (vs, []) ch
share _ _ = []
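-- Rough illustration (added note, not from the original source): when a
-- variable occurs more than once in the right-hand side, e.g. a right-hand
-- side of the shape g(x,x), the share rule replaces the occurrences by fresh
-- variables, giving g(v0,v1), and records a share condition relating the
-- annotation of x to the annotations of v0 and v1 (Eq when deriving
-- constructors, otherwise Geq, or Leq for lower-bound analysis).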
--
-- InfRuleShare.hs ends here
|
ComputationWithBoundedResources/ara-inference
|
src/Data/Rewriting/ARA/ByInferenceRules/InferenceRules/InfRuleShare.hs
|
mit
| 8,713 | 0 | 16 | 3,367 | 2,031 | 1,176 | 855 | 114 | 10 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeApplications #-}
{-# OPTIONS_GHC -Wall #-}
module Exercises where
import Data.Char
import Optics.Core
import Optics.TH
{-
Here are some example data types from the slides, and some test data to
experiment with.
-}
data Person = MkPerson
{ _personName :: String
, _personAge :: Int
, _personPets :: [Pet]
}
deriving (Show)
data Pet = MkPet
{ _petName :: String
, _petAge :: Int
}
deriving (Show)
alice, bob :: Person
alice = MkPerson {_personName = "Alice", _personAge = 65, _personPets = []}
bob =
MkPerson
{ _personName = "Bob"
, _personAge = 42
, _personPets =
[ MkPet {_petName = "Mr Scruffy", _petAge = 3}
, MkPet {_petName = "Mr Scruffy 2", _petAge = 4}
]
}
$(makeLenses ''Person)
$(makeLenses ''Pet)
{-
  The 'ages' fold extracts all the ages contained within a 'Person'.
* Define a function that tests whether any of the ages exceed the given
integer:
anyOlderThan :: Int -> Person -> Bool
(Avoid producing an intermediate list. The 'Optics.Fold' module has many
useful eliminators for folds.)
* Define the same function using 'has', which tests whether a fold returns any
values. Hint: check out 'filtered :: (a -> Bool) -> AffineFold a a'.
-}
ages :: Fold Person Int
ages = personAge `summing` (personPets % folded % petAge)
anyOlderThan :: Int -> Person -> Bool
anyOlderThan v = anyOf ages (> v)
{-
* Define a `Fold Person Pet` that visits every Pet whose age is greater than
the given integer:
petsOlderThan :: Int -> Fold Person Pet
* Use it to define a function that returns the list of names of such pets.
petNamesOlderThan :: Int -> Person -> [String]
* Why is 'filtered' a fold rather than a traversal? Hint: read the Haddocks
for 'unsafeFiltered'.
-}
sorry :: String -> a
sorry = error
petsOlderThan :: Int -> Fold Person Pet
petsOlderThan v = personPets % folded % filtered ((> v) . view petAge)
petNamesOlderThan :: Int -> Person -> [String]
petNamesOlderThan v = toListOf (petsOlderThan v % petName)
{-
* Define a traversal that visits the name of a person and all the names of
their pets:
names :: Traversal' Person String
* Use it to implement a function that capitalises all the names (using
'Data.Char.toUpper'):
capitaliseNames :: Person -> Person
-}
names :: Traversal' Person String
names = personName `adjoin` (personPets % traversed % petName)
capitaliseNames :: Person -> Person
capitaliseNames = over names (fmap toUpper)
{-
  Define a function that takes a list of 'Person's and, for each name that
  each of them contains (including pets' names), prints out the name and
  reads a replacement value from standard input:
replaceNames :: [Person] -> IO [Person]
For example:
ghci> replaceNames [alice,bob]
Replacement for Alice: Charlie
Replacement for Bob: Bob
Replacement for Mr Scruffy: Tiddles
[MkPerson {_personName = "Charlie", _personAge = 65, _personPets = []},MkPerson {_personName = "Bob", _personAge = 42, _personPets = [MkPet {_petName = "Tiddles", _petAge = 3}]}]
-}
replaceNames :: [Person] -> IO [Person]
replaceNames = undefined
{-
Here is a datatype of binary trees with labels at the nodes and leaves.
* Define a traversal of the leaf labels:
leaves :: Traversal (Tree a b) (Tree a' b) a a'
Hint: use 'traversalVL'.
* Test your traversal by using it to 'print' out the values stored in
'treeExample'. Try 'traverseOf' and 'traverseOf_'. How and why do they
differ?
* Define pre-order, in-order and post-order traversals of the node
labels:
preorder, inorder, postorder :: Traversal (Tree a b) (Tree a b') b b'
(A pre-order traversal visits the node label first, then the left subtree,
then the right subtree. An in-order traversal visits the node label between
the subtrees. A post-order traversal visits the node label after the
subtrees.)
* Define a function
attachIndices :: Num e => Traversal s t a (e, a) -> s -> t
that traverses the structure and annotates each value with its index
(i.e. attach 0 to the first value visited by the traversal, 1 to the next,
and so on).
Compare the results of using 'attachIndices' with 'preorder', 'inorder' and
'postorder' on 'treeExample'.
-}
data Tree a b
= Leaf a
| Node (Tree a b) b (Tree a b)
deriving (Show)
treeExample :: Tree Int Char
treeExample =
Node
(Node
(Node
(Leaf 3)
'c'
(Leaf 2))
'a'
(Leaf 4))
'b'
(Node
(Leaf 1)
'd'
(Leaf 2))
leaves :: Traversal (Tree a b) (Tree a' b) a a'
leaves = undefined
preorder, inorder, postorder :: Traversal (Tree a b) (Tree a b') b b'
preorder = undefined
inorder = undefined
postorder = undefined
attachIndices :: Num e => Traversal s t a (e, a) -> s -> t
attachIndices = undefined
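-- One possible solution sketch for 'leaves' (a spoiler, not part of the
-- original exercise file), using 'traversalVL' as the hint suggests:
--
--   leaves = traversalVL go
--     where
--       go f (Leaf a)     = Leaf <$> f a
--       go f (Node l b r) = Node <$> go f l <*> pure b <*> go f r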
{-
Here's a datatype of rose trees, where each node has a label and zero or more
children.
* Define a (pre-order) traversal of the labels in a rose tree:
rtreeLabels :: Traversal (RTree a) (RTree b) a b
* Use 'attachIndices' defined above to annotate the labels of 'rtreeExample'
with their indices.
* Define a non-type changing traversal of the labels without using
'traversalVL':
rtreeLabels' :: Traversal' (RTree a) a
Instead, use 'adjoin' to combine traversals built from the fields (you will
also need 'traversed'). Why can't this produce a type-changing traversal?
-}
data RTree a = MkRTree
{ _rtreeLabel :: a
, _rtreeChildren :: [RTree a]
}
deriving (Show)
$(makeLenses ''RTree)
rtreeExample :: RTree Char
rtreeExample = MkRTree 'a' [MkRTree 'b' [], MkRTree 'c' [MkRTree 'd' []], MkRTree 'e' []]
rtreeLabels :: Traversal (RTree a) (RTree b) a b
rtreeLabels = undefined
rtreeLabels' :: Traversal' (RTree a) a
rtreeLabels' = undefined
{-
* Define a "lens" focused on the value stored in 'Dubious', that uses the 'Int'
field to count the number of times the structure has been accessed.
* Define a type-preserving "traversal" that visits the value stored in
'Duplicated' twice. Can you define such a type-modifying traversal?
* How does this violate the 'Lens' and 'Traversal' laws? Does it matter?
-}
data Dubious a = MkDubious Int a
deriving (Show)
dubiousLens :: Lens (Dubious a) (Dubious b) a b
dubiousLens = undefined
data Duplicated a = MkDuplicated a
deriving (Show)
traverseDuplicated :: Traversal' (Duplicated a) a
traverseDuplicated = undefined
{-
* The following table is scrambled. Unscramble it, then compare the type
signatures of the class methods and the corresponding eliminators. What do
you notice?
|-------------|----------|-----------|------------|
| Class | Method | Optic | Eliminator |
|-------------|----------|-----------|------------|
| Functor | foldMap | Traversal | foldMapOf |
| Foldable | traverse | Setter | over |
| Traversable | fmap | Fold | traverseOf |
|-------------|----------|-----------|------------|
-}
|
Javran/misc
|
optics-zurihac-2021/exercises/Exercises.hs
|
mit
| 7,086 | 0 | 11 | 1,577 | 945 | 524 | 421 | 89 | 1 |
{-# htermination elemIndex :: Int -> [Int] -> Maybe Int #-}
import List
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/List_elemIndex_5.hs
|
mit
| 72 | 0 | 3 | 13 | 5 | 3 | 2 | 1 | 0 |
module Composition where
import Data.Vect
import Utils
ruleOfThirds :: Vec2 -> Vec2 -> [Vec2]
ruleOfThirds min max = [ Vec2 x y | x <- xs, y <- ys]
where xs = map (\x -> lerp (_1 min) (_1 max) x) [0, 1/3, 2/3, 3/3]
ys = map (\y -> lerp (_2 min) (_2 max) y) [0, 1/3, 2/3, 3/3]
toPolar :: (RealFloat a, Floating a) => a -> a -> (a,a)
toPolar x y = (r, phi)
    where r = sqrt (x ^ 2 + y ^ 2)
          phi = atan2 y x
fromPolar :: (RealFloat a) => a -> a -> (a,a)
fromPolar r t = (x,y)
where x = r * (cos t)
y = r * (sin t)
poissonDistrib :: [Vec2]
poissonDistrib = undefined
|
Vetii/Haskell-Art
|
Composition.hs
|
mit
| 669 | 0 | 12 | 211 | 390 | 213 | 177 | 19 | 1 |
module Main where
import Lib
main :: IO ()
main = do
let q = "qdq-gi.q-a ziatmxxitmdqibtqi-ustbi ri.qmoqrcxi.qbubu zir -ibtqi-qp-qaai ripmymsqkir -ibtqi-qy dmxi ri.cnxuoi rruoumxakir -ibtqiqzmobyqzbkii-q.qmxi -imyqzpyqzbi rixmeaki -puzmzoqai -i-qscxmbu zaimzpir -i btq-iymbbq-a;iz -iatmxximzgi.q-a zinqiuzimzgiemgipuao-uyuzmbqpimsmuzabir -ia. za -uzsiacotiimi.qbubu zj"
print $ head $ solve q "person"
|
pogin503/vbautil
|
language/haskell/caesar/app/Main.hs
|
mit
| 408 | 0 | 9 | 52 | 47 | 24 | 23 | 6 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Data.Aeson
import Data.String
import Data.Time (getCurrentTime)
import System.Directory
import System.Environment
import System.Exit
import System.IO
import System.IO.Error
import System.Process
import Pond.Data
import qualified Data.ByteString.Lazy.Char8 as LS
import qualified Data.Map as Map
data State = ParseAny | ParseTag | ParseDesc
-- | parseArgs parses the command-line arguments, creating a new 'Ripple'.
parseArgs :: [String] -> Ripple
parseArgs argv = parseArg ParseAny (Ripple "" Nothing []) argv
parseArg :: State -> Ripple -> [String] -> Ripple
parseArg ParseAny r ("-r":xs) = parseArg ParseTag r xs
parseArg ParseAny r (s:xs) = parseArg ParseAny (r { summary = s }) xs
parseArg ParseTag r (s:xs) = parseArg ParseAny (r { reflections = (reflections r) ++ [(fromString s :: Reflection)] }) xs
parseArg _ r _ = r
-- | parseEditorLines parses input received from an editor, creating a new 'Ripple'.
parseEditorLines :: [String] -> Ripple
parseEditorLines lns = parseEditorLine ParseAny (Ripple "" Nothing []) lns
parseEditorLine :: State -> Ripple -> [String] -> Ripple
parseEditorLine ParseAny r (s:xs) = parseEditorLine ParseDesc (r { summary = s }) xs
parseEditorLine ParseDesc r ("":xs) = parseEditorLine ParseDesc r xs
parseEditorLine ParseDesc r (("#"):xs) = parseEditorLine ParseDesc r xs
parseEditorLine ParseDesc r (('#':_):xs) = parseEditorLine ParseDesc r xs
parseEditorLine ParseDesc r (('R':'e':'f':'l':'e':'c':'t':'i':'o':'n':':':s):xs) =
parseEditorLine ParseTag r ([s] ++ xs)
parseEditorLine ParseDesc r (s:xs) =
case (description r) of
Just desc -> parseEditorLine ParseDesc (r { description = Just (desc ++ "\n" ++ s) }) xs
_ -> parseEditorLine ParseDesc (r { description = Just s }) xs
parseEditorLine ParseTag r (s:xs) =
parseEditorLine ParseDesc (r { reflections = (reflections r) ++ [(fromString s :: Reflection)] }) xs
parseEditorLine _ r _ = r
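-- Worked example (illustrative, not from the original source): an editor
-- buffer such as
--
--   Clean the gutters
--
--   They are full of leaves.
--   Reflection: chore
--
-- parses to a 'Ripple' whose summary is the first line, whose description is
-- "They are full of leaves." and which carries one reflection taken from the
-- text after "Reflection:".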
usageText :: String
usageText = "Usage: pond <command> [arg, ...]"
helpText :: String
helpText = unlines
[ ""
, "Commands:"
, "\tlist\t\tList ripples"
, "\tadd\t\tAdd a new ripple"
, "\tadd [-r reflection [-r ...]] [summary]"
, "\tedit\t\tEdit an existing ripple"
, "\tedit <id>"
, "\tshow\t\tPrint the contents of a ripple"
, "\tshow <id>"
, "\thelp\t\tShow help text"
, ""
, "pond is a utility for tracking and managing tasks and other lists. Each"
, "pond contains a collection of items called ripples. Each ripple must have"
, "a summary and may be tagged and grouped using labels called reflections."
, ""
, "Invoking the \"add\" command without arguments will open your"
, "system editor with a git-style format for modifying ripples."
]
templateText :: Ripple -> String
templateText r = unlines
[ (show r) ++
"# Edit the ripple above. Summary is the first line, followed by a blank line"
, "# then an optional long description. Add reflections with \"Reflection: reflection\"."
, "# Lines starting with \"#\" are ignored."
, "#"
, "# Example:"
, "# Clean the rain gutters"
, "#"
, "# The rain gutters at the house are getting pretty nasty. It's about time to"
, "# clean them out."
, "#"
, "# Reflection: chore"
, "# Reflection: weekend"
]
openEditor :: Ripple -> IO Ripple
openEditor ripple = do
tmpdir <- getTemporaryDirectory
(file, tmp) <- openTempFile tmpdir "ripple.txt"
hPutStr tmp (templateText ripple)
hFlush tmp
hClose tmp
(_, Just hout, _, _) <-
createProcess (proc "bash" ["-c", ("$EDITOR " ++ file ++ " && cat " ++ file)]){ std_out = CreatePipe }
contents <- hGetContents hout
return $ parseEditorLines (lines contents)
quietGetContents :: FilePath -> IO String
quietGetContents f = catchIOError (readFile f) (\_ -> return $ "")
readIndex :: FilePath -> IO Pond
readIndex base = do
contents <- quietGetContents (base ++ "/" ++ "index.json")
return $ case decode (LS.pack contents) of
Just p -> p
Nothing -> (Pond Nothing Nothing Map.empty)
readRipple :: FilePath -> FilePath -> IO Ripple
readRipple base sum = do
contents <- quietGetContents (base ++ "/" ++ sum ++ ".json")
return $ case decode (LS.pack contents) of
Just r -> r
Nothing -> (Ripple "" Nothing [])
writeIndex :: Pond -> FilePath -> IO ()
writeIndex pond base = do
let name = (base ++ "/" ++ "index.json")
let tmpName = (name ++ ".tmp")
writeFile tmpName (LS.unpack (encode pond))
catchIOError (removeFile name) (\_ -> return $ ())
renameFile tmpName name
writeRipple :: Ripple -> FilePath -> IO ()
writeRipple ripple base = do
writeFile (base ++ "/" ++ ((checksum ripple) ++ ".json")) (LS.unpack (encode ripple))
printUsageAndExit :: IO ()
printUsageAndExit = do
putStrLn usageText
exitWith (ExitFailure 1)
printHelpAndExit :: IO ()
printHelpAndExit = do
putStrLn usageText
putStr helpText
exitWith ExitSuccess
printShimmer :: Pond -> Shimmer -> IO ()
printShimmer p s = do
putStrLn $ "ID: " ++ (rippleId s) ++ "\nDate: " ++ (show (date s))
case nextM p s of
Just sh -> do
putStrLn ""
printShimmer p sh
Nothing -> return ()
main :: IO ()
main = do
pondDir <- getHomeDirectory >>= (\home -> return $ home ++ "/.pond")
_ <- createDirectoryIfMissing True pondDir
pond <- readIndex pondDir
getArgs >>= (\argv ->
case argv of
"add":xs -> do
let s = parseArgs xs
r <- case s of
(Ripple "" _ _) -> openEditor s
_ -> return $ s
now <- getCurrentTime
writeIndex (with r now pond) pondDir
writeRipple r pondDir
"edit":x:xs -> case searchM pond x of
Just sh -> do
now <- getCurrentTime
r <- readRipple pondDir (rippleId sh) >>= openEditor
writeIndex (replaceWith sh r now pond) pondDir
writeRipple r pondDir
_ -> exitWith (ExitFailure 1)
"show":x:xs -> case searchM pond x of
Just sh -> do
r <- readRipple pondDir (rippleId sh)
print r
_ -> exitWith (ExitFailure 1)
"list":xs -> do
case centerM pond of
Just s -> do
printShimmer pond s
_ -> exitWith (ExitFailure 1)
["help"] -> printHelpAndExit
"help":_ -> printHelpAndExit
unk:_ -> do
putStrLn $ "unknown command: " ++ unk
printUsageAndExit
_ -> printUsageAndExit)
|
tyler-sommer/stormy-pond
|
app/Main.hs
|
mit
| 6,463 | 0 | 22 | 1,493 | 2,081 | 1,053 | 1,028 | 164 | 12 |
{-# LANGUAGE OverloadedStrings #-}
import Network.HTTP.Client
import Network.HTTP.Client.TLS
import Network.API.Dozens
user :: Auth
user = def
{ authUser = "Your User Name"
, authKey = "Your API Key"
}
main :: IO ()
main = withManager tlsManagerSettings $ withDozens user $ do
run getZone >>= print
z <- run createZone { czZoneName = "dummy-dozens.jp" }
rs <- run createRecord
{ crDomain = "dummy-dozens.jp"
, crName = "www"
, crType = A
, crPriority = Just 10
, crBody = "192.168.1.10"
, crTtl = Just 7200
}
run updateRecord
{ urRecordId = recordId $ head rs
, urPriority = Just 50
}
run $ DeleteRecord (recordId $ head rs)
run $ DeleteZone (zoneId . head $ filter (("dummy-dozens.jp" ==) . zoneName) z)
return ()
|
philopon/dozens-hs
|
examples/implicit.hs
|
mit
| 863 | 1 | 15 | 271 | 263 | 135 | 128 | 25 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
module Database.Persist.Types.Base where
import qualified Data.Aeson as A
import Control.Exception (Exception)
import Web.PathPieces (PathPiece (..))
import Control.Monad.Trans.Error (Error (..))
import Data.Typeable (Typeable)
import Data.Text (Text, pack)
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Data.Text.Encoding.Error (lenientDecode)
import qualified Data.ByteString.Base64 as B64
import qualified Data.Vector as V
import Control.Arrow (second)
import Data.Time (Day, TimeOfDay, UTCTime)
import Data.Int (Int64)
import qualified Data.Text.Read
import Data.ByteString (ByteString, foldl')
import Data.Bits (shiftL, shiftR)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BS8
import Data.Map (Map)
import qualified Data.HashMap.Strict as HM
import Data.Word (Word32)
import Numeric (showHex, readHex)
#if MIN_VERSION_aeson(0, 7, 0)
import qualified Data.Scientific
#else
import qualified Data.Attoparsec.Number as AN
#endif
-- | A 'Checkmark' should be used as a field type whenever a
-- uniqueness constraint should guarantee that a certain kind of
-- record may appear at most once, but other kinds of records may
-- appear any number of times.
--
-- /NOTE:/ You need to mark any @Checkmark@ fields as @nullable@
-- (see the following example).
--
-- For example, suppose there's a @Location@ entity that
-- represents where a user has lived:
--
-- @
-- Location
-- user UserId
-- name Text
-- current Checkmark nullable
--
-- UniqueLocation user current
-- @
--
-- The @UniqueLocation@ constraint allows any number of
-- 'Inactive' @Location@s to be @current@. However, there may be
-- at most one @current@ @Location@ per user (i.e., either zero
-- or one per user).
--
-- This data type works because of the way that SQL treats
-- @NULL@able fields within uniqueness constraints. The SQL
-- standard says that @NULL@ values should be considered
-- different, so we represent 'Inactive' as SQL @NULL@, thus
-- allowing any number of 'Inactive' records. On the other hand,
-- we represent 'Active' as @TRUE@, so the uniqueness constraint
-- will disallow more than one 'Active' record.
--
-- /Note:/ There may be DBMSs that do not respect the SQL
-- standard's treatment of @NULL@ values on uniqueness
-- constraints, please check if this data type works before
-- relying on it.
--
-- The SQL @BOOLEAN@ type is used because it's the smallest data
-- type available. Note that we never use @FALSE@, just @TRUE@
-- and @NULL@. Provides the same behavior @Maybe ()@ would if
-- @()@ was a valid 'PersistField'.
data Checkmark = Active
-- ^ When used on a uniqueness constraint, there
-- may be at most one 'Active' record.
| Inactive
-- ^ When used on a uniqueness constraint, there
-- may be any number of 'Inactive' records.
deriving (Eq, Ord, Read, Show, Enum, Bounded)
instance PathPiece Checkmark where
toPathPiece = pack . show
fromPathPiece txt =
case reads (T.unpack txt) of
[(a, "")] -> Just a
_ -> Nothing
data IsNullable = Nullable !WhyNullable
| NotNullable
deriving (Eq, Show)
-- | The reason why a field is 'nullable' is very important. A
-- field that is nullable because of a @Maybe@ tag will have its
-- type changed from @A@ to @Maybe A@. OTOH, a field that is
-- nullable because of a @nullable@ tag will remain with the same
-- type.
data WhyNullable = ByMaybeAttr
| ByNullableAttr
deriving (Eq, Show)
data EntityDef = EntityDef
{ entityHaskell :: !HaskellName
, entityDB :: !DBName
, entityId :: !FieldDef
, entityAttrs :: ![Attr]
, entityFields :: ![FieldDef]
, entityUniques :: ![UniqueDef]
, entityForeigns:: ![ForeignDef]
, entityDerives :: ![Text]
, entityExtra :: !(Map Text [ExtraLine])
, entitySum :: !Bool
}
deriving (Show, Eq, Read, Ord)
entityPrimary :: EntityDef -> Maybe CompositeDef
entityPrimary t = case fieldReference (entityId t) of
CompositeRef c -> Just c
_ -> Nothing
entityKeyFields :: EntityDef -> [FieldDef]
entityKeyFields ent = case entityPrimary ent of
Nothing -> [entityId ent]
Just pdef -> compositeFields pdef
keyAndEntityFields :: EntityDef -> [FieldDef]
keyAndEntityFields ent =
case entityPrimary ent of
Nothing -> entityId ent : entityFields ent
Just _ -> entityFields ent
type ExtraLine = [Text]
newtype HaskellName = HaskellName { unHaskellName :: Text }
deriving (Show, Eq, Read, Ord)
newtype DBName = DBName { unDBName :: Text }
deriving (Show, Eq, Read, Ord)
type Attr = Text
data FieldType
= FTTypeCon (Maybe Text) Text
-- ^ Optional module and name.
| FTApp FieldType FieldType
| FTList FieldType
deriving (Show, Eq, Read, Ord)
data FieldDef = FieldDef
{ fieldHaskell :: !HaskellName -- ^ name of the field
, fieldDB :: !DBName
, fieldType :: !FieldType
, fieldSqlType :: !SqlType
, fieldAttrs :: ![Attr] -- ^ user annotations for a field
, fieldStrict :: !Bool -- ^ a strict field in the data type. Default: true
, fieldReference :: !ReferenceDef
}
deriving (Show, Eq, Read, Ord)
-- | There are 3 kinds of references
-- 1) composite (to fields that exist in the record)
-- 2) single field
-- 3) embedded
data ReferenceDef = NoReference
| ForeignRef !HaskellName !FieldType
-- ^ A ForeignRef has a late binding to the EntityDef it references via HaskellName and has the Haskell type of the foreign key in the form of FieldType
| EmbedRef EmbedEntityDef
| CompositeRef CompositeDef
| SelfReference
-- ^ A SelfReference stops an immediate cycle which causes non-termination at compile-time (issue #311).
deriving (Show, Eq, Read, Ord)
-- | An EmbedEntityDef is the same as an EntityDef
-- But it is only used for fieldReference
-- so it only has data needed for embedding
data EmbedEntityDef = EmbedEntityDef
{ embeddedHaskell :: !HaskellName
, embeddedFields :: ![EmbedFieldDef]
} deriving (Show, Eq, Read, Ord)
-- | An EmbedFieldDef is the same as a FieldDef
-- But it is only used for embeddedFields
-- so it only has data needed for embedding
data EmbedFieldDef = EmbedFieldDef
{ emFieldDB :: !DBName
, emFieldEmbed :: Maybe EmbedEntityDef
, emFieldCycle :: Maybe HaskellName
-- ^ 'emFieldEmbed' can create a cycle (issue #311)
-- when a cycle is detected, 'emFieldEmbed' will be Nothing
-- and 'emFieldCycle' will be Just
}
deriving (Show, Eq, Read, Ord)
toEmbedEntityDef :: EntityDef -> EmbedEntityDef
toEmbedEntityDef ent = embDef
where
embDef = EmbedEntityDef
{ embeddedHaskell = entityHaskell ent
, embeddedFields = map toEmbedFieldDef $ entityFields ent
}
toEmbedFieldDef :: FieldDef -> EmbedFieldDef
toEmbedFieldDef field =
EmbedFieldDef { emFieldDB = fieldDB field
, emFieldEmbed = case fieldReference field of
EmbedRef em -> Just em
SelfReference -> Just embDef
_ -> Nothing
, emFieldCycle = case fieldReference field of
SelfReference -> Just $ entityHaskell ent
_ -> Nothing
}
data UniqueDef = UniqueDef
{ uniqueHaskell :: !HaskellName
, uniqueDBName :: !DBName
, uniqueFields :: ![(HaskellName, DBName)]
, uniqueAttrs :: ![Attr]
}
deriving (Show, Eq, Read, Ord)
data CompositeDef = CompositeDef
{ compositeFields :: ![FieldDef]
, compositeAttrs :: ![Attr]
}
deriving (Show, Eq, Read, Ord)
-- | Used instead of FieldDef
-- to generate a smaller amount of code
type ForeignFieldDef = (HaskellName, DBName)
data ForeignDef = ForeignDef
{ foreignRefTableHaskell :: !HaskellName
, foreignRefTableDBName :: !DBName
, foreignConstraintNameHaskell :: !HaskellName
, foreignConstraintNameDBName :: !DBName
, foreignFields :: ![(ForeignFieldDef, ForeignFieldDef)] -- this entity plus the primary entity
, foreignAttrs :: ![Attr]
, foreignNullable :: Bool
}
deriving (Show, Eq, Read, Ord)
data PersistException
= PersistError Text -- ^ Generic Exception
| PersistMarshalError Text
| PersistInvalidField Text
| PersistForeignConstraintUnmet Text
| PersistMongoDBError Text
| PersistMongoDBUnsupported Text
deriving (Show, Typeable)
instance Exception PersistException
instance Error PersistException where
strMsg = PersistError . pack
-- | A raw value which can be stored in any backend and can be marshalled to
-- and from a 'PersistField'.
data PersistValue = PersistText Text
| PersistByteString ByteString
| PersistInt64 Int64
| PersistDouble Double
| PersistRational Rational
| PersistBool Bool
| PersistDay Day
| PersistTimeOfDay TimeOfDay
| PersistUTCTime UTCTime
| PersistNull
| PersistList [PersistValue]
| PersistMap [(Text, PersistValue)]
| PersistObjectId ByteString -- ^ Intended especially for MongoDB backend
| PersistDbSpecific ByteString -- ^ Using 'PersistDbSpecific' allows you to use types specific to a particular backend
-- For example, below is a simple example of the PostGIS geography type:
--
-- @
-- data Geo = Geo ByteString
--
-- instance PersistField Geo where
-- toPersistValue (Geo t) = PersistDbSpecific t
--
-- fromPersistValue (PersistDbSpecific t) = Right $ Geo $ Data.ByteString.concat ["'", t, "'"]
-- fromPersistValue _ = Left "Geo values must be converted from PersistDbSpecific"
--
-- instance PersistFieldSql Geo where
-- sqlType _ = SqlOther "GEOGRAPHY(POINT,4326)"
--
-- toPoint :: Double -> Double -> Geo
-- toPoint lat lon = Geo $ Data.ByteString.concat ["'POINT(", ps $ lon, " ", ps $ lat, ")'"]
-- where ps = Data.Text.pack . show
-- @
--
-- If Foo has a geography field, we can then perform insertions like the following:
--
-- @
-- insert $ Foo (toPoint 44 44)
-- @
--
deriving (Show, Read, Eq, Typeable, Ord)
instance PathPiece PersistValue where
fromPathPiece t =
case Data.Text.Read.signed Data.Text.Read.decimal t of
Right (i, t')
| T.null t' -> Just $ PersistInt64 i
_ -> case reads $ T.unpack t of
[(fks, "")] -> Just $ PersistList fks
_ -> Just $ PersistText t
toPathPiece x =
case fromPersistValueText x of
Left e -> error $ T.unpack e
Right y -> y
fromPersistValueText :: PersistValue -> Either Text Text
fromPersistValueText (PersistText s) = Right s
fromPersistValueText (PersistByteString bs) =
Right $ TE.decodeUtf8With lenientDecode bs
fromPersistValueText (PersistInt64 i) = Right $ T.pack $ show i
fromPersistValueText (PersistDouble d) = Right $ T.pack $ show d
fromPersistValueText (PersistRational r) = Right $ T.pack $ show r
fromPersistValueText (PersistDay d) = Right $ T.pack $ show d
fromPersistValueText (PersistTimeOfDay d) = Right $ T.pack $ show d
fromPersistValueText (PersistUTCTime d) = Right $ T.pack $ show d
fromPersistValueText PersistNull = Left "Unexpected null"
fromPersistValueText (PersistBool b) = Right $ T.pack $ show b
fromPersistValueText (PersistList _) = Left "Cannot convert PersistList to Text"
fromPersistValueText (PersistMap _) = Left "Cannot convert PersistMap to Text"
fromPersistValueText (PersistObjectId _) = Left "Cannot convert PersistObjectId to Text"
fromPersistValueText (PersistDbSpecific _) = Left "Cannot convert PersistDbSpecific to Text"
instance A.ToJSON PersistValue where
toJSON (PersistText t) = A.String $ T.cons 's' t
toJSON (PersistByteString b) = A.String $ T.cons 'b' $ TE.decodeUtf8 $ B64.encode b
toJSON (PersistInt64 i) = A.Number $ fromIntegral i
toJSON (PersistDouble d) = A.Number $
#if MIN_VERSION_aeson(0, 7, 0)
Data.Scientific.fromFloatDigits
#else
AN.D
#endif
d
toJSON (PersistRational r) = A.String $ T.pack $ 'r' : show r
toJSON (PersistBool b) = A.Bool b
toJSON (PersistTimeOfDay t) = A.String $ T.pack $ 't' : show t
toJSON (PersistUTCTime u) = A.String $ T.pack $ 'u' : show u
toJSON (PersistDay d) = A.String $ T.pack $ 'd' : show d
toJSON PersistNull = A.Null
toJSON (PersistList l) = A.Array $ V.fromList $ map A.toJSON l
toJSON (PersistMap m) = A.object $ map (second A.toJSON) m
toJSON (PersistDbSpecific b) = A.String $ T.cons 'p' $ TE.decodeUtf8 $ B64.encode b
toJSON (PersistObjectId o) =
A.toJSON $ showChar 'o' $ showHexLen 8 (bs2i four) $ showHexLen 16 (bs2i eight) ""
where
(four, eight) = BS8.splitAt 4 o
-- taken from crypto-api
bs2i :: ByteString -> Integer
bs2i bs = foldl' (\i b -> (i `shiftL` 8) + fromIntegral b) 0 bs
{-# INLINE bs2i #-}
-- showHex of n padded with leading zeros if necessary to fill d digits
-- taken from Data.BSON
showHexLen :: (Show n, Integral n) => Int -> n -> ShowS
showHexLen d n = showString (replicate (d - sigDigits n) '0') . showHex n where
sigDigits 0 = 1
sigDigits n' = truncate (logBase (16 :: Double) $ fromIntegral n') + 1
instance A.FromJSON PersistValue where
parseJSON (A.String t0) =
case T.uncons t0 of
Nothing -> fail "Null string"
Just ('p', t) -> either (fail "Invalid base64") (return . PersistDbSpecific)
$ B64.decode $ TE.encodeUtf8 t
Just ('s', t) -> return $ PersistText t
Just ('b', t) -> either (fail "Invalid base64") (return . PersistByteString)
$ B64.decode $ TE.encodeUtf8 t
Just ('t', t) -> fmap PersistTimeOfDay $ readMay t
Just ('u', t) -> fmap PersistUTCTime $ readMay t
Just ('d', t) -> fmap PersistDay $ readMay t
Just ('r', t) -> fmap PersistRational $ readMay t
Just ('o', t) -> maybe (fail "Invalid base64") (return . PersistObjectId) $
fmap (i2bs (8 * 12) . fst) $ headMay $ readHex $ T.unpack t
Just (c, _) -> fail $ "Unknown prefix: " ++ [c]
where
headMay [] = Nothing
headMay (x:_) = Just x
readMay :: (Read a, Monad m) => T.Text -> m a
readMay t =
case reads $ T.unpack t of
(x, _):_ -> return x
[] -> fail "Could not read"
-- taken from crypto-api
-- |@i2bs bitLen i@ converts @i@ to a 'ByteString' of @bitLen@ bits (must be a multiple of 8).
i2bs :: Int -> Integer -> BS.ByteString
i2bs l i = BS.unfoldr (\l' -> if l' < 0 then Nothing else Just (fromIntegral (i `shiftR` l'), l' - 8)) (l-8)
{-# INLINE i2bs #-}
#if MIN_VERSION_aeson(0, 7, 0)
parseJSON (A.Number n) = return $
if fromInteger (floor n) == n
then PersistInt64 $ floor n
else PersistDouble $ fromRational $ toRational n
#else
parseJSON (A.Number (AN.I i)) = return $ PersistInt64 $ fromInteger i
parseJSON (A.Number (AN.D d)) = return $ PersistDouble d
#endif
parseJSON (A.Bool b) = return $ PersistBool b
parseJSON A.Null = return $ PersistNull
parseJSON (A.Array a) = fmap PersistList (mapM A.parseJSON $ V.toList a)
parseJSON (A.Object o) =
fmap PersistMap $ mapM go $ HM.toList o
where
go (k, v) = fmap ((,) k) $ A.parseJSON v
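-- Illustrative note (not from the original source): the single-character
-- prefixes used above make the JSON encoding reversible. For example,
-- @toJSON (PersistText "hello")@ is the JSON string @"shello"@, and
-- 'parseJSON' strips the leading 's' to recover @PersistText "hello"@.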
-- | A SQL data type. Naming attempts to reflect the underlying Haskell
-- datatypes, eg SqlString instead of SqlVarchar. Different SQL databases may
-- have different translations for these types.
data SqlType = SqlString
| SqlInt32
| SqlInt64
| SqlReal
| SqlNumeric Word32 Word32
| SqlBool
| SqlDay
| SqlTime
| SqlDayTime -- ^ Always uses UTC timezone
| SqlBlob
| SqlOther T.Text -- ^ a backend-specific name
deriving (Show, Read, Eq, Typeable, Ord)
data PersistFilter = Eq | Ne | Gt | Lt | Ge | Le | In | NotIn
| BackendSpecificFilter T.Text
deriving (Read, Show)
data UpdateException = KeyNotFound String
| UpsertError String
deriving Typeable
instance Show UpdateException where
show (KeyNotFound key) = "Key not found during updateGet: " ++ key
show (UpsertError msg) = "Error during upsert: " ++ msg
instance Exception UpdateException
data OnlyUniqueException = OnlyUniqueException String deriving Typeable
instance Show OnlyUniqueException where
show (OnlyUniqueException uniqueMsg) =
"Expected only one unique key, got " ++ uniqueMsg
instance Exception OnlyUniqueException
data PersistUpdate = Assign | Add | Subtract | Multiply | Divide
| BackendSpecificUpdate T.Text
deriving (Read, Show)
|
nakaji-dayo/persistent
|
persistent/Database/Persist/Types/Base.hs
|
mit
| 17,508 | 0 | 18 | 4,670 | 3,848 | 2,096 | 1,752 | 352 | 4 |
import Data.List
import qualified Data.Map as Map
data Link = Link {from :: String, to :: String, cost :: Int}
orderedTuple a b = if a > b then (a, b) else (b, a)
parseFile :: String -> [Link]
parseFile = map parseLine . lines
parseLine :: String -> Link
parseLine str = Link from to (read cost)
where [from, _, to, _, cost] = words str
getAllCities :: [Link] -> [String]
getAllCities = nub . concatMap (\x-> [from x, to x])
createCostIndex :: [Link] -> Map.Map (String, String) Int
createCostIndex = Map.fromList . map getKey
where getKey x = (orderedTuple (from x) (to x), cost x)
pathCost :: Map.Map (String, String) Int -> [String] -> Int
pathCost costIndex xs = sum $ (zipWith cost <*> tail) xs
where cost x y= costIndex Map.! (orderedTuple x y)
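-- A small worked example of the applicative trick above (a sketch with a
-- hypothetical cost index): (zipWith cost <*> tail) xs is the same as
-- zipWith cost xs (tail xs), i.e. the costs of consecutive legs of the path.
--
-- >>> let idx = Map.fromList [(("B","A"), 1), (("C","B"), 2)]
-- >>> pathCost idx ["A", "B", "C"]
-- 3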
compute f file = f $ map (pathCost costIndex) $ permutations cities
where
path = parseFile file
cities = getAllCities path
costIndex = createCostIndex path
main = do
file <- readFile "inputDay9.txt"
print $ compute minimum file
print $ compute maximum file
|
bruno-cadorette/AdventOfCode
|
Day 9/Day9.hs
|
mit
| 1,087 | 0 | 10 | 258 | 454 | 240 | 214 | 25 | 2 |
-- |
-- HJS Haskell Javascript Interpreter
-- (c) Mark Wassell 2007
-- See LICENSE file for license details
module Main(main,parseProgram,parseTest,parseFile) where
import Data.ByteString.Char8(split,pack,unpack,intercalate,readInt)
import Data.List
import qualified Data.Map as M
import Data.Ord
import Data.Char
import System.Console.GetOpt
import Data.Maybe ( fromMaybe )
import Control.Monad.Identity
import Control.Monad.Error
import Control.Monad.State
import System.Directory
import System.Environment
import HJS.Parser
import HJS.Parser.JavaScript
import HJS.Interpreter.InterpMDecl
import HJS.Interpreter.InterpM hiding (getArgs)
import HJS.Interpreter
data VFlag = Quiet | ShowProgress | ShowErrors | ShowAST deriving (Show,Eq)
-- Parse a file with supplied flags
parseFile :: [VFlag] -> String -> IO ()
parseFile flags fname = do
ifM (elem ShowProgress flags) (putStr $ "Parsing \"" ++ fname) (return ())
s <- readFile fname
handleResult flags $ parseProgram s
handleResult flags (Right r) | (elem ShowProgress flags) = putStrLn ("\" ok")
| (elem ShowAST flags) = putStrLn ("\" ok " ++ show r)
| otherwise = return ()
handleResult flags (Left r) | (elem ShowProgress flags) = putStrLn ("\" failed")
| (elem ShowErrors flags) = putStrLn ("\" failed" ++ show r)
| otherwise = return ()
runFile flags fname = do
s <- readFile fname
runString flags s >>= putStrLn . show
runString :: [RunFlag] -> String -> IO Bool
runString flags s = do
case parseProgram s of
Right r -> runProgram flags r
Left s -> putStrLn (show s) >> return False
runTest = testFiles "c:/Mark/MyDevelopments/haskell/HJS/hjs-0.2/testsuite" (runFile [])
main = do
s <- getArgs
main' s
main' args = do
(opt, files) <- interpOpts args
case (foldr (\x s -> case x of Version -> (True || s); _ -> s) False opt) of
True -> putStrLn "HJS - JavaScript Parser - Version 0.1"
_ -> return ()
let vf = foldr (\x s -> case x of (Verbose f) -> (f:s);_ -> s) [] opt
mapM_ (runFile []) files
parseTest = do
-- testFiles "c:/Mark/MyDevelopments/haskell/HJS/hjs-0.2/testsuite" (parseFile [ShowProgress])
testFiles "c:/Mark/MyDevelopments/haskell/HJS/hjs-0.2/testsuite/parsingonly" (parseFile [ShowProgress])
testFiles dir action = do fileList <- getTestFiles dir
mapM_ action fileList
getTestFiles dirName = do
dirList <- getDirectoryContents dirName
return $ map (\f -> dirName ++ "/" ++ f) $
map unpack $
map (Data.ByteString.Char8.intercalate (pack "_")) $
sortBy (comparing (readInt . head)) $
filter (\l -> (<=) 2 (length l)) $
map (split '_' . pack) $
filter (\x -> last x /= '~' || head x == '.' ) dirList
data Flag = Verbose VFlag | Version
deriving Show
options :: [OptDescr Flag]
options =
[ Option ['v'] ["verbose"] (OptArg vflag "LEVEL" ) "=1 show progrss, =2 show errors =3 show AST"
, Option ['V','?'] ["version"] (NoArg Version) "show version number"
]
vflag :: Maybe String -> Flag
vflag Nothing = Verbose Quiet
vflag (Just s) = let s' = filter (\c -> not $ isSpace c) s
in case s' of
"1" -> Verbose ShowProgress
"2" -> Verbose ShowErrors
"3" -> Verbose ShowAST
_ -> error $ "Cannot parse verbosity option " ++ s'
--inp,outp :: Maybe String -> Flag
--outp = Output . fromMaybe "stdout"
--inp = Input . fromMaybe "stdin"
interpOpts :: [String] -> IO ([Flag], [String])
interpOpts argv =
case getOpt Permute options argv of
(o,n,[] ) -> return (o,n)
(_,_,errs) -> ioError (userError (concat errs ++ usageInfo header options))
where header = "Usage: hjs [OPTION...] files..."
ifM a b c = do
case a of
True -> b
False -> c
|
nbrunt/JSHOP
|
res/hjs-0.2.1/src/Main.hs
|
mit
| 4,371 | 15 | 17 | 1,428 | 1,320 | 683 | 637 | 86 | 4 |
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable #-}
module Lambda.Derive.Instance where
import Lambda.Type
import Autolib.ToDoc
import Autolib.Reader
import Data.Typeable
data Type = Make
{ from :: Lambda
, to :: Lambda
-- | if @Just x@, then exactly @x@ steps, otherwise any number of steps
, steps :: Maybe Int
}
deriving ( Typeable, Eq, Ord )
$(derives [makeReader, makeToDoc] [''Type])
example :: Type
example = Make
{ from = read "(x -> x x)( x -> x x)"
, to = read "(x -> x x)( x -> x x)"
, steps = Just 2
}
initial :: Lambda -> Type
initial t = Make
{ from = t
, to = t
, steps = Just 0
}
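-- 'initial' builds the trivial instance for a term @t@: source and target
-- are both @t@, and exactly zero derivation steps are required.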
-- local variables:
-- mode: haskell
-- end
|
Erdwolf/autotool-bonn
|
src/Lambda/Derive/Instance.hs
|
gpl-2.0
| 743 | 0 | 9 | 240 | 179 | 105 | 74 | 22 | 1 |
{-# LANGUAGE DeriveDataTypeable, FlexibleContexts, FlexibleInstances,
FunctionalDependencies, GeneralizedNewtypeDeriving,
MultiParamTypeClasses, TypeSynonymInstances #-}
module Yi.Scion where
import Yi.Prelude
import Yi.Core (msgEditor)
import Prelude (lines)
import Bag
import Data.Maybe
import GHC
import HscTypes
import qualified Outputable as O
import Scion
import Scion.Types hiding (gets)
import Scion.Utils
import Outputable
import GHC.SYB.Utils
import PprTyThing (pprTypeForUser)
import FastString (fsLit) -- ghosts
import Yi.Buffer
import Yi.Editor
import Yi.Keymap
import Scion.Inspect ( prettyResult )
import Scion.Inspect.Find ( overlaps, findHsThing, pathToDeepest)
import Scion.Inspect.TypeOf ( typeOf )
loadFile :: String -> ScionM TypecheckedModule
loadFile fn = do
addTarget =<< guessTarget fn Nothing
Scion.load LoadAllTargets
(m:_) <- modulesInDepOrder
typecheckModule =<< parseModule m
functionType :: (Int, Int) -- ^ The line and column of the current point
-> String -- ^ The filename in which the point is positioned
-> ScionM String -- ^ The `ScionM` action resulting in the function's type
functionType pt fn = do
addTarget =<< guessTarget fn Nothing
s <- handleSourceError handleError $ do
mss <- modulesInDepOrder
forM mss $ \m -> do
module' <- loadModule =<< typecheckModule =<< parseModule m
let t = n . fun_matches . unLoc .
last . bagToList . last . bagToList . mapBag m' .
filterBag (\l -> spans (getLoc l) pt) . typecheckedSource
m' = abs_binds . unLoc
n (MatchGroup _ t') = t'
return $ showSDoc $ ppr $ t module'
return . last $ s
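-- A usage sketch (file name and position are hypothetical):
-- runScion (functionType (10, 3) "Foo.hs") yields, in IO, the pretty-printed
-- type of the binding that spans line 10, column 3 of Foo.hs.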
-- maybe this can be improved by using "thingsAroundPoint"
play :: (Int, Int) -- ^ The line and column of the current point
-> String -- ^ The filename in which the point is positioned
-> ScionM [String] -- ^ The `ScionM` action resulting in the function's type
play pt fn = do
addTarget =<< guessTarget fn Nothing
handleSourceError handleError $ do
[m] <- modulesInDepOrder
module' <- loadModule =<< typecheckModule =<< parseModule m
lines <$> thingAtPoint pt fn module'
thingsAtPoint :: (Int, Int) -> String -> ScionM String
thingsAtPoint pt fn = do
addTarget =<< guessTarget fn Nothing
Scion.load LoadAllTargets
mss <- modulesInDepOrder
show <$> forM mss (\ms -> do
mod <- typecheckModule =<< parseModule ms
let Just (grp, _, _, _, _) = renamedSource mod
let bnds = typecheckedSource mod
let tyclds = thingsAroundPoint pt (hs_tyclds grp)
let ValBindsOut valds _ = hs_valds grp
return $ showData TypeChecker 2 bnds)
handleError :: SourceError -> ScionM [String]
handleError err = return [show err]
-- This is copied from Protocol.Vim
thingAtPoint :: (TypecheckedMod m) => (Int, Int) -> String -> m -> ScionM String
thingAtPoint (line,col) fname tcm = do
let loc = srcLocSpan $ mkSrcLoc (fsLit fname) line col
--let Just (src, _, _, _, _) = renamedSource tcm
let src = typecheckedSource tcm
--let in_range = const True
let in_range = overlaps loc
let r = findHsThing in_range src
--return (Just (O.showSDoc (O.ppr $ S.toList r)))
unqual <- unqualifiedForModule tcm
case pathToDeepest r of
Nothing -> return ("no info")
Just (x,xs) ->
--return $ Just (O.showSDoc (O.ppr x O.$$ O.ppr xs))
case typeOf (x,xs) of
Just t ->
return $ O.showSDocForUser unqual
(prettyResult x O.<+> O.dcolon O.<+>
pprTypeForUser True t)
_ -> return $ O.showSDocDebug (O.ppr x O.$$ O.ppr xs )
runScionWithLocation :: Show a => ((Int, Int) -> String -> ScionM a) -> YiM a
runScionWithLocation f = do
(pt, fn) <- withEditor $ withBuffer0 $ do
ln <- curLn
col <- curCol
Just fn <- gets file
return ((ln, col), fn)
io $ runScion $ do
-- openCabalProject "." "dist" fails ...
-- loadFile fn
f pt fn
scion :: YiM ()
scion = msgEditor =<< show <$> runScionWithLocation play
|
codemac/yi-editor
|
src/Yi/Scion.hs
|
gpl-2.0
| 4,208 | 0 | 26 | 1,088 | 1,181 | 594 | 587 | 97 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module JSON (tests) where
import Control.Applicative
import Control.Lens hiding ((.=))
import Data.Aeson
import Data.Monoid (mempty)
import MusicBrainz
import MusicBrainz.Data
import MusicBrainz.Data.ArtistCredit
import MusicBrainz.Data.Edit
import MusicBrainz.Data.Editor
import Test.MusicBrainz
import MusicBrainz.API.JSON ()
tests :: [Test]
tests =
[ testGroup "ToJSON Release"
[ testCase "Full release" $ do
editor <- testEditor
r <- autoEdit $ do
ac <- testArtistCredit editor
rg <- fmap coreRef . viewRevision =<< create editor
ReleaseGroupTree
{ releaseGroupData = ReleaseGroup
{ releaseGroupName = "RG"
, releaseGroupComment = ""
, releaseGroupArtistCredit = ac
, releaseGroupPrimaryType = Nothing
, releaseGroupSecondaryTypes = mempty
}
, releaseGroupRelationships = mempty
, releaseGroupAnnotation = ""
}
official <- entityRef <$> add ReleaseStatus { releaseStatusName = "Official" }
digipak <- entityRef <$> add ReleasePackaging { releasePackagingName = "Digipak" }
english <- entityRef <$> add Language
{ languageName = "English"
, languageIsoCode2t = "eng"
, languageIsoCode2b = "eng"
, languageIsoCode1 = "en"
, languageIsoCode3 = "eng"
}
latin <- entityRef <$> add Script
{ scriptName = "Latin"
, scriptIsoNumber = "215"
, scriptIsoCode = "Latn"
}
uk <- entityRef <$> add Country
{ countryIsoCode = "GB"
, countryName = "United Kingdom"
}
return Release { releaseName = "Release"
, releaseComment = "Comment"
, releaseArtistCredit = ac
, releaseReleaseGroup = rg
, releaseDate = (Just 2012, Just 2, Just 15) ^?! partialDate
, releaseCountry = Just uk
, releaseScript = Just latin
, releasePackaging = Just digipak
, releaseStatus = Just official
, releaseBarcode = Just $ "0013964381993" ^?! barcode
, releaseLanguage = Just english
}
toJSON r @?=
object [ "name" .= releaseName r
, "comment" .= releaseComment r
, "artist-credit" .= releaseArtistCredit r
, "release-group" .= releaseReleaseGroup r
, "date" .= releaseDate r
, "country" .= releaseCountry r
, "script" .= releaseScript r
, "packaging" .= releasePackaging r
, "status" .= releaseStatus r
, "barcode" .= releaseBarcode r
, "language" .= releaseLanguage r
]
]
, testCase "ToJSON Track" $ do
editor <- testEditor
track <- autoEdit $ do
ac <- testArtistCredit editor
recording <- fmap coreRef . viewRevision =<< create editor
RecordingTree { recordingData = Recording { recordingName = "R"
, recordingComment = mempty
, recordingArtistCredit = ac
, recordingDuration = Just 12
}
, recordingRelationships = mempty
, recordingAnnotation = mempty
, recordingIsrcs = mempty
, recordingPuids = mempty
}
return Track { trackName = "Track name"
, trackRecording = recording
, trackDuration = Just 12345
, trackArtistCredit = ac
, trackPosition = "4"
}
toJSON track @?=
object [ "name" .= trackName track
, "artist-credit" .= trackArtistCredit track
, "length" .= trackDuration track
, "number" .= trackPosition track
, "recording" .= trackRecording track
]
]
--------------------------------------------------------------------------------
testArtistRef :: Ref Editor -> EditM (Ref Artist)
testArtistRef editor = fmap coreRef . viewRevision =<< create editor
ArtistTree
{ artistData = Artist { artistName = "Artist"
, artistSortName = "Artist"
, artistComment = ""
, artistBeginDate = emptyDate
, artistEndDate = emptyDate
, artistGender = Nothing
, artistCountry = Nothing
, artistType = Nothing
, artistEnded = False
}
, artistRelationships = mempty
, artistAliases = mempty
, artistIpiCodes = mempty
, artistAnnotation = mempty
}
testArtistCredit :: Ref Editor -> EditM (Ref ArtistCredit)
testArtistCredit editor = do
a <- testArtistRef editor
getRef [ ArtistCreditName a "Artist" "" ]
testEditor :: MusicBrainz (Ref Editor)
testEditor = entityRef <$> register
Editor { editorName = "ocharles", editorPassword = "" }
|
metabrainz/musicbrainz-data-service
|
test/suite/JSON.hs
|
gpl-2.0
| 5,716 | 0 | 21 | 2,448 | 1,066 | 584 | 482 | 115 | 1 |
{-# OPTIONS_HADDOCK ignore-exports #-}
module Main (setup, main) where
import Control.Monad
import Graphics.UI.Threepenny.Core
import qualified Graphics.UI.Threepenny.Elements as E
import qualified EXReactive as R
import EXData
main :: IO()
main = startGUI defaultConfig setup
-- We start a server by using the startGUI function.
-- because we didn't put any settings in defaultConfig, the port is taken from the environment (defaulting to 8023) and the address is 127.0.0.1
-- Whenever a browser connects to the server, the following function will be executed to start the GUI interaction. It builds the initial HTML page.
setup :: Window -> UI ()
setup window = void $ do
_ <- return window # set title "Excell"
-- we set the title of the HTML document
getBody window #+ [E.h1 #+ [string "Reactive Excell"]]
-- separates events for each cell
let
width = 5 :: Int
height = 5 :: Int
cellEventsUI :: [[UI(Event FeedbackValue, Handler FeedbackValue)]]
cellEventsUI = [[liftIO newEvent | _ <- [1..width]] | _<- [1..height]]
cellEvents <- mapM sequence cellEventsUI
-- make display event
displayEvent <- liftIO newEvent :: UI(Event String, Handler String)
let
-- joined event
joinEvent :: Event [FeedbackValue]
joinEvent = unions $ concatMap (map fst) cellEvents
-- coordinates of the cells
coordinates :: [[R.Coordinates]]
coordinates = [[(a,b)| a <- [1..width]] | b <- [1..height]]
outputsUI :: [[UI Element]]
outputsUI = map (map ( \(a,b) -> R.ioCell joinEvent b (snd displayEvent) a)) $ zipWith zip coordinates $ fmap (fmap snd) cellEvents
-- get cell elements from UI monad
outputs <- mapM sequence outputsUI
-- construct html cell distribution
displayEl <- R.displayElement $ fst displayEvent
getBody window #+ [column [ R.makeGrid outputs displayEl]]
-- sign "#+" is combinator that allows us to nest elements quickly in the style of a HTML combination library
-- on the left side of "#+" is the body of the HTML element and we attach to it what is on the right side of "#+"
|
lepoticka/ExcellingCabbage
|
excell.hs
|
gpl-2.0
| 2,067 | 0 | 20 | 430 | 500 | 269 | 231 | 30 | 1 |
{-# LANGUAGE GADTs #-}
-----------------------------------------------------------------------------
-- |
-- Module : Hoodle.Coroutine.Mode
-- Copyright : (c) 2011-2013 Ian-Woo Kim
--
-- License : BSD3
-- Maintainer : Ian-Woo Kim <[email protected]>
-- Stability : experimental
-- Portability : GHC
--
-----------------------------------------------------------------------------
module Hoodle.Coroutine.Mode where
import Control.Applicative
import Control.Lens (view,set,over)
import Control.Monad.State
import qualified Data.IntMap as M
import Graphics.UI.Gtk (adjustmentGetValue)
-- from hoodle-platform
import Data.Hoodle.BBox
import Data.Hoodle.Generic
import Data.Hoodle.Select
import Graphics.Hoodle.Render
import Graphics.Hoodle.Render.Type
-- from this package
import Hoodle.Accessor
import Hoodle.Coroutine.Draw
import Hoodle.Coroutine.Scroll
import Hoodle.GUI.Reflect
import Hoodle.Type.Alias
import Hoodle.Type.Canvas
import Hoodle.Type.Coroutine
import Hoodle.Type.Enum
import Hoodle.Type.Event
import Hoodle.Type.HoodleState
import Hoodle.Type.PageArrangement
import Hoodle.View.Coordinate
--
import Prelude hiding (mapM_, mapM)
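-- | Switch between pen (edit) mode and selection mode. 'ToViewAppendMode'
-- leaves selection mode: the selected page buffer is re-rendered and the
-- select-mode hoodle is converted back into an ordinary one ('select2edit'
-- below); 'ToSelectMode' does the reverse ('edit2select'). Afterwards the
-- pen mode, colour and width UI are refreshed.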
modeChange :: UserEvent -> MainCoroutine ()
modeChange command = do
case command of
ToViewAppendMode -> updateXState select2edit >> invalidateAll
-- invalidateAllInBBox Nothing Efficient -- invalidateAll
ToSelectMode -> updateXState edit2select >> invalidateAllInBBox Nothing Efficient -- invalidateAll
_ -> return ()
reflectPenModeUI
reflectPenColorUI
reflectPenWidthUI
where select2edit xst =
either (noaction xst) (whenselect xst) . hoodleModeStateEither . view hoodleModeState $ xst
edit2select xst =
either (whenedit xst) (noaction xst) . hoodleModeStateEither . view hoodleModeState $ xst
noaction :: HoodleState -> a -> MainCoroutine HoodleState
noaction xstate = const (return xstate)
whenselect :: HoodleState -> Hoodle SelectMode -> MainCoroutine HoodleState
whenselect xstate thdl = do
let pages = view gselAll thdl
mselect = view gselSelected thdl
npages <- maybe (return pages)
(\(spgn,spage) -> do
npage <- (liftIO.updatePageBuf.hPage2RPage) spage
return $ M.adjust (const npage) spgn pages )
mselect
let nthdl = set gselAll npages . set gselSelected Nothing $ thdl
return . flip (set hoodleModeState) xstate
. ViewAppendState . gSelect2GHoodle $ nthdl
whenedit :: HoodleState -> Hoodle EditMode -> MainCoroutine HoodleState
whenedit xstate hdl = do
return . flip (set hoodleModeState) xstate
. SelectState
. gHoodle2GSelect $ hdl
-- |
viewModeChange :: UserEvent -> MainCoroutine ()
viewModeChange command = do
case command of
ToSinglePage -> updateXState cont2single >> invalidateAll
ToContSinglePage -> updateXState single2cont >> invalidateAll
_ -> return ()
adjustScrollbarWithGeometryCurrent
where cont2single xst =
unboxBiAct (noaction xst) (whencont xst) . view currentCanvasInfo $ xst
single2cont xst =
unboxBiAct (whensing xst) (noaction xst) . view currentCanvasInfo $ xst
noaction :: HoodleState -> a -> MainCoroutine HoodleState
noaction xstate = const (return xstate)
-------------------------------------
whencont xstate cinfo = do
geometry <- liftIO $ getGeometry4CurrCvs xstate
cdim <- liftIO $ return . canvasDim $ geometry
page <- getCurrentPageCurr
let zmode = view (viewInfo.zoomMode) cinfo
canvas = view drawArea cinfo
cpn = PageNum . view currentPageNum $ cinfo
pdim = PageDimension (view gdimension page)
ViewPortBBox bbox = view (viewInfo.pageArrangement.viewPortBBox) cinfo
(x0,y0) = bbox_upperleft bbox
(xpos,ypos) = maybe (0,0) (unPageCoord.snd) $ desktop2Page geometry (DeskCoord (x0,y0))
let arr = makeSingleArrangement zmode pdim cdim (xpos,ypos)
let nvinfo = ViewInfo (view zoomMode (view viewInfo cinfo)) arr
ncinfo = CanvasInfo (view canvasId cinfo)
canvas
(view mDrawSurface cinfo)
(view scrolledWindow cinfo)
nvinfo
(unPageNum cpn)
(view horizAdjustment cinfo)
(view vertAdjustment cinfo)
(view horizAdjConnId cinfo)
(view vertAdjConnId cinfo)
(view canvasWidgets cinfo)
(view notifiedItem cinfo)
return $ set currentCanvasInfo (CanvasSinglePage ncinfo) xstate
-------------------------------------
whensing xstate cinfo = do
cdim <- liftIO $ return . canvasDim =<< getGeometry4CurrCvs xstate
let zmode = view (viewInfo.zoomMode) cinfo
canvas = view drawArea cinfo
cpn = PageNum . view currentPageNum $ cinfo
(hadj,vadj) = view adjustments cinfo
(xpos,ypos) <- liftIO $ (,) <$> adjustmentGetValue hadj <*> adjustmentGetValue vadj
let arr = makeContinuousArrangement zmode cdim (getHoodle xstate)
(cpn, PageCoord (xpos,ypos))
geometry <- liftIO $ makeCanvasGeometry cpn arr canvas
let DeskCoord (nxpos,nypos) = page2Desktop geometry (cpn,PageCoord (xpos,ypos))
let vinfo = view viewInfo cinfo
nvinfo = ViewInfo (view zoomMode vinfo) arr
ncinfotemp = CanvasInfo (view canvasId cinfo)
(view drawArea cinfo)
(view mDrawSurface cinfo)
(view scrolledWindow cinfo)
nvinfo
(view currentPageNum cinfo)
hadj
vadj
(view horizAdjConnId cinfo)
(view vertAdjConnId cinfo)
(view canvasWidgets cinfo)
(view notifiedItem cinfo)
ncpn = maybe cpn fst $ desktop2Page geometry (DeskCoord (nxpos,nypos))
ncinfo = over currentPageNum (const (unPageNum ncpn)) ncinfotemp
return . over currentCanvasInfo (const (CanvasContPage ncinfo)) $ xstate
|
wavewave/hoodle-core
|
src/Hoodle/Coroutine/Mode.hs
|
gpl-3.0
| 7,201 | 0 | 19 | 2,625 | 1,619 | 825 | 794 | 124 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Types where
import Data.Text (Text)
data Config = Config
{ useHaskellName :: Bool
, colors :: [Color]
, title :: Text
, size :: Int
}
data Color = Color { unColor :: Text }
defaultColors :: [Color]
defaultColors = [
Color "#1abc9c"
, Color "#2ecc71"
, Color "#3498db"
, Color "#9b59b6"
, Color "#f39c12"
, Color "#d35400"
, Color "#e74c3c"
, Color "#bdc3c7"
, Color "#95a5a6"
, Color "#7f8c8d"
]
data ProgramOpts = ProgramOpts
{ fileIn :: String
, fileOut :: String
, conf :: Config
}
|
fgaray/persist2er
|
src/Types.hs
|
gpl-3.0
| 621 | 0 | 9 | 187 | 166 | 97 | 69 | 25 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.YouTube.Videos.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a list of resources, possibly filtered.
--
-- /See:/ <https://developers.google.com/youtube/ YouTube Data API v3 Reference> for @youtube.videos.list@.
module Network.Google.Resource.YouTube.Videos.List
(
-- * REST Resource
VideosListResource
-- * Creating a Request
, videosList
, VideosList
-- * Request Lenses
, vlChart
, vlXgafv
, vlPart
, vlUploadProtocol
, vlRegionCode
, vlLocale
, vlAccessToken
, vlMyRating
, vlMaxHeight
, vlUploadType
, vlHl
, vlOnBehalfOfContentOwner
, vlVideoCategoryId
, vlMaxWidth
, vlId
, vlPageToken
, vlMaxResults
, vlCallback
) where
import Network.Google.Prelude
import Network.Google.YouTube.Types
-- | A resource alias for @youtube.videos.list@ method which the
-- 'VideosList' request conforms to.
type VideosListResource =
"youtube" :>
"v3" :>
"videos" :>
QueryParams "part" Text :>
QueryParam "chart" VideosListChart :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "regionCode" Text :>
QueryParam "locale" Text :>
QueryParam "access_token" Text :>
QueryParam "myRating" VideosListMyRating :>
QueryParam "maxHeight" (Textual Int32) :>
QueryParam "uploadType" Text :>
QueryParam "hl" Text :>
QueryParam "onBehalfOfContentOwner" Text :>
QueryParam "videoCategoryId" Text :>
QueryParam "maxWidth" (Textual Int32) :>
QueryParams "id" Text :>
QueryParam "pageToken" Text :>
QueryParam "maxResults"
(Textual Word32)
:>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] VideoListResponse
-- | Retrieves a list of resources, possibly filtered.
--
-- /See:/ 'videosList' smart constructor.
data VideosList =
VideosList'
{ _vlChart :: !(Maybe VideosListChart)
, _vlXgafv :: !(Maybe Xgafv)
, _vlPart :: ![Text]
, _vlUploadProtocol :: !(Maybe Text)
, _vlRegionCode :: !(Maybe Text)
, _vlLocale :: !(Maybe Text)
, _vlAccessToken :: !(Maybe Text)
, _vlMyRating :: !(Maybe VideosListMyRating)
, _vlMaxHeight :: !(Maybe (Textual Int32))
, _vlUploadType :: !(Maybe Text)
, _vlHl :: !(Maybe Text)
, _vlOnBehalfOfContentOwner :: !(Maybe Text)
, _vlVideoCategoryId :: !Text
, _vlMaxWidth :: !(Maybe (Textual Int32))
, _vlId :: !(Maybe [Text])
, _vlPageToken :: !(Maybe Text)
, _vlMaxResults :: !(Textual Word32)
, _vlCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'VideosList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'vlChart'
--
-- * 'vlXgafv'
--
-- * 'vlPart'
--
-- * 'vlUploadProtocol'
--
-- * 'vlRegionCode'
--
-- * 'vlLocale'
--
-- * 'vlAccessToken'
--
-- * 'vlMyRating'
--
-- * 'vlMaxHeight'
--
-- * 'vlUploadType'
--
-- * 'vlHl'
--
-- * 'vlOnBehalfOfContentOwner'
--
-- * 'vlVideoCategoryId'
--
-- * 'vlMaxWidth'
--
-- * 'vlId'
--
-- * 'vlPageToken'
--
-- * 'vlMaxResults'
--
-- * 'vlCallback'
videosList
:: [Text] -- ^ 'vlPart'
-> VideosList
videosList pVlPart_ =
VideosList'
{ _vlChart = Nothing
, _vlXgafv = Nothing
, _vlPart = _Coerce # pVlPart_
, _vlUploadProtocol = Nothing
, _vlRegionCode = Nothing
, _vlLocale = Nothing
, _vlAccessToken = Nothing
, _vlMyRating = Nothing
, _vlMaxHeight = Nothing
, _vlUploadType = Nothing
, _vlHl = Nothing
, _vlOnBehalfOfContentOwner = Nothing
, _vlVideoCategoryId = "0"
, _vlMaxWidth = Nothing
, _vlId = Nothing
, _vlPageToken = Nothing
, _vlMaxResults = 5
, _vlCallback = Nothing
}
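-- A usage sketch (the video id is a placeholder; assumes the lens operators
-- '(&)' and '(.~)' are in scope, e.g. via the re-exports in
-- "Network.Google.Prelude"):
--
-- > videosList ["snippet"] & vlId .~ ["someVideoId"] & vlMaxResults .~ 1
--
-- builds a request for the snippet part of a single video, which can then be
-- run with a suitable 'GoogleRequest' runner such as 'Network.Google.send'.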
-- | Return the videos that are in the specified chart.
vlChart :: Lens' VideosList (Maybe VideosListChart)
vlChart = lens _vlChart (\ s a -> s{_vlChart = a})
-- | V1 error format.
vlXgafv :: Lens' VideosList (Maybe Xgafv)
vlXgafv = lens _vlXgafv (\ s a -> s{_vlXgafv = a})
-- | The *part* parameter specifies a comma-separated list of one or more
-- video resource properties that the API response will include. If the
-- parameter identifies a property that contains child properties, the
-- child properties will be included in the response. For example, in a
-- video resource, the snippet property contains the channelId, title,
-- description, tags, and categoryId properties. As such, if you set
-- *part=snippet*, the API response will contain all of those properties.
vlPart :: Lens' VideosList [Text]
vlPart
= lens _vlPart (\ s a -> s{_vlPart = a}) . _Coerce
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
vlUploadProtocol :: Lens' VideosList (Maybe Text)
vlUploadProtocol
= lens _vlUploadProtocol
(\ s a -> s{_vlUploadProtocol = a})
-- | Use a chart that is specific to the specified region
vlRegionCode :: Lens' VideosList (Maybe Text)
vlRegionCode
= lens _vlRegionCode (\ s a -> s{_vlRegionCode = a})
vlLocale :: Lens' VideosList (Maybe Text)
vlLocale = lens _vlLocale (\ s a -> s{_vlLocale = a})
-- | OAuth access token.
vlAccessToken :: Lens' VideosList (Maybe Text)
vlAccessToken
= lens _vlAccessToken
(\ s a -> s{_vlAccessToken = a})
-- | Return videos liked\/disliked by the authenticated user. Does not
-- support RateType.RATED_TYPE_NONE.
vlMyRating :: Lens' VideosList (Maybe VideosListMyRating)
vlMyRating
= lens _vlMyRating (\ s a -> s{_vlMyRating = a})
vlMaxHeight :: Lens' VideosList (Maybe Int32)
vlMaxHeight
= lens _vlMaxHeight (\ s a -> s{_vlMaxHeight = a}) .
mapping _Coerce
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
vlUploadType :: Lens' VideosList (Maybe Text)
vlUploadType
= lens _vlUploadType (\ s a -> s{_vlUploadType = a})
-- | Stands for \"host language\". Specifies the localization language of the
-- metadata to be filled into snippet.localized. The field is filled with
-- the default metadata if there is no localization in the specified
-- language. The parameter value must be a language code included in the
-- list returned by the i18nLanguages.list method (e.g. en_US, es_MX).
vlHl :: Lens' VideosList (Maybe Text)
vlHl = lens _vlHl (\ s a -> s{_vlHl = a})
-- | *Note:* This parameter is intended exclusively for YouTube content
-- partners. The *onBehalfOfContentOwner* parameter indicates that the
-- request\'s authorization credentials identify a YouTube CMS user who is
-- acting on behalf of the content owner specified in the parameter value.
-- This parameter is intended for YouTube content partners that own and
-- manage many different YouTube channels. It allows content owners to
-- authenticate once and get access to all their video and channel data,
-- without having to provide authentication credentials for each individual
-- channel. The CMS account that the user authenticates with must be linked
-- to the specified YouTube content owner.
vlOnBehalfOfContentOwner :: Lens' VideosList (Maybe Text)
vlOnBehalfOfContentOwner
= lens _vlOnBehalfOfContentOwner
(\ s a -> s{_vlOnBehalfOfContentOwner = a})
-- | Use chart that is specific to the specified video category
vlVideoCategoryId :: Lens' VideosList Text
vlVideoCategoryId
= lens _vlVideoCategoryId
(\ s a -> s{_vlVideoCategoryId = a})
-- | Return the player with maximum height specified in
vlMaxWidth :: Lens' VideosList (Maybe Int32)
vlMaxWidth
= lens _vlMaxWidth (\ s a -> s{_vlMaxWidth = a}) .
mapping _Coerce
-- | Return videos with the given ids.
vlId :: Lens' VideosList [Text]
vlId
= lens _vlId (\ s a -> s{_vlId = a}) . _Default .
_Coerce
-- | The *pageToken* parameter identifies a specific page in the result set
-- that should be returned. In an API response, the nextPageToken and
-- prevPageToken properties identify other pages that could be retrieved.
-- *Note:* This parameter is supported for use in conjunction with the
-- myRating and chart parameters, but it is not supported for use in
-- conjunction with the id parameter.
vlPageToken :: Lens' VideosList (Maybe Text)
vlPageToken
= lens _vlPageToken (\ s a -> s{_vlPageToken = a})
-- | The *maxResults* parameter specifies the maximum number of items that
-- should be returned in the result set. *Note:* This parameter is
-- supported for use in conjunction with the myRating and chart parameters,
-- but it is not supported for use in conjunction with the id parameter.
vlMaxResults :: Lens' VideosList Word32
vlMaxResults
= lens _vlMaxResults (\ s a -> s{_vlMaxResults = a})
. _Coerce
-- | JSONP
vlCallback :: Lens' VideosList (Maybe Text)
vlCallback
= lens _vlCallback (\ s a -> s{_vlCallback = a})
instance GoogleRequest VideosList where
type Rs VideosList = VideoListResponse
type Scopes VideosList =
'["https://www.googleapis.com/auth/youtube",
"https://www.googleapis.com/auth/youtube.force-ssl",
"https://www.googleapis.com/auth/youtube.readonly",
"https://www.googleapis.com/auth/youtubepartner"]
requestClient VideosList'{..}
= go _vlPart _vlChart _vlXgafv _vlUploadProtocol
_vlRegionCode
_vlLocale
_vlAccessToken
_vlMyRating
_vlMaxHeight
_vlUploadType
_vlHl
_vlOnBehalfOfContentOwner
(Just _vlVideoCategoryId)
_vlMaxWidth
(_vlId ^. _Default)
_vlPageToken
(Just _vlMaxResults)
_vlCallback
(Just AltJSON)
youTubeService
where go
= buildClient (Proxy :: Proxy VideosListResource)
mempty
|
brendanhay/gogol
|
gogol-youtube/gen/Network/Google/Resource/YouTube/Videos/List.hs
|
mpl-2.0
| 11,111 | 0 | 29 | 2,920 | 1,771 | 1,020 | 751 | 229 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.AnalyticsReporting.Types.Sum
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.AnalyticsReporting.Types.Sum where
import Network.Google.Prelude hiding (Bytes)
-- | Is the metric \`EQUAL\`, \`LESS_THAN\` or \`GREATER_THAN\` the
-- comparisonValue, the default is \`EQUAL\`. If the operator is
-- \`IS_MISSING\`, checks if the metric is missing and would ignore the
-- comparisonValue.
data MetricFilterOperator
= OperatorUnspecified
-- ^ @OPERATOR_UNSPECIFIED@
-- If the operator is not specified, it is treated as \`EQUAL\`.
| Equal
-- ^ @EQUAL@
-- Should the value of the metric be exactly equal to the comparison value.
| LessThan
-- ^ @LESS_THAN@
-- Should the value of the metric be less than to the comparison value.
| GreaterThan
-- ^ @GREATER_THAN@
-- Should the value of the metric be greater than to the comparison value.
| IsMissing
-- ^ @IS_MISSING@
-- Validates if the metric is missing. Doesn\'t take comparisonValue into
-- account.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable MetricFilterOperator
instance FromHttpApiData MetricFilterOperator where
parseQueryParam = \case
"OPERATOR_UNSPECIFIED" -> Right OperatorUnspecified
"EQUAL" -> Right Equal
"LESS_THAN" -> Right LessThan
"GREATER_THAN" -> Right GreaterThan
"IS_MISSING" -> Right IsMissing
x -> Left ("Unable to parse MetricFilterOperator from: " <> x)
instance ToHttpApiData MetricFilterOperator where
toQueryParam = \case
OperatorUnspecified -> "OPERATOR_UNSPECIFIED"
Equal -> "EQUAL"
LessThan -> "LESS_THAN"
GreaterThan -> "GREATER_THAN"
IsMissing -> "IS_MISSING"
instance FromJSON MetricFilterOperator where
parseJSON = parseJSONText "MetricFilterOperator"
instance ToJSON MetricFilterOperator where
toJSON = toJSONText
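-- For example, parseQueryParam "LESS_THAN" yields 'Right LessThan' and
-- toQueryParam LessThan gives back "LESS_THAN"; the JSON instances reuse the
-- same textual encoding through 'parseJSONText' and 'toJSONText'. The other
-- enumerations in this module follow the same pattern.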
-- | The operator for combining multiple dimension filters. If unspecified,
-- it is treated as an \`OR\`.
data DimensionFilterClauseOperator
= DFCOOperatorUnspecified
-- ^ @OPERATOR_UNSPECIFIED@
-- Unspecified operator. It is treated as an \`OR\`.
| DFCOOR
-- ^ @OR@
-- The logical \`OR\` operator.
| DFCOAnd
-- ^ @AND@
-- The logical \`AND\` operator.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable DimensionFilterClauseOperator
instance FromHttpApiData DimensionFilterClauseOperator where
parseQueryParam = \case
"OPERATOR_UNSPECIFIED" -> Right DFCOOperatorUnspecified
"OR" -> Right DFCOOR
"AND" -> Right DFCOAnd
x -> Left ("Unable to parse DimensionFilterClauseOperator from: " <> x)
instance ToHttpApiData DimensionFilterClauseOperator where
toQueryParam = \case
DFCOOperatorUnspecified -> "OPERATOR_UNSPECIFIED"
DFCOOR -> "OR"
DFCOAnd -> "AND"
instance FromJSON DimensionFilterClauseOperator where
parseJSON = parseJSONText "DimensionFilterClauseOperator"
instance ToJSON DimensionFilterClauseOperator where
toJSON = toJSONText
-- | Type of the cohort. The only supported type as of now is
-- \`FIRST_VISIT_DATE\`. If this field is unspecified the cohort is treated
-- as \`FIRST_VISIT_DATE\` type cohort.
data CohortType
= UnspecifiedCohortType
-- ^ @UNSPECIFIED_COHORT_TYPE@
-- If unspecified it\'s treated as \`FIRST_VISIT_DATE\`.
| FirstVisitDate
-- ^ @FIRST_VISIT_DATE@
-- Cohorts that are selected based on first visit date.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable CohortType
instance FromHttpApiData CohortType where
parseQueryParam = \case
"UNSPECIFIED_COHORT_TYPE" -> Right UnspecifiedCohortType
"FIRST_VISIT_DATE" -> Right FirstVisitDate
x -> Left ("Unable to parse CohortType from: " <> x)
instance ToHttpApiData CohortType where
toQueryParam = \case
UnspecifiedCohortType -> "UNSPECIFIED_COHORT_TYPE"
FirstVisitDate -> "FIRST_VISIT_DATE"
instance FromJSON CohortType where
parseJSON = parseJSONText "CohortType"
instance ToJSON CohortType where
toJSON = toJSONText
data SearchUserActivityRequestActivityTypesItem
= ActivityTypeUnspecified
-- ^ @ACTIVITY_TYPE_UNSPECIFIED@
-- ActivityType will never have this value in the response. Using this type
-- in the request will result in an error.
| Pageview
-- ^ @PAGEVIEW@
-- Used when the activity resulted out of a visitor viewing a page.
| Screenview
-- ^ @SCREENVIEW@
-- Used when the activity resulted out of a visitor using an application on
-- a mobile device.
| Goal
-- ^ @GOAL@
-- Used to denote that a goal type activity.
| Ecommerce
-- ^ @ECOMMERCE@
-- An e-commerce transaction was performed by the visitor on the page.
| Event
-- ^ @EVENT@
-- Used when the activity is an event.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable SearchUserActivityRequestActivityTypesItem
instance FromHttpApiData SearchUserActivityRequestActivityTypesItem where
parseQueryParam = \case
"ACTIVITY_TYPE_UNSPECIFIED" -> Right ActivityTypeUnspecified
"PAGEVIEW" -> Right Pageview
"SCREENVIEW" -> Right Screenview
"GOAL" -> Right Goal
"ECOMMERCE" -> Right Ecommerce
"EVENT" -> Right Event
x -> Left ("Unable to parse SearchUserActivityRequestActivityTypesItem from: " <> x)
instance ToHttpApiData SearchUserActivityRequestActivityTypesItem where
toQueryParam = \case
ActivityTypeUnspecified -> "ACTIVITY_TYPE_UNSPECIFIED"
Pageview -> "PAGEVIEW"
Screenview -> "SCREENVIEW"
Goal -> "GOAL"
Ecommerce -> "ECOMMERCE"
Event -> "EVENT"
instance FromJSON SearchUserActivityRequestActivityTypesItem where
parseJSON = parseJSONText "SearchUserActivityRequestActivityTypesItem"
instance ToJSON SearchUserActivityRequestActivityTypesItem where
toJSON = toJSONText
-- | The order type. The default orderType is \`VALUE\`.
data OrderByOrderType
= OrderTypeUnspecified
-- ^ @ORDER_TYPE_UNSPECIFIED@
-- Unspecified order type will be treated as sort based on value.
| Value
-- ^ @VALUE@
-- The sort order is based on the value of the chosen column; looks only at
-- the first date range.
| Delta
-- ^ @DELTA@
-- The sort order is based on the difference of the values of the chosen
-- column between the first two date ranges. Usable only if there are
-- exactly two date ranges.
| Smart
-- ^ @SMART@
-- The sort order is based on weighted value of the chosen column. If
-- column has n\/d format, then weighted value of this ratio will be \`(n +
-- totals.n)\/(d + totals.d)\` Usable only for metrics that represent
-- ratios.
| HistogramBucket
-- ^ @HISTOGRAM_BUCKET@
-- Histogram order type is applicable only to dimension columns with
-- non-empty histogram-buckets.
| DimensionAsInteger
-- ^ @DIMENSION_AS_INTEGER@
-- If the dimensions are fixed length numbers, ordinary sort would just
-- work fine. \`DIMENSION_AS_INTEGER\` can be used if the dimensions are
-- variable length numbers.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable OrderByOrderType
instance FromHttpApiData OrderByOrderType where
parseQueryParam = \case
"ORDER_TYPE_UNSPECIFIED" -> Right OrderTypeUnspecified
"VALUE" -> Right Value
"DELTA" -> Right Delta
"SMART" -> Right Smart
"HISTOGRAM_BUCKET" -> Right HistogramBucket
"DIMENSION_AS_INTEGER" -> Right DimensionAsInteger
x -> Left ("Unable to parse OrderByOrderType from: " <> x)
instance ToHttpApiData OrderByOrderType where
toQueryParam = \case
OrderTypeUnspecified -> "ORDER_TYPE_UNSPECIFIED"
Value -> "VALUE"
Delta -> "DELTA"
Smart -> "SMART"
HistogramBucket -> "HISTOGRAM_BUCKET"
DimensionAsInteger -> "DIMENSION_AS_INTEGER"
instance FromJSON OrderByOrderType where
parseJSON = parseJSONText "OrderByOrderType"
instance ToJSON OrderByOrderType where
toJSON = toJSONText
-- | Action associated with this e-commerce action.
data EcommerceDataActionType
= Unknown
-- ^ @UNKNOWN@
-- Action type is not known.
| Click
-- ^ @CLICK@
-- Click through of product lists.
| DetailsView
-- ^ @DETAILS_VIEW@
-- Product detail views.
| AddToCart
-- ^ @ADD_TO_CART@
-- Add product(s) to cart.
| RemoveFromCart
-- ^ @REMOVE_FROM_CART@
-- Remove product(s) from cart.
| Checkout
-- ^ @CHECKOUT@
-- Check out.
| Payment
-- ^ @PAYMENT@
-- Completed purchase.
| Refund
-- ^ @REFUND@
-- Refund of purchase.
| CheckoutOption
-- ^ @CHECKOUT_OPTION@
-- Checkout options.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable EcommerceDataActionType
instance FromHttpApiData EcommerceDataActionType where
parseQueryParam = \case
"UNKNOWN" -> Right Unknown
"CLICK" -> Right Click
"DETAILS_VIEW" -> Right DetailsView
"ADD_TO_CART" -> Right AddToCart
"REMOVE_FROM_CART" -> Right RemoveFromCart
"CHECKOUT" -> Right Checkout
"PAYMENT" -> Right Payment
"REFUND" -> Right Refund
"CHECKOUT_OPTION" -> Right CheckoutOption
x -> Left ("Unable to parse EcommerceDataActionType from: " <> x)
instance ToHttpApiData EcommerceDataActionType where
toQueryParam = \case
Unknown -> "UNKNOWN"
Click -> "CLICK"
DetailsView -> "DETAILS_VIEW"
AddToCart -> "ADD_TO_CART"
RemoveFromCart -> "REMOVE_FROM_CART"
Checkout -> "CHECKOUT"
Payment -> "PAYMENT"
Refund -> "REFUND"
CheckoutOption -> "CHECKOUT_OPTION"
instance FromJSON EcommerceDataActionType where
parseJSON = parseJSONText "EcommerceDataActionType"
instance ToJSON EcommerceDataActionType where
toJSON = toJSONText
-- | The sorting order for the field.
data OrderBySortOrder
= SortOrderUnspecified
-- ^ @SORT_ORDER_UNSPECIFIED@
-- If the sort order is unspecified, the default is ascending.
| Ascending
-- ^ @ASCENDING@
-- Ascending sort. The field will be sorted in an ascending manner.
| Descending
-- ^ @DESCENDING@
-- Descending sort. The field will be sorted in a descending manner.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable OrderBySortOrder
instance FromHttpApiData OrderBySortOrder where
parseQueryParam = \case
"SORT_ORDER_UNSPECIFIED" -> Right SortOrderUnspecified
"ASCENDING" -> Right Ascending
"DESCENDING" -> Right Descending
x -> Left ("Unable to parse OrderBySortOrder from: " <> x)
instance ToHttpApiData OrderBySortOrder where
toQueryParam = \case
SortOrderUnspecified -> "SORT_ORDER_UNSPECIFIED"
Ascending -> "ASCENDING"
Descending -> "DESCENDING"
instance FromJSON OrderBySortOrder where
parseJSON = parseJSONText "OrderBySortOrder"
instance ToJSON OrderBySortOrder where
toJSON = toJSONText
-- | Specifies is the operation to perform to compare the metric. The default
-- is \`EQUAL\`.
data SegmentMetricFilterOperator
= SMFOUnspecifiedOperator
-- ^ @UNSPECIFIED_OPERATOR@
-- Unspecified operator is treated as \`LESS_THAN\` operator.
| SMFOLessThan
-- ^ @LESS_THAN@
-- Checks if the metric value is less than comparison value.
| SMFOGreaterThan
-- ^ @GREATER_THAN@
-- Checks if the metric value is greater than comparison value.
| SMFOEqual
-- ^ @EQUAL@
-- Equals operator.
| SMFOBetween
-- ^ @BETWEEN@
-- For between operator, both the minimum and maximum are exclusive. We
-- will use \`LT\` and \`GT\` for comparison.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable SegmentMetricFilterOperator
instance FromHttpApiData SegmentMetricFilterOperator where
parseQueryParam = \case
"UNSPECIFIED_OPERATOR" -> Right SMFOUnspecifiedOperator
"LESS_THAN" -> Right SMFOLessThan
"GREATER_THAN" -> Right SMFOGreaterThan
"EQUAL" -> Right SMFOEqual
"BETWEEN" -> Right SMFOBetween
x -> Left ("Unable to parse SegmentMetricFilterOperator from: " <> x)
instance ToHttpApiData SegmentMetricFilterOperator where
toQueryParam = \case
SMFOUnspecifiedOperator -> "UNSPECIFIED_OPERATOR"
SMFOLessThan -> "LESS_THAN"
SMFOGreaterThan -> "GREATER_THAN"
SMFOEqual -> "EQUAL"
SMFOBetween -> "BETWEEN"
instance FromJSON SegmentMetricFilterOperator where
parseJSON = parseJSONText "SegmentMetricFilterOperator"
instance ToJSON SegmentMetricFilterOperator where
toJSON = toJSONText
-- | The type of the metric, for example \`INTEGER\`.
data MetricHeaderEntryType
= MetricTypeUnspecified
-- ^ @METRIC_TYPE_UNSPECIFIED@
-- Metric type is unspecified.
| Integer
-- ^ @INTEGER@
-- Integer metric.
| Float
-- ^ @FLOAT@
-- Float metric.
| Currency
-- ^ @CURRENCY@
-- Currency metric.
| Percent
-- ^ @PERCENT@
-- Percentage metric.
| Time
-- ^ @TIME@
-- Time metric in \`HH:MM:SS\` format.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable MetricHeaderEntryType
instance FromHttpApiData MetricHeaderEntryType where
parseQueryParam = \case
"METRIC_TYPE_UNSPECIFIED" -> Right MetricTypeUnspecified
"INTEGER" -> Right Integer
"FLOAT" -> Right Float
"CURRENCY" -> Right Currency
"PERCENT" -> Right Percent
"TIME" -> Right Time
x -> Left ("Unable to parse MetricHeaderEntryType from: " <> x)
instance ToHttpApiData MetricHeaderEntryType where
toQueryParam = \case
MetricTypeUnspecified -> "METRIC_TYPE_UNSPECIFIED"
Integer -> "INTEGER"
Float -> "FLOAT"
Currency -> "CURRENCY"
Percent -> "PERCENT"
Time -> "TIME"
instance FromJSON MetricHeaderEntryType where
parseJSON = parseJSONText "MetricHeaderEntryType"
instance ToJSON MetricHeaderEntryType where
toJSON = toJSONText
-- | The desired report
-- [sample](https:\/\/support.google.com\/analytics\/answer\/2637192) size.
-- If the \`samplingLevel\` field is unspecified the \`DEFAULT\`
-- sampling level is used. Every [ReportRequest](#ReportRequest) within a
-- \`batchGet\` method must contain the same \`samplingLevel\` definition.
-- See [developer
-- guide](\/analytics\/devguides\/reporting\/core\/v4\/basics#sampling) for
-- details.
data ReportRequestSamplingLevel
= SamplingUnspecified
-- ^ @SAMPLING_UNSPECIFIED@
-- If the \`samplingLevel\` field is unspecified the \`DEFAULT\` sampling
-- level is used.
| Default
-- ^ @DEFAULT@
-- Returns response with a sample size that balances speed and accuracy.
| Small
-- ^ @SMALL@
-- It returns a fast response with a smaller sampling size.
| Large
-- ^ @LARGE@
-- Returns a more accurate response using a large sampling size. But this
-- may result in response being slower.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ReportRequestSamplingLevel
instance FromHttpApiData ReportRequestSamplingLevel where
parseQueryParam = \case
"SAMPLING_UNSPECIFIED" -> Right SamplingUnspecified
"DEFAULT" -> Right Default
"SMALL" -> Right Small
"LARGE" -> Right Large
x -> Left ("Unable to parse ReportRequestSamplingLevel from: " <> x)
instance ToHttpApiData ReportRequestSamplingLevel where
toQueryParam = \case
SamplingUnspecified -> "SAMPLING_UNSPECIFIED"
Default -> "DEFAULT"
Small -> "SMALL"
Large -> "LARGE"
instance FromJSON ReportRequestSamplingLevel where
parseJSON = parseJSONText "ReportRequestSamplingLevel"
instance ToJSON ReportRequestSamplingLevel where
toJSON = toJSONText
-- | V1 error format.
data Xgafv
= X1
-- ^ @1@
-- v1 error format
| X2
-- ^ @2@
-- v2 error format
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable Xgafv
instance FromHttpApiData Xgafv where
parseQueryParam = \case
"1" -> Right X1
"2" -> Right X2
x -> Left ("Unable to parse Xgafv from: " <> x)
instance ToHttpApiData Xgafv where
toQueryParam = \case
X1 -> "1"
X2 -> "2"
instance FromJSON Xgafv where
parseJSON = parseJSONText "Xgafv"
instance ToJSON Xgafv where
toJSON = toJSONText
-- | Specifies if the step immediately precedes or can be any time before the
-- next step.
data SegmentSequenceStepMatchType
= UnspecifiedMatchType
-- ^ @UNSPECIFIED_MATCH_TYPE@
-- Unspecified match type is treated as precedes.
| Precedes
-- ^ @PRECEDES@
-- Operator indicates that the previous step precedes the next step.
| ImmediatelyPrecedes
-- ^ @IMMEDIATELY_PRECEDES@
-- Operator indicates that the previous step immediately precedes the next
-- step.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable SegmentSequenceStepMatchType
instance FromHttpApiData SegmentSequenceStepMatchType where
parseQueryParam = \case
"UNSPECIFIED_MATCH_TYPE" -> Right UnspecifiedMatchType
"PRECEDES" -> Right Precedes
"IMMEDIATELY_PRECEDES" -> Right ImmediatelyPrecedes
x -> Left ("Unable to parse SegmentSequenceStepMatchType from: " <> x)
instance ToHttpApiData SegmentSequenceStepMatchType where
toQueryParam = \case
UnspecifiedMatchType -> "UNSPECIFIED_MATCH_TYPE"
Precedes -> "PRECEDES"
ImmediatelyPrecedes -> "IMMEDIATELY_PRECEDES"
instance FromJSON SegmentSequenceStepMatchType where
parseJSON = parseJSONText "SegmentSequenceStepMatchType"
instance ToJSON SegmentSequenceStepMatchType where
toJSON = toJSONText
-- | Scope for a metric defines the level at which that metric is defined.
-- The specified metric scope must be equal to or greater than its primary
-- scope as defined in the data model. The primary scope is defined by if
-- the segment is selecting users or sessions.
data SegmentMetricFilterScope
= SMFSUnspecifiedScope
-- ^ @UNSPECIFIED_SCOPE@
-- If the scope is unspecified, it defaults to the condition scope,
-- \`USER\` or \`SESSION\` depending on if the segment is trying to choose
-- users or sessions.
| SMFSProduct
-- ^ @PRODUCT@
-- Product scope.
| SMFSHit
-- ^ @HIT@
-- Hit scope.
| SMFSSession
-- ^ @SESSION@
-- Session scope.
| SMFSUser
-- ^ @USER@
-- User scope.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable SegmentMetricFilterScope
instance FromHttpApiData SegmentMetricFilterScope where
parseQueryParam = \case
"UNSPECIFIED_SCOPE" -> Right SMFSUnspecifiedScope
"PRODUCT" -> Right SMFSProduct
"HIT" -> Right SMFSHit
"SESSION" -> Right SMFSSession
"USER" -> Right SMFSUser
x -> Left ("Unable to parse SegmentMetricFilterScope from: " <> x)
instance ToHttpApiData SegmentMetricFilterScope where
toQueryParam = \case
SMFSUnspecifiedScope -> "UNSPECIFIED_SCOPE"
SMFSProduct -> "PRODUCT"
SMFSHit -> "HIT"
SMFSSession -> "SESSION"
SMFSUser -> "USER"
instance FromJSON SegmentMetricFilterScope where
parseJSON = parseJSONText "SegmentMetricFilterScope"
instance ToJSON SegmentMetricFilterScope where
toJSON = toJSONText
-- | The type of this e-commerce activity.
data EcommerceDataEcommerceType
= EcommerceTypeUnspecified
-- ^ @ECOMMERCE_TYPE_UNSPECIFIED@
-- Used when the e-commerce activity type is unspecified.
| Classic
-- ^ @CLASSIC@
-- Used when activity has classic (non-enhanced) e-commerce information.
| Enhanced
-- ^ @ENHANCED@
-- Used when activity has enhanced e-commerce information.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable EcommerceDataEcommerceType
instance FromHttpApiData EcommerceDataEcommerceType where
parseQueryParam = \case
"ECOMMERCE_TYPE_UNSPECIFIED" -> Right EcommerceTypeUnspecified
"CLASSIC" -> Right Classic
"ENHANCED" -> Right Enhanced
x -> Left ("Unable to parse EcommerceDataEcommerceType from: " <> x)
instance ToHttpApiData EcommerceDataEcommerceType where
toQueryParam = \case
EcommerceTypeUnspecified -> "ECOMMERCE_TYPE_UNSPECIFIED"
Classic -> "CLASSIC"
Enhanced -> "ENHANCED"
instance FromJSON EcommerceDataEcommerceType where
parseJSON = parseJSONText "EcommerceDataEcommerceType"
instance ToJSON EcommerceDataEcommerceType where
toJSON = toJSONText
-- | How to match the dimension to the expression. The default is REGEXP.
data DimensionFilterOperator
= DFOOperatorUnspecified
-- ^ @OPERATOR_UNSPECIFIED@
-- If the match type is unspecified, it is treated as a \`REGEXP\`.
| DFORegexp
-- ^ @REGEXP@
-- The match expression is treated as a regular expression. All other match
-- types are not treated as regular expressions.
| DFOBeginsWith
-- ^ @BEGINS_WITH@
-- Matches the value which begin with the match expression provided.
| DFOEndsWith
-- ^ @ENDS_WITH@
-- Matches the values which end with the match expression provided.
| DFOPartial
-- ^ @PARTIAL@
-- Substring match.
| DFOExact
-- ^ @EXACT@
-- The value should match the match expression entirely.
| DFONumericEqual
-- ^ @NUMERIC_EQUAL@
-- Integer comparison filters. Case sensitivity is ignored for these and
-- the expression is assumed to be a string representing an integer.
-- Failure conditions: - If expression is not a valid int64, the client
-- should expect an error. - Input dimensions that are not valid int64
-- values will never match the filter.
| DFONumericGreaterThan
-- ^ @NUMERIC_GREATER_THAN@
-- Checks if the dimension is numerically greater than the match
-- expression. Read the description for \`NUMERIC_EQUALS\` for
-- restrictions.
| DFONumericLessThan
-- ^ @NUMERIC_LESS_THAN@
-- Checks if the dimension is numerically less than the match expression.
-- Read the description for \`NUMERIC_EQUALS\` for restrictions.
| DFOInList
-- ^ @IN_LIST@
-- This option is used to specify a dimension filter whose expression can
-- take any value from a selected list of values. This helps avoiding
-- evaluating multiple exact match dimension filters which are OR\'ed for
-- every single response row. For example: expressions: [\"A\", \"B\",
-- \"C\"] Any response row whose dimension has it is value as A, B or C,
-- matches this DimensionFilter.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable DimensionFilterOperator
instance FromHttpApiData DimensionFilterOperator where
parseQueryParam = \case
"OPERATOR_UNSPECIFIED" -> Right DFOOperatorUnspecified
"REGEXP" -> Right DFORegexp
"BEGINS_WITH" -> Right DFOBeginsWith
"ENDS_WITH" -> Right DFOEndsWith
"PARTIAL" -> Right DFOPartial
"EXACT" -> Right DFOExact
"NUMERIC_EQUAL" -> Right DFONumericEqual
"NUMERIC_GREATER_THAN" -> Right DFONumericGreaterThan
"NUMERIC_LESS_THAN" -> Right DFONumericLessThan
"IN_LIST" -> Right DFOInList
x -> Left ("Unable to parse DimensionFilterOperator from: " <> x)
instance ToHttpApiData DimensionFilterOperator where
toQueryParam = \case
DFOOperatorUnspecified -> "OPERATOR_UNSPECIFIED"
DFORegexp -> "REGEXP"
DFOBeginsWith -> "BEGINS_WITH"
DFOEndsWith -> "ENDS_WITH"
DFOPartial -> "PARTIAL"
DFOExact -> "EXACT"
DFONumericEqual -> "NUMERIC_EQUAL"
DFONumericGreaterThan -> "NUMERIC_GREATER_THAN"
DFONumericLessThan -> "NUMERIC_LESS_THAN"
DFOInList -> "IN_LIST"
instance FromJSON DimensionFilterOperator where
parseJSON = parseJSONText "DimensionFilterOperator"
instance ToJSON DimensionFilterOperator where
toJSON = toJSONText
-- | Type of this activity.
data ActivityActivityType
= AATActivityTypeUnspecified
-- ^ @ACTIVITY_TYPE_UNSPECIFIED@
-- ActivityType will never have this value in the response. Using this type
-- in the request will result in an error.
| AATPageview
-- ^ @PAGEVIEW@
-- Used when the activity resulted out of a visitor viewing a page.
| AATScreenview
-- ^ @SCREENVIEW@
-- Used when the activity resulted out of a visitor using an application on
-- a mobile device.
| AATGoal
-- ^ @GOAL@
-- Used to denote that a goal type activity.
| AATEcommerce
-- ^ @ECOMMERCE@
-- An e-commerce transaction was performed by the visitor on the page.
| AATEvent
-- ^ @EVENT@
-- Used when the activity is an event.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ActivityActivityType
instance FromHttpApiData ActivityActivityType where
parseQueryParam = \case
"ACTIVITY_TYPE_UNSPECIFIED" -> Right AATActivityTypeUnspecified
"PAGEVIEW" -> Right AATPageview
"SCREENVIEW" -> Right AATScreenview
"GOAL" -> Right AATGoal
"ECOMMERCE" -> Right AATEcommerce
"EVENT" -> Right AATEvent
x -> Left ("Unable to parse ActivityActivityType from: " <> x)
instance ToHttpApiData ActivityActivityType where
toQueryParam = \case
AATActivityTypeUnspecified -> "ACTIVITY_TYPE_UNSPECIFIED"
AATPageview -> "PAGEVIEW"
AATScreenview -> "SCREENVIEW"
AATGoal -> "GOAL"
AATEcommerce -> "ECOMMERCE"
AATEvent -> "EVENT"
instance FromJSON ActivityActivityType where
parseJSON = parseJSONText "ActivityActivityType"
instance ToJSON ActivityActivityType where
toJSON = toJSONText
-- | Specifies how the metric expression should be formatted, for example
-- \`INTEGER\`.
data MetricFormattingType
= MFTMetricTypeUnspecified
-- ^ @METRIC_TYPE_UNSPECIFIED@
-- Metric type is unspecified.
| MFTInteger
-- ^ @INTEGER@
-- Integer metric.
| MFTFloat
-- ^ @FLOAT@
-- Float metric.
| MFTCurrency
-- ^ @CURRENCY@
-- Currency metric.
| MFTPercent
-- ^ @PERCENT@
-- Percentage metric.
| MFTTime
-- ^ @TIME@
-- Time metric in \`HH:MM:SS\` format.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable MetricFormattingType
instance FromHttpApiData MetricFormattingType where
parseQueryParam = \case
"METRIC_TYPE_UNSPECIFIED" -> Right MFTMetricTypeUnspecified
"INTEGER" -> Right MFTInteger
"FLOAT" -> Right MFTFloat
"CURRENCY" -> Right MFTCurrency
"PERCENT" -> Right MFTPercent
"TIME" -> Right MFTTime
x -> Left ("Unable to parse MetricFormattingType from: " <> x)
instance ToHttpApiData MetricFormattingType where
toQueryParam = \case
MFTMetricTypeUnspecified -> "METRIC_TYPE_UNSPECIFIED"
MFTInteger -> "INTEGER"
MFTFloat -> "FLOAT"
MFTCurrency -> "CURRENCY"
MFTPercent -> "PERCENT"
MFTTime -> "TIME"
instance FromJSON MetricFormattingType where
parseJSON = parseJSONText "MetricFormattingType"
instance ToJSON MetricFormattingType where
toJSON = toJSONText
-- | The operator to use to match the dimension with the expressions.
data SegmentDimensionFilterOperator
= SDFOOperatorUnspecified
-- ^ @OPERATOR_UNSPECIFIED@
-- If the match type is unspecified, it is treated as a REGEXP.
| SDFORegexp
-- ^ @REGEXP@
-- The match expression is treated as a regular expression. All other match
-- types are not treated as regular expressions.
| SDFOBeginsWith
-- ^ @BEGINS_WITH@
-- Matches the values which begin with the match expression provided.
| SDFOEndsWith
-- ^ @ENDS_WITH@
-- Matches the values which end with the match expression provided.
| SDFOPartial
-- ^ @PARTIAL@
-- Substring match.
| SDFOExact
-- ^ @EXACT@
-- The value should match the match expression entirely.
| SDFOInList
-- ^ @IN_LIST@
-- This option is used to specify a dimension filter whose expression can
-- take any value from a selected list of values. It avoids evaluating
-- multiple exact-match dimension filters which are OR\'ed for every single
-- response row. For example, with expressions: [\"A\", \"B\", \"C\"], any
-- response row whose dimension has its value as A, B, or C matches this
-- DimensionFilter.
| SDFONumericLessThan
-- ^ @NUMERIC_LESS_THAN@
-- Integer comparison filters. Case sensitivity is ignored for these, and
-- the expression is assumed to be a string representing an integer.
-- Failure conditions: - if the expression is not a valid int64, the client
-- should expect an error. - input dimensions that are not valid int64
-- values will never match the filter. Checks whether the dimension is
-- numerically less than the match expression.
| SDFONumericGreaterThan
-- ^ @NUMERIC_GREATER_THAN@
-- Checks if the dimension is numerically greater than the match
-- expression.
| SDFONumericBetween
-- ^ @NUMERIC_BETWEEN@
-- Checks if the dimension is numerically between the minimum and maximum
-- of the match expression, boundaries excluded.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable SegmentDimensionFilterOperator
instance FromHttpApiData SegmentDimensionFilterOperator where
parseQueryParam = \case
"OPERATOR_UNSPECIFIED" -> Right SDFOOperatorUnspecified
"REGEXP" -> Right SDFORegexp
"BEGINS_WITH" -> Right SDFOBeginsWith
"ENDS_WITH" -> Right SDFOEndsWith
"PARTIAL" -> Right SDFOPartial
"EXACT" -> Right SDFOExact
"IN_LIST" -> Right SDFOInList
"NUMERIC_LESS_THAN" -> Right SDFONumericLessThan
"NUMERIC_GREATER_THAN" -> Right SDFONumericGreaterThan
"NUMERIC_BETWEEN" -> Right SDFONumericBetween
x -> Left ("Unable to parse SegmentDimensionFilterOperator from: " <> x)
instance ToHttpApiData SegmentDimensionFilterOperator where
toQueryParam = \case
SDFOOperatorUnspecified -> "OPERATOR_UNSPECIFIED"
SDFORegexp -> "REGEXP"
SDFOBeginsWith -> "BEGINS_WITH"
SDFOEndsWith -> "ENDS_WITH"
SDFOPartial -> "PARTIAL"
SDFOExact -> "EXACT"
SDFOInList -> "IN_LIST"
SDFONumericLessThan -> "NUMERIC_LESS_THAN"
SDFONumericGreaterThan -> "NUMERIC_GREATER_THAN"
SDFONumericBetween -> "NUMERIC_BETWEEN"
instance FromJSON SegmentDimensionFilterOperator where
parseJSON = parseJSONText "SegmentDimensionFilterOperator"
instance ToJSON SegmentDimensionFilterOperator where
toJSON = toJSONText
-- | The operator for combining multiple metric filters. If unspecified, it
-- is treated as an \`OR\`.
data MetricFilterClauseOperator
= MFCOOperatorUnspecified
-- ^ @OPERATOR_UNSPECIFIED@
-- Unspecified operator. It is treated as an \`OR\`.
| MFCOOR
-- ^ @OR@
-- The logical \`OR\` operator.
| MFCOAnd
-- ^ @AND@
-- The logical \`AND\` operator.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable MetricFilterClauseOperator
instance FromHttpApiData MetricFilterClauseOperator where
parseQueryParam = \case
"OPERATOR_UNSPECIFIED" -> Right MFCOOperatorUnspecified
"OR" -> Right MFCOOR
"AND" -> Right MFCOAnd
x -> Left ("Unable to parse MetricFilterClauseOperator from: " <> x)
instance ToHttpApiData MetricFilterClauseOperator where
toQueryParam = \case
MFCOOperatorUnspecified -> "OPERATOR_UNSPECIFIED"
MFCOOR -> "OR"
MFCOAnd -> "AND"
instance FromJSON MetricFilterClauseOperator where
parseJSON = parseJSONText "MetricFilterClauseOperator"
instance ToJSON MetricFilterClauseOperator where
toJSON = toJSONText
-- | Type of the user in the request. The field \`userId\` is associated with
-- this type.
data UserType
= UserIdTypeUnspecified
-- ^ @USER_ID_TYPE_UNSPECIFIED@
-- When the User Id Type is not specified, the default type used will be
-- CLIENT_ID.
| UserIdTypeUserId
-- ^ @USER_ID_TYPE_USER_ID@
-- A single user, like a signed-in user account, that may interact with
-- content across one or more devices and \/ or browser instances.
| UserIdTypeClientId
-- ^ @USER_ID_TYPE_CLIENT_ID@
-- Analytics assigned client_id.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable UserType
instance FromHttpApiData UserType where
parseQueryParam = \case
"USER_ID_TYPE_UNSPECIFIED" -> Right UserIdTypeUnspecified
"USER_ID_TYPE_USER_ID" -> Right UserIdTypeUserId
"USER_ID_TYPE_CLIENT_ID" -> Right UserIdTypeClientId
x -> Left ("Unable to parse UserType from: " <> x)
instance ToHttpApiData UserType where
toQueryParam = \case
UserIdTypeUnspecified -> "USER_ID_TYPE_UNSPECIFIED"
UserIdTypeUserId -> "USER_ID_TYPE_USER_ID"
UserIdTypeClientId -> "USER_ID_TYPE_CLIENT_ID"
instance FromJSON UserType where
parseJSON = parseJSONText "UserType"
instance ToJSON UserType where
toJSON = toJSONText
|
brendanhay/gogol
|
gogol-analyticsreporting/gen/Network/Google/AnalyticsReporting/Types/Sum.hs
|
mpl-2.0
| 34,676 | 0 | 11 | 8,217 | 4,543 | 2,457 | 2,086 | 548 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.BigtableAdmin.Operations.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets the latest state of a long-running operation. Clients can use this
-- method to poll the operation result at intervals as recommended by the
-- API service.
--
-- /See:/ <https://cloud.google.com/bigtable/ Cloud Bigtable Admin API Reference> for @bigtableadmin.operations.get@.
module Network.Google.Resource.BigtableAdmin.Operations.Get
(
-- * REST Resource
OperationsGetResource
-- * Creating a Request
, operationsGet
, OperationsGet
-- * Request Lenses
, ogXgafv
, ogUploadProtocol
, ogAccessToken
, ogUploadType
, ogName
, ogCallback
) where
import Network.Google.BigtableAdmin.Types
import Network.Google.Prelude
-- | A resource alias for @bigtableadmin.operations.get@ method which the
-- 'OperationsGet' request conforms to.
type OperationsGetResource =
"v2" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] Operation
-- | Gets the latest state of a long-running operation. Clients can use this
-- method to poll the operation result at intervals as recommended by the
-- API service.
--
-- /See:/ 'operationsGet' smart constructor.
data OperationsGet =
OperationsGet'
{ _ogXgafv :: !(Maybe Xgafv)
, _ogUploadProtocol :: !(Maybe Text)
, _ogAccessToken :: !(Maybe Text)
, _ogUploadType :: !(Maybe Text)
, _ogName :: !Text
, _ogCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'OperationsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ogXgafv'
--
-- * 'ogUploadProtocol'
--
-- * 'ogAccessToken'
--
-- * 'ogUploadType'
--
-- * 'ogName'
--
-- * 'ogCallback'
operationsGet
:: Text -- ^ 'ogName'
-> OperationsGet
operationsGet pOgName_ =
OperationsGet'
{ _ogXgafv = Nothing
, _ogUploadProtocol = Nothing
, _ogAccessToken = Nothing
, _ogUploadType = Nothing
, _ogName = pOgName_
, _ogCallback = Nothing
}
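-- A hypothetical usage sketch (not part of the generated module): only the
-- operation name is required; optional query parameters can be filled in
-- with a record update (or the lenses below), e.g.
--
-- > (operationsGet "operations/example-operation")
-- >   { _ogCallback = Just "myCallback" }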
-- | V1 error format.
ogXgafv :: Lens' OperationsGet (Maybe Xgafv)
ogXgafv = lens _ogXgafv (\ s a -> s{_ogXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ogUploadProtocol :: Lens' OperationsGet (Maybe Text)
ogUploadProtocol
= lens _ogUploadProtocol
(\ s a -> s{_ogUploadProtocol = a})
-- | OAuth access token.
ogAccessToken :: Lens' OperationsGet (Maybe Text)
ogAccessToken
= lens _ogAccessToken
(\ s a -> s{_ogAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ogUploadType :: Lens' OperationsGet (Maybe Text)
ogUploadType
= lens _ogUploadType (\ s a -> s{_ogUploadType = a})
-- | The name of the operation resource.
ogName :: Lens' OperationsGet Text
ogName = lens _ogName (\ s a -> s{_ogName = a})
-- | JSONP
ogCallback :: Lens' OperationsGet (Maybe Text)
ogCallback
= lens _ogCallback (\ s a -> s{_ogCallback = a})
instance GoogleRequest OperationsGet where
type Rs OperationsGet = Operation
type Scopes OperationsGet =
'["https://www.googleapis.com/auth/bigtable.admin",
"https://www.googleapis.com/auth/bigtable.admin.cluster",
"https://www.googleapis.com/auth/bigtable.admin.instance",
"https://www.googleapis.com/auth/cloud-bigtable.admin",
"https://www.googleapis.com/auth/cloud-bigtable.admin.cluster",
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only"]
requestClient OperationsGet'{..}
= go _ogName _ogXgafv _ogUploadProtocol
_ogAccessToken
_ogUploadType
_ogCallback
(Just AltJSON)
bigtableAdminService
where go
= buildClient (Proxy :: Proxy OperationsGetResource)
mempty
|
brendanhay/gogol
|
gogol-bigtableadmin/gen/Network/Google/Resource/BigtableAdmin/Operations/Get.hs
|
mpl-2.0
| 4,930 | 0 | 15 | 1,137 | 716 | 421 | 295 | 104 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Language.Types.Product
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.Language.Types.Product where
import Network.Google.Language.Types.Sum
import Network.Google.Prelude
-- | The syntax analysis request message.
--
-- /See:/ 'analyzeSyntaxRequest' smart constructor.
data AnalyzeSyntaxRequest = AnalyzeSyntaxRequest'
{ _asrEncodingType :: !(Maybe AnalyzeSyntaxRequestEncodingType)
, _asrDocument :: !(Maybe Document)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AnalyzeSyntaxRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'asrEncodingType'
--
-- * 'asrDocument'
analyzeSyntaxRequest
:: AnalyzeSyntaxRequest
analyzeSyntaxRequest =
AnalyzeSyntaxRequest'
{ _asrEncodingType = Nothing
, _asrDocument = Nothing
}
-- | The encoding type used by the API to calculate offsets.
asrEncodingType :: Lens' AnalyzeSyntaxRequest (Maybe AnalyzeSyntaxRequestEncodingType)
asrEncodingType
= lens _asrEncodingType
(\ s a -> s{_asrEncodingType = a})
-- | Input document.
asrDocument :: Lens' AnalyzeSyntaxRequest (Maybe Document)
asrDocument
= lens _asrDocument (\ s a -> s{_asrDocument = a})
instance FromJSON AnalyzeSyntaxRequest where
parseJSON
= withObject "AnalyzeSyntaxRequest"
(\ o ->
AnalyzeSyntaxRequest' <$>
(o .:? "encodingType") <*> (o .:? "document"))
instance ToJSON AnalyzeSyntaxRequest where
toJSON AnalyzeSyntaxRequest'{..}
= object
(catMaybes
[("encodingType" .=) <$> _asrEncodingType,
("document" .=) <$> _asrDocument])
-- | Represents dependency parse tree information for a token. (For more
-- information on dependency labels, see
-- http:\/\/www.aclweb.org\/anthology\/P13-2017).
--
-- /See:/ 'dependencyEdge' smart constructor.
data DependencyEdge = DependencyEdge'
{ _deHeadTokenIndex :: !(Maybe (Textual Int32))
, _deLabel :: !(Maybe DependencyEdgeLabel)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'DependencyEdge' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'deHeadTokenIndex'
--
-- * 'deLabel'
dependencyEdge
:: DependencyEdge
dependencyEdge =
DependencyEdge'
{ _deHeadTokenIndex = Nothing
, _deLabel = Nothing
}
-- | Represents the head of this token in the dependency tree. This is the
-- index of the token which has an arc going to this token. The index is
-- the position of the token in the array of tokens returned by the API
-- method. If this token is a root token, then the \`head_token_index\` is
-- its own index.
deHeadTokenIndex :: Lens' DependencyEdge (Maybe Int32)
deHeadTokenIndex
= lens _deHeadTokenIndex
(\ s a -> s{_deHeadTokenIndex = a})
. mapping _Coerce
-- | The parse label for the token.
deLabel :: Lens' DependencyEdge (Maybe DependencyEdgeLabel)
deLabel = lens _deLabel (\ s a -> s{_deLabel = a})
instance FromJSON DependencyEdge where
parseJSON
= withObject "DependencyEdge"
(\ o ->
DependencyEdge' <$>
(o .:? "headTokenIndex") <*> (o .:? "label"))
instance ToJSON DependencyEdge where
toJSON DependencyEdge'{..}
= object
(catMaybes
[("headTokenIndex" .=) <$> _deHeadTokenIndex,
("label" .=) <$> _deLabel])
-- | The \`Status\` type defines a logical error model that is suitable for
-- different programming environments, including REST APIs and RPC APIs. It
-- is used by [gRPC](https:\/\/github.com\/grpc). The error model is
-- designed to be: - Simple to use and understand for most users - Flexible
-- enough to meet unexpected needs # Overview The \`Status\` message
-- contains three pieces of data: error code, error message, and error
-- details. The error code should be an enum value of google.rpc.Code, but
-- it may accept additional error codes if needed. The error message should
-- be a developer-facing English message that helps developers *understand*
-- and *resolve* the error. If a localized user-facing error message is
-- needed, put the localized message in the error details or localize it in
-- the client. The optional error details may contain arbitrary information
-- about the error. There is a predefined set of error detail types in the
-- package \`google.rpc\` which can be used for common error conditions. #
-- Language mapping The \`Status\` message is the logical representation of
-- the error model, but it is not necessarily the actual wire format. When
-- the \`Status\` message is exposed in different client libraries and
-- different wire protocols, it can be mapped differently. For example, it
-- will likely be mapped to some exceptions in Java, but more likely mapped
-- to some error codes in C. # Other uses The error model and the
-- \`Status\` message can be used in a variety of environments, either with
-- or without APIs, to provide a consistent developer experience across
-- different environments. Example uses of this error model include: -
-- Partial errors. If a service needs to return partial errors to the
-- client, it may embed the \`Status\` in the normal response to indicate
-- the partial errors. - Workflow errors. A typical workflow has multiple
-- steps. Each step may have a \`Status\` message for error reporting
-- purpose. - Batch operations. If a client uses batch request and batch
-- response, the \`Status\` message should be used directly inside batch
-- response, one for each error sub-response. - Asynchronous operations. If
-- an API call embeds asynchronous operation results in its response, the
-- status of those operations should be represented directly using the
-- \`Status\` message. - Logging. If some API errors are stored in logs,
-- the message \`Status\` could be used directly after any stripping needed
-- for security\/privacy reasons.
--
-- /See:/ 'status' smart constructor.
data Status = Status'
{ _sDetails :: !(Maybe [StatusDetailsItem])
, _sCode :: !(Maybe (Textual Int32))
, _sMessage :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Status' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sDetails'
--
-- * 'sCode'
--
-- * 'sMessage'
status
:: Status
status =
Status'
{ _sDetails = Nothing
, _sCode = Nothing
, _sMessage = Nothing
}
-- | A list of messages that carry the error details. There will be a common
-- set of message types for APIs to use.
sDetails :: Lens' Status [StatusDetailsItem]
sDetails
= lens _sDetails (\ s a -> s{_sDetails = a}) .
_Default
. _Coerce
-- | The status code, which should be an enum value of google.rpc.Code.
sCode :: Lens' Status (Maybe Int32)
sCode
= lens _sCode (\ s a -> s{_sCode = a}) .
mapping _Coerce
-- | A developer-facing error message, which should be in English. Any
-- user-facing error message should be localized and sent in the
-- google.rpc.Status.details field, or localized by the client.
sMessage :: Lens' Status (Maybe Text)
sMessage = lens _sMessage (\ s a -> s{_sMessage = a})
instance FromJSON Status where
parseJSON
= withObject "Status"
(\ o ->
Status' <$>
(o .:? "details" .!= mempty) <*> (o .:? "code") <*>
(o .:? "message"))
instance ToJSON Status where
toJSON Status'{..}
= object
(catMaybes
[("details" .=) <$> _sDetails,
("code" .=) <$> _sCode,
("message" .=) <$> _sMessage])
-- | Represents the feeling associated with the entire text or entities in
-- the text.
--
-- /See:/ 'sentiment' smart constructor.
data Sentiment = Sentiment'
{ _sScore :: !(Maybe (Textual Double))
, _sMagnitude :: !(Maybe (Textual Double))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Sentiment' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sScore'
--
-- * 'sMagnitude'
sentiment
:: Sentiment
sentiment =
Sentiment'
{ _sScore = Nothing
, _sMagnitude = Nothing
}
-- | Sentiment score between -1.0 (negative sentiment) and 1.0 (positive
-- sentiment).
sScore :: Lens' Sentiment (Maybe Double)
sScore
= lens _sScore (\ s a -> s{_sScore = a}) .
mapping _Coerce
-- | A non-negative number in the [0, +inf) range, which represents the
-- absolute magnitude of sentiment regardless of score (positive or
-- negative).
sMagnitude :: Lens' Sentiment (Maybe Double)
sMagnitude
= lens _sMagnitude (\ s a -> s{_sMagnitude = a}) .
mapping _Coerce
instance FromJSON Sentiment where
parseJSON
= withObject "Sentiment"
(\ o ->
Sentiment' <$>
(o .:? "score") <*> (o .:? "magnitude"))
instance ToJSON Sentiment where
toJSON Sentiment'{..}
= object
(catMaybes
[("score" .=) <$> _sScore,
("magnitude" .=) <$> _sMagnitude])
-- | Represents the smallest syntactic building block of the text.
--
-- /See:/ 'token' smart constructor.
data Token = Token'
{ _tDependencyEdge :: !(Maybe DependencyEdge)
, _tText :: !(Maybe TextSpan)
, _tLemma :: !(Maybe Text)
, _tPartOfSpeech :: !(Maybe PartOfSpeech)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Token' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tDependencyEdge'
--
-- * 'tText'
--
-- * 'tLemma'
--
-- * 'tPartOfSpeech'
token
:: Token
token =
Token'
{ _tDependencyEdge = Nothing
, _tText = Nothing
, _tLemma = Nothing
, _tPartOfSpeech = Nothing
}
-- | Dependency tree parse for this token.
tDependencyEdge :: Lens' Token (Maybe DependencyEdge)
tDependencyEdge
= lens _tDependencyEdge
(\ s a -> s{_tDependencyEdge = a})
-- | The token text.
tText :: Lens' Token (Maybe TextSpan)
tText = lens _tText (\ s a -> s{_tText = a})
-- | [Lemma](https:\/\/en.wikipedia.org\/wiki\/Lemma_%28morphology%29) of the
-- token.
tLemma :: Lens' Token (Maybe Text)
tLemma = lens _tLemma (\ s a -> s{_tLemma = a})
-- | Parts of speech tag for this token.
tPartOfSpeech :: Lens' Token (Maybe PartOfSpeech)
tPartOfSpeech
= lens _tPartOfSpeech
(\ s a -> s{_tPartOfSpeech = a})
instance FromJSON Token where
parseJSON
= withObject "Token"
(\ o ->
Token' <$>
(o .:? "dependencyEdge") <*> (o .:? "text") <*>
(o .:? "lemma")
<*> (o .:? "partOfSpeech"))
instance ToJSON Token where
toJSON Token'{..}
= object
(catMaybes
[("dependencyEdge" .=) <$> _tDependencyEdge,
("text" .=) <$> _tText, ("lemma" .=) <$> _tLemma,
("partOfSpeech" .=) <$> _tPartOfSpeech])
--
-- /See:/ 'statusDetailsItem' smart constructor.
newtype StatusDetailsItem = StatusDetailsItem'
{ _sdiAddtional :: HashMap Text JSONValue
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'StatusDetailsItem' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sdiAddtional'
statusDetailsItem
:: HashMap Text JSONValue -- ^ 'sdiAddtional'
-> StatusDetailsItem
statusDetailsItem pSdiAddtional_ =
StatusDetailsItem'
{ _sdiAddtional = _Coerce # pSdiAddtional_
}
-- | Properties of the object. Contains field \'type with type URL.
sdiAddtional :: Lens' StatusDetailsItem (HashMap Text JSONValue)
sdiAddtional
= lens _sdiAddtional (\ s a -> s{_sdiAddtional = a})
. _Coerce
instance FromJSON StatusDetailsItem where
parseJSON
= withObject "StatusDetailsItem"
(\ o -> StatusDetailsItem' <$> (parseJSONObject o))
instance ToJSON StatusDetailsItem where
toJSON = toJSON . _sdiAddtional
-- | The request message for the text annotation API, which can perform
-- multiple analysis types (sentiment, entities, and syntax) in one call.
--
-- /See:/ 'annotateTextRequest' smart constructor.
data AnnotateTextRequest = AnnotateTextRequest'
{ _atrEncodingType :: !(Maybe AnnotateTextRequestEncodingType)
, _atrFeatures :: !(Maybe Features)
, _atrDocument :: !(Maybe Document)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AnnotateTextRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'atrEncodingType'
--
-- * 'atrFeatures'
--
-- * 'atrDocument'
annotateTextRequest
:: AnnotateTextRequest
annotateTextRequest =
AnnotateTextRequest'
{ _atrEncodingType = Nothing
, _atrFeatures = Nothing
, _atrDocument = Nothing
}
-- | The encoding type used by the API to calculate offsets.
atrEncodingType :: Lens' AnnotateTextRequest (Maybe AnnotateTextRequestEncodingType)
atrEncodingType
= lens _atrEncodingType
(\ s a -> s{_atrEncodingType = a})
-- | The enabled features.
atrFeatures :: Lens' AnnotateTextRequest (Maybe Features)
atrFeatures
= lens _atrFeatures (\ s a -> s{_atrFeatures = a})
-- | Input document.
atrDocument :: Lens' AnnotateTextRequest (Maybe Document)
atrDocument
= lens _atrDocument (\ s a -> s{_atrDocument = a})
instance FromJSON AnnotateTextRequest where
parseJSON
= withObject "AnnotateTextRequest"
(\ o ->
AnnotateTextRequest' <$>
(o .:? "encodingType") <*> (o .:? "features") <*>
(o .:? "document"))
instance ToJSON AnnotateTextRequest where
toJSON AnnotateTextRequest'{..}
= object
(catMaybes
[("encodingType" .=) <$> _atrEncodingType,
("features" .=) <$> _atrFeatures,
("document" .=) <$> _atrDocument])
-- | Represents a mention for an entity in the text. Currently, proper noun
-- mentions are supported.
--
-- /See:/ 'entityMention' smart constructor.
data EntityMention = EntityMention'
{ _emText :: !(Maybe TextSpan)
, _emType :: !(Maybe EntityMentionType)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'EntityMention' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'emText'
--
-- * 'emType'
entityMention
:: EntityMention
entityMention =
EntityMention'
{ _emText = Nothing
, _emType = Nothing
}
-- | The mention text.
emText :: Lens' EntityMention (Maybe TextSpan)
emText = lens _emText (\ s a -> s{_emText = a})
-- | The type of the entity mention.
emType :: Lens' EntityMention (Maybe EntityMentionType)
emType = lens _emType (\ s a -> s{_emType = a})
instance FromJSON EntityMention where
parseJSON
= withObject "EntityMention"
(\ o ->
EntityMention' <$> (o .:? "text") <*> (o .:? "type"))
instance ToJSON EntityMention where
toJSON EntityMention'{..}
= object
(catMaybes
[("text" .=) <$> _emText, ("type" .=) <$> _emType])
-- | Represents an output piece of text.
--
-- /See:/ 'textSpan' smart constructor.
data TextSpan = TextSpan'
{ _tsBeginOffSet :: !(Maybe (Textual Int32))
, _tsContent :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'TextSpan' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tsBeginOffSet'
--
-- * 'tsContent'
textSpan
:: TextSpan
textSpan =
TextSpan'
{ _tsBeginOffSet = Nothing
, _tsContent = Nothing
}
-- | The API calculates the beginning offset of the content in the original
-- document according to the EncodingType specified in the API request.
tsBeginOffSet :: Lens' TextSpan (Maybe Int32)
tsBeginOffSet
= lens _tsBeginOffSet
(\ s a -> s{_tsBeginOffSet = a})
. mapping _Coerce
-- | The content of the output text.
tsContent :: Lens' TextSpan (Maybe Text)
tsContent
= lens _tsContent (\ s a -> s{_tsContent = a})
instance FromJSON TextSpan where
parseJSON
= withObject "TextSpan"
(\ o ->
TextSpan' <$>
(o .:? "beginOffset") <*> (o .:? "content"))
instance ToJSON TextSpan where
toJSON TextSpan'{..}
= object
(catMaybes
[("beginOffset" .=) <$> _tsBeginOffSet,
("content" .=) <$> _tsContent])
-- | The text annotations response message.
--
-- /See:/ 'annotateTextResponse' smart constructor.
data AnnotateTextResponse = AnnotateTextResponse'
{ _atrEntities :: !(Maybe [Entity])
, _atrTokens :: !(Maybe [Token])
, _atrDocumentSentiment :: !(Maybe Sentiment)
, _atrSentences :: !(Maybe [Sentence])
, _atrLanguage :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AnnotateTextResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'atrEntities'
--
-- * 'atrTokens'
--
-- * 'atrDocumentSentiment'
--
-- * 'atrSentences'
--
-- * 'atrLanguage'
annotateTextResponse
:: AnnotateTextResponse
annotateTextResponse =
AnnotateTextResponse'
{ _atrEntities = Nothing
, _atrTokens = Nothing
, _atrDocumentSentiment = Nothing
, _atrSentences = Nothing
, _atrLanguage = Nothing
}
-- | Entities, along with their semantic information, in the input document.
-- Populated if the user enables
-- AnnotateTextRequest.Features.extract_entities.
atrEntities :: Lens' AnnotateTextResponse [Entity]
atrEntities
= lens _atrEntities (\ s a -> s{_atrEntities = a}) .
_Default
. _Coerce
-- | Tokens, along with their syntactic information, in the input document.
-- Populated if the user enables
-- AnnotateTextRequest.Features.extract_syntax.
atrTokens :: Lens' AnnotateTextResponse [Token]
atrTokens
= lens _atrTokens (\ s a -> s{_atrTokens = a}) .
_Default
. _Coerce
-- | The overall sentiment for the document. Populated if the user enables
-- AnnotateTextRequest.Features.extract_document_sentiment.
atrDocumentSentiment :: Lens' AnnotateTextResponse (Maybe Sentiment)
atrDocumentSentiment
= lens _atrDocumentSentiment
(\ s a -> s{_atrDocumentSentiment = a})
-- | Sentences in the input document. Populated if the user enables
-- AnnotateTextRequest.Features.extract_syntax.
atrSentences :: Lens' AnnotateTextResponse [Sentence]
atrSentences
= lens _atrSentences (\ s a -> s{_atrSentences = a})
. _Default
. _Coerce
-- | The language of the text, which will be the same as the language
-- specified in the request or, if not specified, the
-- automatically-detected language. See \`Document.language\` field for
-- more details.
atrLanguage :: Lens' AnnotateTextResponse (Maybe Text)
atrLanguage
= lens _atrLanguage (\ s a -> s{_atrLanguage = a})
instance FromJSON AnnotateTextResponse where
parseJSON
= withObject "AnnotateTextResponse"
(\ o ->
AnnotateTextResponse' <$>
(o .:? "entities" .!= mempty) <*>
(o .:? "tokens" .!= mempty)
<*> (o .:? "documentSentiment")
<*> (o .:? "sentences" .!= mempty)
<*> (o .:? "language"))
instance ToJSON AnnotateTextResponse where
toJSON AnnotateTextResponse'{..}
= object
(catMaybes
[("entities" .=) <$> _atrEntities,
("tokens" .=) <$> _atrTokens,
("documentSentiment" .=) <$> _atrDocumentSentiment,
("sentences" .=) <$> _atrSentences,
("language" .=) <$> _atrLanguage])
-- | All available features for sentiment, syntax, and semantic analysis.
-- Setting each one to true will enable that specific analysis for the
-- input.
--
-- /See:/ 'features' smart constructor.
data Features = Features'
{ _fExtractSyntax :: !(Maybe Bool)
, _fExtractDocumentSentiment :: !(Maybe Bool)
, _fExtractEntities :: !(Maybe Bool)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Features' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fExtractSyntax'
--
-- * 'fExtractDocumentSentiment'
--
-- * 'fExtractEntities'
features
:: Features
features =
Features'
{ _fExtractSyntax = Nothing
, _fExtractDocumentSentiment = Nothing
, _fExtractEntities = Nothing
}
-- | Extract syntax information.
fExtractSyntax :: Lens' Features (Maybe Bool)
fExtractSyntax
= lens _fExtractSyntax
(\ s a -> s{_fExtractSyntax = a})
-- | Extract document-level sentiment.
fExtractDocumentSentiment :: Lens' Features (Maybe Bool)
fExtractDocumentSentiment
= lens _fExtractDocumentSentiment
(\ s a -> s{_fExtractDocumentSentiment = a})
-- | Extract entities.
fExtractEntities :: Lens' Features (Maybe Bool)
fExtractEntities
= lens _fExtractEntities
(\ s a -> s{_fExtractEntities = a})
instance FromJSON Features where
parseJSON
= withObject "Features"
(\ o ->
Features' <$>
(o .:? "extractSyntax") <*>
(o .:? "extractDocumentSentiment")
<*> (o .:? "extractEntities"))
instance ToJSON Features where
toJSON Features'{..}
= object
(catMaybes
[("extractSyntax" .=) <$> _fExtractSyntax,
("extractDocumentSentiment" .=) <$>
_fExtractDocumentSentiment,
("extractEntities" .=) <$> _fExtractEntities])
-- | Represents the input to API methods.
--
-- /See:/ 'document' smart constructor.
data Document = Document'
{ _dContent :: !(Maybe Text)
, _dLanguage :: !(Maybe Text)
, _dGcsContentURI :: !(Maybe Text)
, _dType :: !(Maybe DocumentType)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Document' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dContent'
--
-- * 'dLanguage'
--
-- * 'dGcsContentURI'
--
-- * 'dType'
document
:: Document
document =
Document'
{ _dContent = Nothing
, _dLanguage = Nothing
, _dGcsContentURI = Nothing
, _dType = Nothing
}
-- | The content of the input in string format.
dContent :: Lens' Document (Maybe Text)
dContent = lens _dContent (\ s a -> s{_dContent = a})
-- | The language of the document (if not specified, the language is
-- automatically detected). Both ISO and BCP-47 language codes are
-- accepted.
-- **Current Language Restrictions:** * Only English, Spanish, and Japanese
-- textual content is supported. If the language (either specified by the
-- caller or automatically detected) is not supported by the called API
-- method, an \`INVALID_ARGUMENT\` error is returned.
dLanguage :: Lens' Document (Maybe Text)
dLanguage
= lens _dLanguage (\ s a -> s{_dLanguage = a})
-- | The Google Cloud Storage URI where the file content is located. This URI
-- must be of the form: gs:\/\/bucket_name\/object_name. For more details,
-- see https:\/\/cloud.google.com\/storage\/docs\/reference-uris. NOTE:
-- Cloud Storage object versioning is not supported.
dGcsContentURI :: Lens' Document (Maybe Text)
dGcsContentURI
= lens _dGcsContentURI
(\ s a -> s{_dGcsContentURI = a})
-- | Required. If the type is not set or is \`TYPE_UNSPECIFIED\`, returns an
-- \`INVALID_ARGUMENT\` error.
dType :: Lens' Document (Maybe DocumentType)
dType = lens _dType (\ s a -> s{_dType = a})
instance FromJSON Document where
parseJSON
= withObject "Document"
(\ o ->
Document' <$>
(o .:? "content") <*> (o .:? "language") <*>
(o .:? "gcsContentUri")
<*> (o .:? "type"))
instance ToJSON Document where
toJSON Document'{..}
= object
(catMaybes
[("content" .=) <$> _dContent,
("language" .=) <$> _dLanguage,
("gcsContentUri" .=) <$> _dGcsContentURI,
("type" .=) <$> _dType])
-- | The sentiment analysis request message.
--
-- /See:/ 'analyzeSentimentRequest' smart constructor.
data AnalyzeSentimentRequest = AnalyzeSentimentRequest'
{ _aEncodingType :: !(Maybe AnalyzeSentimentRequestEncodingType)
, _aDocument :: !(Maybe Document)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AnalyzeSentimentRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aEncodingType'
--
-- * 'aDocument'
analyzeSentimentRequest
:: AnalyzeSentimentRequest
analyzeSentimentRequest =
AnalyzeSentimentRequest'
{ _aEncodingType = Nothing
, _aDocument = Nothing
}
-- | The encoding type used by the API to calculate sentence offsets.
aEncodingType :: Lens' AnalyzeSentimentRequest (Maybe AnalyzeSentimentRequestEncodingType)
aEncodingType
= lens _aEncodingType
(\ s a -> s{_aEncodingType = a})
-- | Input document. Currently, \`analyzeSentiment\` only supports English
-- text (Document.language=\"EN\").
aDocument :: Lens' AnalyzeSentimentRequest (Maybe Document)
aDocument
= lens _aDocument (\ s a -> s{_aDocument = a})
instance FromJSON AnalyzeSentimentRequest where
parseJSON
= withObject "AnalyzeSentimentRequest"
(\ o ->
AnalyzeSentimentRequest' <$>
(o .:? "encodingType") <*> (o .:? "document"))
instance ToJSON AnalyzeSentimentRequest where
toJSON AnalyzeSentimentRequest'{..}
= object
(catMaybes
[("encodingType" .=) <$> _aEncodingType,
("document" .=) <$> _aDocument])
-- | The entity analysis response message.
--
-- /See:/ 'analyzeEntitiesResponse' smart constructor.
data AnalyzeEntitiesResponse = AnalyzeEntitiesResponse'
{ _aerEntities :: !(Maybe [Entity])
, _aerLanguage :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AnalyzeEntitiesResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aerEntities'
--
-- * 'aerLanguage'
analyzeEntitiesResponse
:: AnalyzeEntitiesResponse
analyzeEntitiesResponse =
AnalyzeEntitiesResponse'
{ _aerEntities = Nothing
, _aerLanguage = Nothing
}
-- | The recognized entities in the input document.
aerEntities :: Lens' AnalyzeEntitiesResponse [Entity]
aerEntities
= lens _aerEntities (\ s a -> s{_aerEntities = a}) .
_Default
. _Coerce
-- | The language of the text, which will be the same as the language
-- specified in the request or, if not specified, the
-- automatically-detected language. See \`Document.language\` field for
-- more details.
aerLanguage :: Lens' AnalyzeEntitiesResponse (Maybe Text)
aerLanguage
= lens _aerLanguage (\ s a -> s{_aerLanguage = a})
instance FromJSON AnalyzeEntitiesResponse where
parseJSON
= withObject "AnalyzeEntitiesResponse"
(\ o ->
AnalyzeEntitiesResponse' <$>
(o .:? "entities" .!= mempty) <*> (o .:? "language"))
instance ToJSON AnalyzeEntitiesResponse where
toJSON AnalyzeEntitiesResponse'{..}
= object
(catMaybes
[("entities" .=) <$> _aerEntities,
("language" .=) <$> _aerLanguage])
-- | The sentiment analysis response message.
--
-- /See:/ 'analyzeSentimentResponse' smart constructor.
data AnalyzeSentimentResponse = AnalyzeSentimentResponse'
{ _asrDocumentSentiment :: !(Maybe Sentiment)
, _asrSentences :: !(Maybe [Sentence])
, _asrLanguage :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AnalyzeSentimentResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'asrDocumentSentiment'
--
-- * 'asrSentences'
--
-- * 'asrLanguage'
analyzeSentimentResponse
:: AnalyzeSentimentResponse
analyzeSentimentResponse =
AnalyzeSentimentResponse'
{ _asrDocumentSentiment = Nothing
, _asrSentences = Nothing
, _asrLanguage = Nothing
}
-- | The overall sentiment of the input document.
asrDocumentSentiment :: Lens' AnalyzeSentimentResponse (Maybe Sentiment)
asrDocumentSentiment
= lens _asrDocumentSentiment
(\ s a -> s{_asrDocumentSentiment = a})
-- | The sentiment for all the sentences in the document.
asrSentences :: Lens' AnalyzeSentimentResponse [Sentence]
asrSentences
= lens _asrSentences (\ s a -> s{_asrSentences = a})
. _Default
. _Coerce
-- | The language of the text, which will be the same as the language
-- specified in the request or, if not specified, the
-- automatically-detected language. See \`Document.language\` field for
-- more details.
asrLanguage :: Lens' AnalyzeSentimentResponse (Maybe Text)
asrLanguage
= lens _asrLanguage (\ s a -> s{_asrLanguage = a})
instance FromJSON AnalyzeSentimentResponse where
parseJSON
= withObject "AnalyzeSentimentResponse"
(\ o ->
AnalyzeSentimentResponse' <$>
(o .:? "documentSentiment") <*>
(o .:? "sentences" .!= mempty)
<*> (o .:? "language"))
instance ToJSON AnalyzeSentimentResponse where
toJSON AnalyzeSentimentResponse'{..}
= object
(catMaybes
[("documentSentiment" .=) <$> _asrDocumentSentiment,
("sentences" .=) <$> _asrSentences,
("language" .=) <$> _asrLanguage])
-- | The entity analysis request message.
--
-- /See:/ 'analyzeEntitiesRequest' smart constructor.
data AnalyzeEntitiesRequest = AnalyzeEntitiesRequest'
{ _aerEncodingType :: !(Maybe AnalyzeEntitiesRequestEncodingType)
, _aerDocument :: !(Maybe Document)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AnalyzeEntitiesRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aerEncodingType'
--
-- * 'aerDocument'
analyzeEntitiesRequest
:: AnalyzeEntitiesRequest
analyzeEntitiesRequest =
AnalyzeEntitiesRequest'
{ _aerEncodingType = Nothing
, _aerDocument = Nothing
}
-- | The encoding type used by the API to calculate offsets.
aerEncodingType :: Lens' AnalyzeEntitiesRequest (Maybe AnalyzeEntitiesRequestEncodingType)
aerEncodingType
= lens _aerEncodingType
(\ s a -> s{_aerEncodingType = a})
-- | Input document.
aerDocument :: Lens' AnalyzeEntitiesRequest (Maybe Document)
aerDocument
= lens _aerDocument (\ s a -> s{_aerDocument = a})
instance FromJSON AnalyzeEntitiesRequest where
parseJSON
= withObject "AnalyzeEntitiesRequest"
(\ o ->
AnalyzeEntitiesRequest' <$>
(o .:? "encodingType") <*> (o .:? "document"))
instance ToJSON AnalyzeEntitiesRequest where
toJSON AnalyzeEntitiesRequest'{..}
= object
(catMaybes
[("encodingType" .=) <$> _aerEncodingType,
("document" .=) <$> _aerDocument])
-- | Represents a phrase in the text that is a known entity, such as a
-- person, an organization, or a location. The API associates information,
-- such as salience and mentions, with entities.
--
-- /See:/ 'entity' smart constructor.
data Entity = Entity'
{ _eName :: !(Maybe Text)
, _eSalience :: !(Maybe (Textual Double))
, _eMetadata :: !(Maybe EntityMetadata)
, _eType :: !(Maybe EntityType)
, _eMentions :: !(Maybe [EntityMention])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Entity' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'eName'
--
-- * 'eSalience'
--
-- * 'eMetadata'
--
-- * 'eType'
--
-- * 'eMentions'
entity
:: Entity
entity =
Entity'
{ _eName = Nothing
, _eSalience = Nothing
, _eMetadata = Nothing
, _eType = Nothing
, _eMentions = Nothing
}
-- | The representative name for the entity.
eName :: Lens' Entity (Maybe Text)
eName = lens _eName (\ s a -> s{_eName = a})
-- | The salience score associated with the entity in the [0, 1.0] range. The
-- salience score for an entity provides information about the importance
-- or centrality of that entity to the entire document text. Scores closer
-- to 0 are less salient, while scores closer to 1.0 are highly salient.
eSalience :: Lens' Entity (Maybe Double)
eSalience
= lens _eSalience (\ s a -> s{_eSalience = a}) .
mapping _Coerce
-- | Metadata associated with the entity. Currently, Wikipedia URLs and
-- Knowledge Graph MIDs are provided, if available. The associated keys are
-- \"wikipedia_url\" and \"mid\", respectively.
eMetadata :: Lens' Entity (Maybe EntityMetadata)
eMetadata
= lens _eMetadata (\ s a -> s{_eMetadata = a})
-- | The entity type.
eType :: Lens' Entity (Maybe EntityType)
eType = lens _eType (\ s a -> s{_eType = a})
-- | The mentions of this entity in the input document. The API currently
-- supports proper noun mentions.
eMentions :: Lens' Entity [EntityMention]
eMentions
= lens _eMentions (\ s a -> s{_eMentions = a}) .
_Default
. _Coerce
instance FromJSON Entity where
parseJSON
= withObject "Entity"
(\ o ->
Entity' <$>
(o .:? "name") <*> (o .:? "salience") <*>
(o .:? "metadata")
<*> (o .:? "type")
<*> (o .:? "mentions" .!= mempty))
instance ToJSON Entity where
toJSON Entity'{..}
= object
(catMaybes
[("name" .=) <$> _eName,
("salience" .=) <$> _eSalience,
("metadata" .=) <$> _eMetadata,
("type" .=) <$> _eType,
("mentions" .=) <$> _eMentions])
-- | The syntax analysis response message.
--
-- /See:/ 'analyzeSyntaxResponse' smart constructor.
data AnalyzeSyntaxResponse = AnalyzeSyntaxResponse'
{ _aTokens :: !(Maybe [Token])
, _aSentences :: !(Maybe [Sentence])
, _aLanguage :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AnalyzeSyntaxResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aTokens'
--
-- * 'aSentences'
--
-- * 'aLanguage'
analyzeSyntaxResponse
:: AnalyzeSyntaxResponse
analyzeSyntaxResponse =
AnalyzeSyntaxResponse'
{ _aTokens = Nothing
, _aSentences = Nothing
, _aLanguage = Nothing
}
-- | Tokens, along with their syntactic information, in the input document.
aTokens :: Lens' AnalyzeSyntaxResponse [Token]
aTokens
= lens _aTokens (\ s a -> s{_aTokens = a}) . _Default
. _Coerce
-- | Sentences in the input document.
aSentences :: Lens' AnalyzeSyntaxResponse [Sentence]
aSentences
= lens _aSentences (\ s a -> s{_aSentences = a}) .
_Default
. _Coerce
-- | The language of the text, which will be the same as the language
-- specified in the request or, if not specified, the
-- automatically-detected language. See \`Document.language\` field for
-- more details.
aLanguage :: Lens' AnalyzeSyntaxResponse (Maybe Text)
aLanguage
= lens _aLanguage (\ s a -> s{_aLanguage = a})
instance FromJSON AnalyzeSyntaxResponse where
parseJSON
= withObject "AnalyzeSyntaxResponse"
(\ o ->
AnalyzeSyntaxResponse' <$>
(o .:? "tokens" .!= mempty) <*>
(o .:? "sentences" .!= mempty)
<*> (o .:? "language"))
instance ToJSON AnalyzeSyntaxResponse where
toJSON AnalyzeSyntaxResponse'{..}
= object
(catMaybes
[("tokens" .=) <$> _aTokens,
("sentences" .=) <$> _aSentences,
("language" .=) <$> _aLanguage])
-- | Metadata associated with the entity. Currently, Wikipedia URLs and
-- Knowledge Graph MIDs are provided, if available. The associated keys are
-- \"wikipedia_url\" and \"mid\", respectively.
--
-- /See:/ 'entityMetadata' smart constructor.
newtype EntityMetadata = EntityMetadata'
{ _emAddtional :: HashMap Text Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'EntityMetadata' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'emAddtional'
entityMetadata
:: HashMap Text Text -- ^ 'emAddtional'
-> EntityMetadata
entityMetadata pEmAddtional_ =
EntityMetadata'
{ _emAddtional = _Coerce # pEmAddtional_
}
emAddtional :: Lens' EntityMetadata (HashMap Text Text)
emAddtional
= lens _emAddtional (\ s a -> s{_emAddtional = a}) .
_Coerce
instance FromJSON EntityMetadata where
parseJSON
= withObject "EntityMetadata"
(\ o -> EntityMetadata' <$> (parseJSONObject o))
instance ToJSON EntityMetadata where
toJSON = toJSON . _emAddtional
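-- A hypothetical usage sketch (not part of the generated module): the
-- metadata is a plain string map, so with @Data.HashMap.Strict.fromList@ in
-- scope one could write
--
-- > entityMetadata (fromList [("wikipedia_url", "https://en.wikipedia.org/wiki/Example")])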
-- | Represents part of speech information for a token. Parts of speech are
-- as defined in
-- http:\/\/www.lrec-conf.org\/proceedings\/lrec2012\/pdf\/274_Paper.pdf
--
-- /See:/ 'partOfSpeech' smart constructor.
data PartOfSpeech = PartOfSpeech'
{ _posProper :: !(Maybe PartOfSpeechProper)
, _posTag :: !(Maybe PartOfSpeechTag)
, _posPerson :: !(Maybe PartOfSpeechPerson)
, _posAspect :: !(Maybe PartOfSpeechAspect)
, _posCase :: !(Maybe PartOfSpeechCase)
, _posGender :: !(Maybe PartOfSpeechGender)
, _posReciprocity :: !(Maybe PartOfSpeechReciprocity)
, _posNumber :: !(Maybe PartOfSpeechNumber)
, _posVoice :: !(Maybe PartOfSpeechVoice)
, _posForm :: !(Maybe PartOfSpeechForm)
, _posTense :: !(Maybe PartOfSpeechTense)
, _posMood :: !(Maybe PartOfSpeechMood)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'PartOfSpeech' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'posProper'
--
-- * 'posTag'
--
-- * 'posPerson'
--
-- * 'posAspect'
--
-- * 'posCase'
--
-- * 'posGender'
--
-- * 'posReciprocity'
--
-- * 'posNumber'
--
-- * 'posVoice'
--
-- * 'posForm'
--
-- * 'posTense'
--
-- * 'posMood'
partOfSpeech
:: PartOfSpeech
partOfSpeech =
PartOfSpeech'
{ _posProper = Nothing
, _posTag = Nothing
, _posPerson = Nothing
, _posAspect = Nothing
, _posCase = Nothing
, _posGender = Nothing
, _posReciprocity = Nothing
, _posNumber = Nothing
, _posVoice = Nothing
, _posForm = Nothing
, _posTense = Nothing
, _posMood = Nothing
}
-- | The grammatical properness.
posProper :: Lens' PartOfSpeech (Maybe PartOfSpeechProper)
posProper
= lens _posProper (\ s a -> s{_posProper = a})
-- | The part of speech tag.
posTag :: Lens' PartOfSpeech (Maybe PartOfSpeechTag)
posTag = lens _posTag (\ s a -> s{_posTag = a})
-- | The grammatical person.
posPerson :: Lens' PartOfSpeech (Maybe PartOfSpeechPerson)
posPerson
= lens _posPerson (\ s a -> s{_posPerson = a})
-- | The grammatical aspect.
posAspect :: Lens' PartOfSpeech (Maybe PartOfSpeechAspect)
posAspect
= lens _posAspect (\ s a -> s{_posAspect = a})
-- | The grammatical case.
posCase :: Lens' PartOfSpeech (Maybe PartOfSpeechCase)
posCase = lens _posCase (\ s a -> s{_posCase = a})
-- | The grammatical gender.
posGender :: Lens' PartOfSpeech (Maybe PartOfSpeechGender)
posGender
= lens _posGender (\ s a -> s{_posGender = a})
-- | The grammatical reciprocity.
posReciprocity :: Lens' PartOfSpeech (Maybe PartOfSpeechReciprocity)
posReciprocity
= lens _posReciprocity
(\ s a -> s{_posReciprocity = a})
-- | The grammatical number.
posNumber :: Lens' PartOfSpeech (Maybe PartOfSpeechNumber)
posNumber
= lens _posNumber (\ s a -> s{_posNumber = a})
-- | The grammatical voice.
posVoice :: Lens' PartOfSpeech (Maybe PartOfSpeechVoice)
posVoice = lens _posVoice (\ s a -> s{_posVoice = a})
-- | The grammatical form.
posForm :: Lens' PartOfSpeech (Maybe PartOfSpeechForm)
posForm = lens _posForm (\ s a -> s{_posForm = a})
-- | The grammatical tense.
posTense :: Lens' PartOfSpeech (Maybe PartOfSpeechTense)
posTense = lens _posTense (\ s a -> s{_posTense = a})
-- | The grammatical mood.
posMood :: Lens' PartOfSpeech (Maybe PartOfSpeechMood)
posMood = lens _posMood (\ s a -> s{_posMood = a})
instance FromJSON PartOfSpeech where
parseJSON
= withObject "PartOfSpeech"
(\ o ->
PartOfSpeech' <$>
(o .:? "proper") <*> (o .:? "tag") <*>
(o .:? "person")
<*> (o .:? "aspect")
<*> (o .:? "case")
<*> (o .:? "gender")
<*> (o .:? "reciprocity")
<*> (o .:? "number")
<*> (o .:? "voice")
<*> (o .:? "form")
<*> (o .:? "tense")
<*> (o .:? "mood"))
instance ToJSON PartOfSpeech where
toJSON PartOfSpeech'{..}
= object
(catMaybes
[("proper" .=) <$> _posProper,
("tag" .=) <$> _posTag, ("person" .=) <$> _posPerson,
("aspect" .=) <$> _posAspect,
("case" .=) <$> _posCase,
("gender" .=) <$> _posGender,
("reciprocity" .=) <$> _posReciprocity,
("number" .=) <$> _posNumber,
("voice" .=) <$> _posVoice, ("form" .=) <$> _posForm,
("tense" .=) <$> _posTense,
("mood" .=) <$> _posMood])
-- | Represents a sentence in the input document.
--
-- /See:/ 'sentence' smart constructor.
data Sentence = Sentence'
{ _sSentiment :: !(Maybe Sentiment)
, _sText :: !(Maybe TextSpan)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'Sentence' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sSentiment'
--
-- * 'sText'
sentence
:: Sentence
sentence =
Sentence'
{ _sSentiment = Nothing
, _sText = Nothing
}
-- | For calls to AnalyzeSentiment or if
-- AnnotateTextRequest.Features.extract_document_sentiment is set to true,
-- this field will contain the sentiment for the sentence.
sSentiment :: Lens' Sentence (Maybe Sentiment)
sSentiment
= lens _sSentiment (\ s a -> s{_sSentiment = a})
-- | The sentence text.
sText :: Lens' Sentence (Maybe TextSpan)
sText = lens _sText (\ s a -> s{_sText = a})
instance FromJSON Sentence where
parseJSON
= withObject "Sentence"
(\ o ->
Sentence' <$> (o .:? "sentiment") <*> (o .:? "text"))
instance ToJSON Sentence where
toJSON Sentence'{..}
= object
(catMaybes
[("sentiment" .=) <$> _sSentiment,
("text" .=) <$> _sText])
|
rueshyna/gogol
|
gogol-language/gen/Network/Google/Language/Types/Product.hs
|
mpl-2.0
| 44,866 | 0 | 22 | 11,014 | 8,272 | 4,772 | 3,500 | 901 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DialogFlow.Projects.Locations.SecuritySettings.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates the specified SecuritySettings.
--
-- /See:/ <https://cloud.google.com/dialogflow/ Dialogflow API Reference> for @dialogflow.projects.locations.securitySettings.patch@.
module Network.Google.Resource.DialogFlow.Projects.Locations.SecuritySettings.Patch
(
-- * REST Resource
ProjectsLocationsSecuritySettingsPatchResource
-- * Creating a Request
, projectsLocationsSecuritySettingsPatch
, ProjectsLocationsSecuritySettingsPatch
-- * Request Lenses
, plsspXgafv
, plsspUploadProtocol
, plsspUpdateMask
, plsspAccessToken
, plsspUploadType
, plsspPayload
, plsspName
, plsspCallback
) where
import Network.Google.DialogFlow.Types
import Network.Google.Prelude
-- | A resource alias for @dialogflow.projects.locations.securitySettings.patch@ method which the
-- 'ProjectsLocationsSecuritySettingsPatch' request conforms to.
type ProjectsLocationsSecuritySettingsPatchResource =
"v3" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "updateMask" GFieldMask :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
GoogleCloudDialogflowCxV3SecuritySettings
:>
Patch '[JSON]
GoogleCloudDialogflowCxV3SecuritySettings
-- | Updates the specified SecuritySettings.
--
-- /See:/ 'projectsLocationsSecuritySettingsPatch' smart constructor.
data ProjectsLocationsSecuritySettingsPatch =
ProjectsLocationsSecuritySettingsPatch'
{ _plsspXgafv :: !(Maybe Xgafv)
, _plsspUploadProtocol :: !(Maybe Text)
, _plsspUpdateMask :: !(Maybe GFieldMask)
, _plsspAccessToken :: !(Maybe Text)
, _plsspUploadType :: !(Maybe Text)
, _plsspPayload :: !GoogleCloudDialogflowCxV3SecuritySettings
, _plsspName :: !Text
, _plsspCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsSecuritySettingsPatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plsspXgafv'
--
-- * 'plsspUploadProtocol'
--
-- * 'plsspUpdateMask'
--
-- * 'plsspAccessToken'
--
-- * 'plsspUploadType'
--
-- * 'plsspPayload'
--
-- * 'plsspName'
--
-- * 'plsspCallback'
projectsLocationsSecuritySettingsPatch
:: GoogleCloudDialogflowCxV3SecuritySettings -- ^ 'plsspPayload'
-> Text -- ^ 'plsspName'
-> ProjectsLocationsSecuritySettingsPatch
projectsLocationsSecuritySettingsPatch pPlsspPayload_ pPlsspName_ =
ProjectsLocationsSecuritySettingsPatch'
{ _plsspXgafv = Nothing
, _plsspUploadProtocol = Nothing
, _plsspUpdateMask = Nothing
, _plsspAccessToken = Nothing
, _plsspUploadType = Nothing
, _plsspPayload = pPlsspPayload_
, _plsspName = pPlsspName_
, _plsspCallback = Nothing
}
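-- A hypothetical usage sketch (not part of the generated module): given a
-- payload @ss :: GoogleCloudDialogflowCxV3SecuritySettings@ and an invented
-- resource name, the minimal request is
--
-- > projectsLocationsSecuritySettingsPatch ss
-- >   "projects/my-project/locations/global/securitySettings/my-settings"
--
-- with the update mask lens below restricting which fields are written.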
-- | V1 error format.
plsspXgafv :: Lens' ProjectsLocationsSecuritySettingsPatch (Maybe Xgafv)
plsspXgafv
= lens _plsspXgafv (\ s a -> s{_plsspXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plsspUploadProtocol :: Lens' ProjectsLocationsSecuritySettingsPatch (Maybe Text)
plsspUploadProtocol
= lens _plsspUploadProtocol
(\ s a -> s{_plsspUploadProtocol = a})
-- | Required. The mask to control which fields get updated. If the mask is
-- not present, all fields will be updated.
plsspUpdateMask :: Lens' ProjectsLocationsSecuritySettingsPatch (Maybe GFieldMask)
plsspUpdateMask
= lens _plsspUpdateMask
(\ s a -> s{_plsspUpdateMask = a})
-- | OAuth access token.
plsspAccessToken :: Lens' ProjectsLocationsSecuritySettingsPatch (Maybe Text)
plsspAccessToken
= lens _plsspAccessToken
(\ s a -> s{_plsspAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plsspUploadType :: Lens' ProjectsLocationsSecuritySettingsPatch (Maybe Text)
plsspUploadType
= lens _plsspUploadType
(\ s a -> s{_plsspUploadType = a})
-- | Multipart request metadata.
plsspPayload :: Lens' ProjectsLocationsSecuritySettingsPatch GoogleCloudDialogflowCxV3SecuritySettings
plsspPayload
= lens _plsspPayload (\ s a -> s{_plsspPayload = a})
-- | Required. Resource name of the settings. Format:
-- \`projects\/\/locations\/\/securitySettings\/\`.
plsspName :: Lens' ProjectsLocationsSecuritySettingsPatch Text
plsspName
= lens _plsspName (\ s a -> s{_plsspName = a})
-- | JSONP
plsspCallback :: Lens' ProjectsLocationsSecuritySettingsPatch (Maybe Text)
plsspCallback
= lens _plsspCallback
(\ s a -> s{_plsspCallback = a})
instance GoogleRequest
ProjectsLocationsSecuritySettingsPatch
where
type Rs ProjectsLocationsSecuritySettingsPatch =
GoogleCloudDialogflowCxV3SecuritySettings
type Scopes ProjectsLocationsSecuritySettingsPatch =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/dialogflow"]
requestClient
ProjectsLocationsSecuritySettingsPatch'{..}
= go _plsspName _plsspXgafv _plsspUploadProtocol
_plsspUpdateMask
_plsspAccessToken
_plsspUploadType
_plsspCallback
(Just AltJSON)
_plsspPayload
dialogFlowService
where go
= buildClient
(Proxy ::
Proxy ProjectsLocationsSecuritySettingsPatchResource)
mempty
|
brendanhay/gogol
|
gogol-dialogflow/gen/Network/Google/Resource/DialogFlow/Projects/Locations/SecuritySettings/Patch.hs
|
mpl-2.0
| 6,552 | 0 | 17 | 1,436 | 861 | 502 | 359 | 134 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.YouTubeReporting.Jobs.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets a job.
--
-- /See:/ <https://developers.google.com/youtube/reporting/v1/reports/ YouTube Reporting API Reference> for @youtubereporting.jobs.get@.
module Network.Google.Resource.YouTubeReporting.Jobs.Get
(
-- * REST Resource
JobsGetResource
-- * Creating a Request
, jobsGet
, JobsGet
-- * Request Lenses
, jgXgafv
, jgJobId
, jgUploadProtocol
, jgAccessToken
, jgUploadType
, jgOnBehalfOfContentOwner
, jgCallback
) where
import Network.Google.Prelude
import Network.Google.YouTubeReporting.Types
-- | A resource alias for @youtubereporting.jobs.get@ method which the
-- 'JobsGet' request conforms to.
type JobsGetResource =
"v1" :>
"jobs" :>
Capture "jobId" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "onBehalfOfContentOwner" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] Job
-- | Gets a job.
--
-- /See:/ 'jobsGet' smart constructor.
data JobsGet =
JobsGet'
{ _jgXgafv :: !(Maybe Xgafv)
, _jgJobId :: !Text
, _jgUploadProtocol :: !(Maybe Text)
, _jgAccessToken :: !(Maybe Text)
, _jgUploadType :: !(Maybe Text)
, _jgOnBehalfOfContentOwner :: !(Maybe Text)
, _jgCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'JobsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'jgXgafv'
--
-- * 'jgJobId'
--
-- * 'jgUploadProtocol'
--
-- * 'jgAccessToken'
--
-- * 'jgUploadType'
--
-- * 'jgOnBehalfOfContentOwner'
--
-- * 'jgCallback'
jobsGet
:: Text -- ^ 'jgJobId'
-> JobsGet
jobsGet pJgJobId_ =
JobsGet'
{ _jgXgafv = Nothing
, _jgJobId = pJgJobId_
, _jgUploadProtocol = Nothing
, _jgAccessToken = Nothing
, _jgUploadType = Nothing
, _jgOnBehalfOfContentOwner = Nothing
, _jgCallback = Nothing
}
-- | V1 error format.
jgXgafv :: Lens' JobsGet (Maybe Xgafv)
jgXgafv = lens _jgXgafv (\ s a -> s{_jgXgafv = a})
-- | The ID of the job to retrieve.
jgJobId :: Lens' JobsGet Text
jgJobId = lens _jgJobId (\ s a -> s{_jgJobId = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
jgUploadProtocol :: Lens' JobsGet (Maybe Text)
jgUploadProtocol
= lens _jgUploadProtocol
(\ s a -> s{_jgUploadProtocol = a})
-- | OAuth access token.
jgAccessToken :: Lens' JobsGet (Maybe Text)
jgAccessToken
= lens _jgAccessToken
(\ s a -> s{_jgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
jgUploadType :: Lens' JobsGet (Maybe Text)
jgUploadType
= lens _jgUploadType (\ s a -> s{_jgUploadType = a})
-- | The content owner\'s external ID, on whose behalf the user is acting.
-- If not set, the user is acting on their own behalf (their own channel).
jgOnBehalfOfContentOwner :: Lens' JobsGet (Maybe Text)
jgOnBehalfOfContentOwner
= lens _jgOnBehalfOfContentOwner
(\ s a -> s{_jgOnBehalfOfContentOwner = a})
-- | JSONP
jgCallback :: Lens' JobsGet (Maybe Text)
jgCallback
= lens _jgCallback (\ s a -> s{_jgCallback = a})
instance GoogleRequest JobsGet where
type Rs JobsGet = Job
type Scopes JobsGet =
'["https://www.googleapis.com/auth/yt-analytics-monetary.readonly",
"https://www.googleapis.com/auth/yt-analytics.readonly"]
requestClient JobsGet'{..}
= go _jgJobId _jgXgafv _jgUploadProtocol
_jgAccessToken
_jgUploadType
_jgOnBehalfOfContentOwner
_jgCallback
(Just AltJSON)
youTubeReportingService
where go
= buildClient (Proxy :: Proxy JobsGetResource) mempty
|
brendanhay/gogol
|
gogol-youtube-reporting/gen/Network/Google/Resource/YouTubeReporting/Jobs/Get.hs
|
mpl-2.0
| 4,730 | 0 | 17 | 1,131 | 782 | 455 | 327 | 110 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AppEngine.Apps.DomainMAppings.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets the specified domain mapping.
--
-- /See:/ <https://cloud.google.com/appengine/docs/admin-api/ App Engine Admin API Reference> for @appengine.apps.domainMappings.get@.
module Network.Google.Resource.AppEngine.Apps.DomainMAppings.Get
(
-- * REST Resource
AppsDomainMAppingsGetResource
-- * Creating a Request
, appsDomainMAppingsGet
, AppsDomainMAppingsGet
-- * Request Lenses
, admagXgafv
, admagUploadProtocol
, admagAccessToken
, admagUploadType
, admagAppsId
, admagDomainMAppingsId
, admagCallback
) where
import Network.Google.AppEngine.Types
import Network.Google.Prelude
-- | A resource alias for @appengine.apps.domainMappings.get@ method which the
-- 'AppsDomainMAppingsGet' request conforms to.
type AppsDomainMAppingsGetResource =
"v1" :>
"apps" :>
Capture "appsId" Text :>
"domainMappings" :>
Capture "domainMappingsId" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] DomainMApping
-- | Gets the specified domain mapping.
--
-- /See:/ 'appsDomainMAppingsGet' smart constructor.
data AppsDomainMAppingsGet =
AppsDomainMAppingsGet'
{ _admagXgafv :: !(Maybe Xgafv)
, _admagUploadProtocol :: !(Maybe Text)
, _admagAccessToken :: !(Maybe Text)
, _admagUploadType :: !(Maybe Text)
, _admagAppsId :: !Text
, _admagDomainMAppingsId :: !Text
, _admagCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AppsDomainMAppingsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'admagXgafv'
--
-- * 'admagUploadProtocol'
--
-- * 'admagAccessToken'
--
-- * 'admagUploadType'
--
-- * 'admagAppsId'
--
-- * 'admagDomainMAppingsId'
--
-- * 'admagCallback'
appsDomainMAppingsGet
:: Text -- ^ 'admagAppsId'
-> Text -- ^ 'admagDomainMAppingsId'
-> AppsDomainMAppingsGet
appsDomainMAppingsGet pAdmagAppsId_ pAdmagDomainMAppingsId_ =
AppsDomainMAppingsGet'
{ _admagXgafv = Nothing
, _admagUploadProtocol = Nothing
, _admagAccessToken = Nothing
, _admagUploadType = Nothing
, _admagAppsId = pAdmagAppsId_
, _admagDomainMAppingsId = pAdmagDomainMAppingsId_
, _admagCallback = Nothing
}
-- | V1 error format.
admagXgafv :: Lens' AppsDomainMAppingsGet (Maybe Xgafv)
admagXgafv
= lens _admagXgafv (\ s a -> s{_admagXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
admagUploadProtocol :: Lens' AppsDomainMAppingsGet (Maybe Text)
admagUploadProtocol
= lens _admagUploadProtocol
(\ s a -> s{_admagUploadProtocol = a})
-- | OAuth access token.
admagAccessToken :: Lens' AppsDomainMAppingsGet (Maybe Text)
admagAccessToken
= lens _admagAccessToken
(\ s a -> s{_admagAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
admagUploadType :: Lens' AppsDomainMAppingsGet (Maybe Text)
admagUploadType
= lens _admagUploadType
(\ s a -> s{_admagUploadType = a})
-- | Part of \`name\`. Name of the resource requested. Example:
-- apps\/myapp\/domainMappings\/example.com.
admagAppsId :: Lens' AppsDomainMAppingsGet Text
admagAppsId
= lens _admagAppsId (\ s a -> s{_admagAppsId = a})
-- | Part of \`name\`. See documentation of \`appsId\`.
admagDomainMAppingsId :: Lens' AppsDomainMAppingsGet Text
admagDomainMAppingsId
= lens _admagDomainMAppingsId
(\ s a -> s{_admagDomainMAppingsId = a})
-- | JSONP
admagCallback :: Lens' AppsDomainMAppingsGet (Maybe Text)
admagCallback
= lens _admagCallback
(\ s a -> s{_admagCallback = a})
instance GoogleRequest AppsDomainMAppingsGet where
type Rs AppsDomainMAppingsGet = DomainMApping
type Scopes AppsDomainMAppingsGet =
'["https://www.googleapis.com/auth/appengine.admin",
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only"]
requestClient AppsDomainMAppingsGet'{..}
= go _admagAppsId _admagDomainMAppingsId _admagXgafv
_admagUploadProtocol
_admagAccessToken
_admagUploadType
_admagCallback
(Just AltJSON)
appEngineService
where go
= buildClient
(Proxy :: Proxy AppsDomainMAppingsGetResource)
mempty
|
brendanhay/gogol
|
gogol-appengine/gen/Network/Google/Resource/AppEngine/Apps/DomainMAppings/Get.hs
|
mpl-2.0
| 5,512 | 0 | 18 | 1,232 | 785 | 458 | 327 | 119 | 1 |
module System.CloudyFS.Weather where
import Data.Convertible (convert)
import Data.Maybe (fromJust)
import Data.Time.LocalTime
import Data.ByteString.Char8 hiding (length)
import System.CloudyFS.Expiring
import System.Posix.Types
import Network.HTTP
import Text.HTML.TagSoup
import Data.DateTime (DateTime, addMinutes)
import qualified Data.Time.RFC2822 as RFC2822
data Weather = Weather {
time :: DateTime,
epochTime :: EpochTime,
conditions :: String,
temp :: String
}
instance Expiring Weather where
expiresAt (Weather _ t _ _) = addMinutes 60 (convert t)
asByteString :: Weather -> ByteString
asByteString w = pack $ conditions w ++ "\n" ++ temp w ++ "\n"
getURL :: String -> IO String
getURL x = getResponseBody =<< simpleHTTP (getRequest x)
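-- Fetch the current conditions for a NOAA station ID (e.g. "KSFO") by
-- downloading and parsing http://w1.weather.gov/xml/current_obs/<station>.xml;
-- yields Nothing when the response contains no <weather> element.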
fetchWeather :: String -> IO (Maybe Weather)
fetchWeather station = do
tags <- fmap parseTags $ getURL url
return $ makeWeather tags
where
url = "http://w1.weather.gov/xml/current_obs/" ++ station ++ ".xml"
makeWeather :: [Tag String] -> Maybe Weather
makeWeather tags =
if tagsOK tags
then Just $ Weather (getDate tags) (getEpochDate tags) (getConditions tags) (getTemperature tags)
else Nothing
where
tagsOK tg = length (sections (~== "<weather>") tg) > 0
getText tg blob = fromTagText $ sections (~== tg) blob !! 0 !! 1
parseDate t = fromJust $ RFC2822.readRFC2822 (getText "<observation_time_rfc822>" t)
getEpochDate t = convert $ zonedTimeToUTC $ parseDate t
getDate t = convert $ parseDate t
getConditions = (getText "<weather>")
getTemperature = (getText "<temperature_string>")
|
bhickey/cloudyfs
|
System/CloudyFS/Weather.hs
|
agpl-3.0
| 1,607 | 0 | 11 | 287 | 517 | 274 | 243 | 39 | 2 |
module J2s.Scanner.Show where
import J2s.Scanner.Token
-- Result Structure
instance Show Token where
show (Token j2s str pos) = show j2s ++ " " ++ show str ++ "\t" ++ show pos ++ "\n"
-- Name of Result Structure
instance Show J2sSc where
show Identifier = "Identifier : "
show KeyWord = "KeyWord : "
show SpecialSimbol = "Special Simbol : "
show BooleanLiteral = "Boolean Literal : "
show CharacterLiteral = "Character Literal : "
show StringLiteral = "String Literal : "
show Operator = "Operator : "
show LineComment = "Line Comment : "
show BlockComment = "Block Comment : "
show DecimalIntegerLiteral = "Integer Literal Decimal: "
show HexIntegerLiteral = "Integer Literal Hex : "
show OctalIntegerLiteral = "Integer Literal Octal : "
show DecimalFloatingPointLiteral = "Floating Literal Decimal : "
show HexadecimalFloatingPointLiteral = "Floating Literal Hexadecimal: "
show NullLiteral = "Null Literal : "
-- show TokMayor = "TOK MAYOR : "
show Error = "Error : "
|
andreagenso/java2scala
|
src/J2s/Scanner/Show.hs
|
apache-2.0
| 1,480 | 0 | 11 | 670 | 207 | 106 | 101 | 21 | 0 |
module DoubleOrIncrement.A304027Spec (main, spec) where
import Test.Hspec
import DoubleOrIncrement.A304027 (a304027)
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "A304027" $
it "correctly computes the first 20 elements" $
map a304027 [0..19] `shouldBe` expectedValue where
expectedValue = [0,3,0,5,2,0,8,3,1,0,7,4,6,10,0,6,3,3,1,7]
|
peterokagey/haskellOEIS
|
test/DoubleOrIncrement/A304027Spec.hs
|
apache-2.0
| 366 | 0 | 8 | 61 | 154 | 92 | 62 | 10 | 1 |
module LayoutRender ( drawPlay, drawOver ) where
import Util
import Text.Tabular
import Text.Tabular.AsciiArt ( render )
import Data.List.Split ( chunksOf )
import Data.Set ( Set )
import Data.Set as Set ( member )
-- | Put constructor Header on each element in sourceList.
rangeHeader :: Int -> [String] -> [Header String]
rangeHeader len sourceList = take len $ map Header sourceList
-- | Draw the game's layout according to the convert function and
-- the array of number of neighbour mines of each Point.
draw :: (Point -> String) -> [[Int]] -> IO ()
draw convert nums = putStr $ render id id id gridLayout
where (w, h) = dimension nums
grid :: [[Point]]
grid = chunksOf w $ gridPoints w h
gridLayout :: Table String String String
gridLayout = Table
(Group SingleLine
[ Group SingleLine $ rangeHeader h [[c] | c <- rows] ])
(Group SingleLine
[ Group SingleLine $ rangeHeader w [show n | n <- [0..]] ])
(map (map convert) grid)
-- | Draw the game's layout according to the open Points and
-- the array of number of neighbour mines of each Point.
drawPlay :: Set Point -> [[Int]] -> IO ()
drawPlay opens nums = draw convert nums
where -- Convert a Point position to its representation,
-- either black block or number of neighbour mines.
convert :: Point -> String
convert p | p `member` opens = show $ numAtPoint nums p
| otherwise = ['\x2588']
-- | Draw the game over layout.
drawOver :: Set Point -> [[Int]] -> IO ()
drawOver minePs nums = draw convert nums
where -- Convert a Point position to its representation in
-- String, either black block or number of neighbour mines.
convert :: Point -> String
convert p | p `member` minePs = "*"
| otherwise = show $ numAtPoint nums p
|
ljishen/Minesweeper
|
src/LayoutRender.hs
|
apache-2.0
| 1,945 | 0 | 16 | 600 | 490 | 261 | 229 | 30 | 1 |
{-|
Module : Pulsar.Label
Description : DCPU16 assembly labels.
Copyright : (c) Jesse Haber-Kucharsky, 2014
License : Apache-2.0
Labels are named identifiers for a specific location into a program.
Pulsar supports two kind of labels:
- Global labels, which can be referenced anywhere unambiguously.
- Local labels, indicated with a '@' in their name, which are scoped to the last global label. For instance,
> pizza:
> @cheese:
defines a global label \"pizza\" and a local label \"pizza@cheese\". Until
another global label is defined, the local label can be referenced as
\"@cheese\" directly.
-}
{-# LANGUAGE OverloadedStrings #-}
module Pulsar.Label
(Location (..)
,Table
,scan
,lookup
,exportSymbolTable
,update
,numDefined
,names)
where
import Prelude hiding (lookup)
import Pulsar.Ast hiding (empty)
import Pulsar.SymbolTable (SymbolTable)
import qualified Pulsar.SymbolTable as SymbolTable
import Control.Monad (foldM)
import qualified Data.ByteString.Char8 as BS
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (mapMaybe)
import Data.Monoid ((<>))
data Location
= Unresolved
| Location Word16
deriving (Eq, Show)
newtype Expanded = Expanded ByteString deriving (Show, Eq, Ord)
-- | Stores all the labels defined in the assembly source and keeps track of the
-- most-recently defined global label.
data Table = Table
{ entries :: Map Expanded Location
, lastGlobal :: ByteString
} deriving (Eq, Show)
exportSymbolTable :: Table -> SymbolTable
exportSymbolTable =
SymbolTable.fromList . mapMaybe get . Map.toList . entries
where
get :: (Expanded, Location) -> Maybe (ByteString, Word16)
get (_, Unresolved) = Nothing
get (Expanded bs, Location loc) = Just (bs, loc)
numDefined :: Table -> Int
numDefined = Map.size . entries
names :: Table -> [ByteString]
names = map (\(Expanded name) -> name) . Map.keys . entries
expand :: Table -> ByteString -> Expanded
expand tab name
| isGlobal name = Expanded name
| otherwise = Expanded $ lastGlobal tab <> name
isGlobal :: ByteString -> Bool
isGlobal name
| BS.null name = True
| BS.head name == '@' = False
| otherwise = True
update :: ByteString -> Word16 -> Table -> Table
update name value tab =
Table entries' lastGlobal'
where
entries' = Map.update (\_ -> Just . Location $ value)
(expand tab name)
(entries tab)
lastGlobal' = if isGlobal name then name else lastGlobal tab
lookup :: ByteString -> Table -> Maybe Location
lookup name tab = Map.lookup (expand tab name) $ entries tab
member :: ByteString -> Table -> Bool
member name tab = Map.member (expand tab name) $ entries tab
-- | Scan a DCPU AST for all defined labels. In the failure case that a label
-- is defined multiple times, the result is the source position of the first
-- duplicate definition encountered, together with the name of the label.
scan :: Ast SourcePos -> Either (SourcePos, ByteString) Table
scan = foldM next empty . getLabels
where
next :: Table -> (SourcePos, ByteString) -> Either (SourcePos, ByteString) Table
next tab (pos, name)
| member name tab = Left (pos, name)
| otherwise =
let entries' = Map.insert (expand tab name) Unresolved (entries tab)
lastGlobal' = if isGlobal name
then name
else lastGlobal tab
in
Right (Table entries' lastGlobal')
empty :: Table
empty = Table Map.empty ""
getLabels :: Ast SourcePos -> [(SourcePos, ByteString)]
getLabels = mapMaybe f
where
f (Statement pos (Label name)) = Just (pos, name)
f _ = Nothing
|
hakuch/Pulsar
|
src/Pulsar/Label.hs
|
apache-2.0
| 3,837 | 0 | 14 | 1,007 | 952 | 512 | 440 | 76 | 3 |
module Day1 where
import Data.List (foldl')
move :: Char -> Integer -> Integer
move '(' = succ
move ')' = pred
move x = error $ "unknown move type: " ++ [x]
floor :: String -> Integer
floor = foldl' (flip move) 0
basement :: String -> Integer
basement = fst . foldl' nextMove (-1, 0) . zip [1..]
nextMove :: (Integer, Integer) -> (Integer, Char) -> (Integer, Integer)
nextMove acc@(_, level) (pos, move')
| level == -1 = acc
| otherwise = (pos, move move' level)
|
taylor1791/adventofcode
|
2015/src/Day1.hs
|
bsd-2-clause
| 473 | 0 | 9 | 98 | 216 | 118 | 98 | 14 | 1 |
-- 329468
import Euler(digitUsage)
-- fibonacci calculation is too slow
-- only inspect the lower digits using a modulus
genLowerFib0 n a b = (n,c) : genLowerFib0 (n+1) b c
where c = (a + b) `mod` (10^9)
genLowerFib = (0,0):(1,1):(2,1):genLowerFib0 3 1 1
-- extract upper digits directly
-- as per MathWorld, FibN = phi ^ n / sqrt 5
-- fractional part of log10 represents leading digits
-- calculate 10 ^ (t+8) to get 9 leading digits
calcUpperFib n = f
where phi = (1 + sqrt 5) / 2
logPhi = logBase 10 phi
logSqrt5 = logBase 10 $ sqrt 5
x = (fromIntegral n) * logPhi - logSqrt5
t = x - fromIntegral (floor x)
f = round $ 10 ** (t + 8)
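-- A quick sanity check of the formula: for n = 7, x = 7 * log10 phi - log10 (sqrt 5)
-- ~ 1.4629 - 0.3495 = 1.1134, so t ~ 0.1134 and 10 ** t ~ 1.298, which matches
-- the leading digits of F(7) = 13.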
findFib = map fst $ filter (\(_,n) -> isPandigital n) genUpperFib
where isPandigital n = digitUsage 123456789 == digitUsage n
genUpperFib = map (\n -> (n, calcUpperFib n)) getFibK
getFibK = map fst $ filter (\(_,n) -> isPandigital n) genLowerFib
main = putStrLn $ show $ head $ findFib
|
higgsd/euler
|
hs/104.hs
|
bsd-2-clause
| 1,002 | 0 | 11 | 261 | 364 | 197 | 167 | 16 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Text.Parsec.Text.Lazy
-- Copyright : (c) Kazu Yamamoto 2010
-- License : BSD-style (see the LICENSE file)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Make lazy Texts an instance of 'Stream' with 'Char' token type.
--
-----------------------------------------------------------------------------
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Text.Parsec.Text.Lazy (
Parser, GenParser, parseFromFile
) where
import Text.Parsec.Error
import Text.Parsec.Prim
import qualified Data.Text.Lazy as T
import qualified Data.Text.Lazy.IO as T
instance (Monad m) => Stream T.Text m Char where
uncons = return . T.uncons
type Parser = Parsec T.Text ()
type GenParser t st = Parsec T.Text st
parseFromFile :: Parser a -> String -> IO (Either ParseError a)
parseFromFile p fname = do
input <- T.readFile fname
return (runP p () fname input)
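-- A minimal usage sketch (the file name is hypothetical, and 'many' and 'digit'
-- are assumed to be in scope from Text.Parsec):
--
-- > result <- parseFromFile (many digit) "numbers.txt"
-- > case result of
-- >   Left err -> print err
-- >   Right ds -> putStrLn ds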
|
kazu-yamamoto/parsec3
|
Text/Parsec/Text/Lazy.hs
|
bsd-2-clause
| 1,085 | 0 | 10 | 181 | 198 | 118 | 80 | 16 | 1 |
module Demo.Links ( parseInput
, getRect
, findScale
, scaleMap
, nmap
, mismatches
, mkLayout ) where
import Text.Read (readMaybe)
import System.Random
import Demo.Types
import qualified Data.Map as M (Map, empty, lookup)
import qualified Data.List as L (delete, length)
-- Config!
randomEmptyCells = 2 :: Int
randomValueRange = ('A','Z')
{- I think there is a lot of refactoring-opportunity in this file,
especially concerning the use of monads for parsing, but we'll
leave that for later.
-}
mismatches :: InputState -> Maybe InputState -> (Bool,[Int])
mismatches (InSt i1 s1 h1 m1) (Just (InSt i2 s2 h2 m2)) =
let headChanged = h1 /= h2
f = (matchIndex m1 m2)
in (headChanged, (foldr
f
[]
[i1 .. (i1 + s1 - 1)] ))
mismatches _ _ = (False,[]) -- if last InSt is Nothing, no change
matchIndex :: M.Map Int Cell -> M.Map Int Cell -> Int -> [Int] -> [Int]
matchIndex c b i is = let f m = (fmap snd (M.lookup i m))
in if (f c) == (f b)
then is
else i:is
{- There are two implemented functions for Step:
1. arrow: "we're looking for an arrow next"
2. box: "we're looking for a box next"
-}
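{- A small worked example with hypothetical memory contents, assuming a Cell
   is the (index, value) pair used by 'box' below:
     memVals a = fromList [(0,(0,"A")), (1,(1,"2")), (2,(2,"B")), (3,(3,""))]
     headVal a = "0"
   parseInput walks the box at 0, the arrow at 1 (pointing to 2) and the box
   at 2, then stops at the empty cell 3, yielding
     Right [(Box,"0","A"), (Arrow,"1","2"), (Box,"2","B")]
-}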
type Step = (MemSt, [Int]) -> Maybe Cell -> [DElem]
parseInput :: InputState -> Either String [DElem]
parseInput a = fmap (parse ((memVals a),[]) box) (testHead a)
-- I thought there'd be a convenience function for Maybe -> Either...
testHead :: InputState -> Either String Int
testHead s = case readInt (headVal s) of
Just i -> Right i
_ -> Left "The Head-Index is Invalid!"
parse :: ( MemSt -- The contents of the onscreen memory cells
, [Int] ) -- the list of seen indexes (to check for loopbacks)
-> Step -- The current "machine state" i.e. what its looking for
-> Int -- The index we're looking at right now
-> [DElem] -- the list of diagram elements we're after
parse st step i = let m = fst st
cell = M.lookup i m
in step st cell
box :: Step
box st (Just (i,val)) =
let newst = saw i st -- add box's index to the "seen" list
continue = (Box, (show i), val) : parse newst arrow (i + 1)
in if val == "" -- if empty, we must check to see if next-addr is empty
then case M.lookup (i + 1) (fst st) of
-- if it is empty, we don't draw the box
Nothing -> []
Just (_,"") -> []
_ -> continue
else continue
box _ _ = []
{- Failure for arrow: (end list)
   1. the arrow cell is empty/non-existent
2. the value of the arrow cell is not an Int
3. the cell which the arrow points to is empty/invalid
Special failure: (add loopback and end list)
4. the cell which the arrow points to is already seen
-}
arrow :: Step
arrow (m,s) (Just (i,val)) =
  -- first test for a valid next-index (check that it's an Int)
case readInt val of
-- then check whether it has been seen before
Just n -> case getIndex n s of
Just x -> [(LoopBack x, show i, val)]
-- finally check if it points to a valid box
_ -> tryIns (Arrow, show i, val)
(parse (m,s) box n)
_ -> []
 -- of course, if the cell didn't exist in the first place, return []
arrow _ _ = []
tryIns :: a -> [a] -> [a]
tryIns a [] = []
tryIns a xs = a:xs
readInt :: String -> Maybe Int
readInt = readMaybe
saw :: Int -> (MemSt, [Int]) -> (MemSt, [Int])
saw i (m,s) = (m, i:s)
getIndex :: (Eq a) => a -> [a] -> Maybe Int
getIndex = r 0
where r i a (x:xs) = if a == x
then Just i
else r (i+1) a xs
r _ _ _ = Nothing
mkLayout :: Diagram -> Layout
mkLayout = flow (0,0)
flow :: (Int, Int) -> [DElem] -> [LElem]
flow (x,y) (a:as) = bound a (x,y) : flow ((fst (sizeOf a)) + x, y) as
flow _ _ = []
bound :: DElem -> (Int, Int) -> LElem
bound elem (x,y) = let (xo, yo) = sizeOf elem
in (elem, (x,y), (xo,yo))
sizeOf :: DElem -> (Int, Int)
sizeOf (Box,_,_) = (2,3)
sizeOf (Arrow,_,_) = (2,3)
sizeOf (LoopBack _,_,_) = (2,5)
getRect :: Layout -> (Int, Int)
getRect = foldr f (0,0)
where f :: LElem -> (Int, Int) -> (Int, Int)
f (_, (x,y), (xo,yo)) (a,b) = (max (x+xo) a, max (y+yo) b)
findScale :: (Double, Double) -> (Int, Int) -> Double
findScale (cw,ch) (lw,lh) = let lwd = fromIntegral lw
lhd = fromIntegral lh
cwd = cw - 10
chd = ch - 10
in if (lhd * (cwd / lwd)) > chd
then chd / lhd
else cwd / lwd
scaleMap :: Double
-> (DElem, (Int, Int), (Int, Int))
-> (DElem, (Double, Double), (Double, Double))
scaleMap scale (e, (x,y), (xo,yo)) = (e, (f x, f y), (f xo, f yo))
where f a = scale * (fromIntegral a)
nmap :: (a -> b) -> (a,a) -> (b,b)
nmap f (x,y) = (f x, f y)
|
RoboNickBot/linked-list-web-demo
|
src/Demo/Links.hs
|
bsd-2-clause
| 5,196 | 0 | 14 | 1,791 | 1,854 | 1,030 | 824 | 107 | 4 |
{-# LANGUAGE QuasiQuotes #-}
import LiquidHaskell
{-
Interesting example as subtyping of bounds is instantiated
with function types!
-}
zipWith :: (b -> Char -> a)
-> b -> b -> a
zipWith f = (bar . f)
bar :: (Char -> c) -> a -> c
bar = undefined
|
spinda/liquidhaskell
|
tests/gsoc15/unknown/pos/bounds1.hs
|
bsd-3-clause
| 264 | 0 | 8 | 68 | 74 | 41 | 33 | 7 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
----------------------------------------------------------------------
-- |
-- Module : Proof.Monad
--
-- Monad for checking proofs in propositional logic.
----------------------------------------------------------------------
module Proof.Monad(
Proof, LatexProof,
Fact, getFact,
CondFact, getCond,
MonadProof,
derivedRule,
assume,
prem,
and_el, and_er, and_i,
or_e, or_il, or_ir,
imp_i, imp_e,
not_e, not_i,
nn_e,
bot_e,
check, checkTauto, checkLatex,
(.::)
) where
import Data.List
import qualified Data.Map as M
import Control.Monad
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Error
import Control.Monad.Trans
import Control.Applicative
import Proof.Language
data Context = Context { premises :: [Premise] }
newtype Fact = Fact { getFact :: Phi }
deriving (Show, Eq, Ord)
data CondFact = CondFact { getCond :: Phi, getCondFact :: Phi }
deriving (Show, Eq, Ord)
newtype Proof a = P { runProof :: ReaderT Context (Either String) a}
deriving (Monad, MonadError String, MonadReader Context, Functor)
data LatexState = LS {
outLines :: [String]
,factLabels :: M.Map Fact String
,condFactLabels :: M.Map CondFact (String, String)
,freshNames :: [String]
}
newtype LatexProof a = LP { runLatex :: StateT LatexState Proof a }
deriving (Monad, Functor, MonadState LatexState)
writeLine :: String -> LatexProof ()
writeLine l = modify (\s -> s { outLines = l:outLines s })
writeLine' = writeLine'' True
writeLine'' :: Bool -- ^ Render with line break?
-> Proof Fact -- ^ Proof
-> String -- ^ Justification
-> [Fact] -- ^ Dependencies
-> [CondFact] -- ^ Conditional dependencies
-> LatexProof Fact
writeLine'' nl proof name deps cdeps = do
res <- LP . lift $ proof
lbl <- getFresh
frefs <- map (\l -> "\\ref{" ++ l ++ "}")
<$> mapM getFactLabel deps
crefs <- map (\(s,e) -> "\\ref{" ++ s ++ "} - \\ref{" ++ e ++ "}")
<$> mapM getCondFactLabel cdeps
let refs = concat . intersperse "," $ crefs ++ frefs
let premise = name ++ if null deps && null cdeps then "" else "(" ++ refs ++ ")"
writeLine $ (if nl then "\\\\ " else "") ++ "\\\"" ++ lbl ++ "\" \\: "
++ showPhi latexFormat (getFact res)
++ " \\= " ++ premise
putFactLabel res lbl
return res
getFresh :: LatexProof String
getFresh = do
fresh <- gets freshNames
modify $ \s -> s { freshNames = tail fresh }
return $ head fresh
putFactLabel :: Fact -> String -> LatexProof ()
putFactLabel f l = modify $ \s -> s { factLabels = M.insert f l (factLabels s) }
putCondFactLabel :: CondFact -> String -> String -> LatexProof ()
putCondFactLabel f st en =
modify $ \s -> s { condFactLabels = M.insert f (st,en) (condFactLabels s) }
getFactLabel :: Fact -> LatexProof String
getFactLabel f = (M.! f) <$> gets factLabels
getCondFactLabel :: CondFact -> LatexProof (String, String)
getCondFactLabel f = (M.! f) <$> gets condFactLabels
class (Monad m) => MonadProof m where
derivedRule :: String -> [Fact] -> [CondFact] -> m Fact -> m Fact
failProof :: String -> m a
assume :: Phi -> (Fact -> m Fact) -> m CondFact
prem :: Phi -> m Fact
and_el :: Fact -> m Fact
and_er :: Fact -> m Fact
and_i :: Fact -> Fact -> m Fact
imp_i :: CondFact -> m Fact
imp_e :: Fact -> Fact -> m Fact
bot_e :: Fact -> Phi -> m Fact
not_e :: Fact -> Fact -> m Fact
nn_e :: Fact -> m Fact
not_i :: CondFact -> m Fact
or_e :: Fact -> CondFact -> CondFact -> m Fact
or_il :: Fact -> Phi -> m Fact
or_ir :: Phi -> Fact -> m Fact
-- |Checks if a proof proves a given sequent.
check :: Sequent -> Proof Fact -> Either String ()
check (prems :- phi) proof =
case runReaderT (runProof proof) (Context prems) of
Left err -> Left err
Right (Fact phi') ->
if (phi == phi')
then Right ()
else Left $ "check: invalid conclusion: reached '"
++ show phi' ++ "', expected '"
++ show phi ++ "'"
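-- A minimal usage sketch, assuming 'Phi' has a variable constructor (written
-- 'Var' here) and that premises are plain 'Phi' formulas as in "Proof.Language":
--
-- > andFlip :: Proof Fact
-- > andFlip = do
-- >   pq <- prem (Var "p" :&: Var "q")
-- >   q <- and_er pq
-- >   p <- and_el pq
-- >   and_i q p
-- >
-- > check ([Var "p" :&: Var "q"] :- (Var "q" :&: Var "p")) andFlip == Right ()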
-- |Checks a proof of a tautology.
checkTauto :: Proof Fact -> Either String Sequent
checkTauto proof =
case runReaderT (runProof proof) (Context []) of
Left err -> Left err
Right (Fact phi) -> Right ([] :- phi)
checkLatex :: Sequent -> LatexProof Fact -> Either String String
checkLatex (prems :- phi) proof =
case (runReaderT (runProof (runStateT (runLatex proof) initState)) (Context prems)) of
Left err -> Left err
Right (Fact phi', s) ->
if phi == phi'
then Right . wrap . concat . intersperse "\n" . reverse $ outLines s
else Left $ "checkLatex: invalid conclusion: reached '"
++ show phi' ++ "', expected '"
++ show phi ++ "'"
where
wrap s = "\\begin{proofbox}\n" ++ s ++ "\n\\end{proofbox}\n"
initState = LS [] (M.fromList []) (M.fromList []) freshNames
freshNames = ["l" ++ show n | n <- [1..]]
-- |Proof signature. (For debugging proofs).
(.::) :: (MonadProof m) => m Fact -> Phi -> m Fact
p .:: f' = do
(Fact f) <- p
when (f /= f') . failProof $
"Conclusion mismatch: Got '" ++ show f ++ "' expected '" ++ show f' ++ "'"
return (Fact f)
infixl 0 .::
instance MonadProof Proof where
derivedRule _ _ _ m = m
failProof = throwError
assume p m = do f <- local (\ctx -> ctx { premises = p:premises ctx} ) (m (Fact p))
return $ CondFact p (getFact f)
prem p = do prems <- premises <$> ask
if p `elem` prems then return (Fact p)
else throwError $ "prem: Not a premise: " ++ show p
and_el (Fact (a :&: b)) = return $ Fact a
and_el f = throwError $ "and_el: Invalid premise: " ++ show f
and_er (Fact (a :&: b)) = return $ Fact b
and_er f = throwError $ "and_er: Invalid premise: " ++ show f
and_i (Fact a) (Fact b) = return . Fact $ a :&: b
imp_i (CondFact p f) = return . Fact $ p :-> f
imp_e (Fact phi) (Fact (phi' :-> psi)) =
if phi == phi'
then return $ Fact psi
else throwError $ "imp_e: Premises do not match: '"
++ show phi ++ "' and '" ++ show phi' ++ "'"
imp_e _ (Fact f) = throwError $ "imp_e: Premise not an implication: " ++ show f
bot_e (Fact Bot) p = return . Fact $ p
bot_e f p = throwError $ "bot_e: Invalid premise: " ++ show f
not_e (Fact a) (Fact (Not a')) =
if a == a'
then return $ Fact Bot
else throwError $ "not_e: Subformulas does not match: '"
++ show a ++ "' and '" ++ show a' ++ "'"
nn_e (Fact (Not (Not p))) = return $ Fact p
nn_e f = throwError $ "nn_e: Invalid premise: " ++ show f
not_i (CondFact p Bot) = return . Fact $ Not p
not_i (CondFact p f) = throwError $ "not_i: Conclusion in conditional proof is not bottom: "
++ show f
or_e (Fact (a :|: b)) (CondFact a' x1) (CondFact b' x2) = do
when (a /= a') . throwError $
"or_e: Assumption in first conditional "
++ "fact does not match left subformula: "
++ "'" ++ show a ++ "' and '" ++ show a' ++ "'"
when (b /= b') . throwError $
"or_e: Assumption in second conditional "
++ "fact does not match right subformula: "
++ "'" ++ show b ++ "' and '" ++ show b' ++ "'"
when (x1 /= x2) . throwError $
"or_e: Conclusions in conditional subproofs does not match: "
++ "'" ++ show x1 ++ "' and '" ++ show x2 ++ "'"
return . Fact $ x1
or_il (Fact a) b = return . Fact $ a :|: b
or_ir a (Fact b) = return . Fact $ a :|: b
instance MonadProof LatexProof where
derivedRule name deps cdeps proof = do
s <- get
(fact, s') <- LP . lift $ runStateT (runLatex proof) s
put $ s' {outLines = outLines s} -- Suppress output from proof
writeLine' (return fact) name deps cdeps
failProof = LP . lift . throwError
assume p f = do
lStart <- getFresh
writeLine $ "\\[" ++ "\\label{" ++ lStart ++ "}"
writeLine'' False (return (Fact p)) "\\ass" [] []
s <- get
let m = runStateT (runLatex (f (Fact p))) s
(Fact res, s') <- LP . lift $ local (\ctx -> ctx { premises = p:premises ctx }) m
put s'
lEnd <- getFresh
writeLine $ "\\label{" ++ lEnd ++ "}\\]"
let cFact = CondFact p res
putCondFactLabel cFact lStart lEnd
return cFact
prem p = writeLine' (prem p) "\\prem" [] []
and_el f = writeLine' (and_el f) "\\land e_1" [f] []
and_er f = writeLine' (and_er f) "\\land e_2" [f] []
and_i f1 f2 = writeLine' (and_i f1 f2) "\\land i" [f1,f2] []
imp_i cf = writeLine' (imp_i cf) "\\rightarrow i" [] [cf]
imp_e f1 f2 = writeLine' (imp_e f1 f2) "\\rightarrow e" [f1,f2] []
bot_e f p = writeLine' (bot_e f p) "\\bot e" [f] []
not_e f1 f2 = writeLine' (not_e f1 f2) "\\lnot e" [f1,f2] []
nn_e f = writeLine' (nn_e f) "\\lnot\\lnot e" [f] []
not_i cf = writeLine' (not_i cf) "\\lnot i" [] [cf]
or_e f cf1 cf2 = writeLine' (or_e f cf1 cf2) "\\lor e" [f] [cf1,cf2]
or_il f p = writeLine' (or_il f p) "\\lor i_1" [f] []
or_ir p f = writeLine' (or_ir p f) "\\lor i_2" [f] []
|
ulrikrasmussen/Proof
|
src/Proof/Monad.hs
|
bsd-3-clause
| 9,217 | 0 | 18 | 2,503 | 3,481 | 1,770 | 1,711 | 209 | 3 |
ignore "use camelCase"
|
DNNX/hyphenation
|
HLint.hs
|
bsd-3-clause
| 23 | 0 | 5 | 3 | 7 | 2 | 5 | -1 | -1 |
{-# OPTIONS -fasm #-}
{-# LANGUAGE BangPatterns #-}
module SetBench (main) where
import Criterion.Main
import Data.Set
import qualified Data.Map as M
import qualified Data.Foldable as F
import qualified Data.Vector as V
import qualified Data.Vector.Mutable as VM
import Control.Monad.Primitive
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Random (getRandomR, RandT, StdGen, evalRandT, mkStdGen)
import qualified Data.ByteString.Char8 as BS
import qualified Progression.Main as P
import Control.DeepSeq
import Prelude hiding (filter)
instance NFData BS.ByteString where
rnf xs = xs `seq` ()
shuffle :: V.Vector a -> V.Vector a
shuffle = V.modify (\ mv -> evalRandT (shuffleM mv) (mkStdGen 0))
half :: V.Vector a -> V.Vector a
half xs = V.take (V.length xs `quot` 2) xs
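-- Shuffle in place by swapping every index with a uniformly random index; not
-- an exact Fisher-Yates shuffle, but enough to randomise the benchmark input
-- deterministically from the fixed seed used in 'shuffle'.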
shuffleM :: PrimMonad m => VM.MVector (PrimState m) a -> RandT StdGen m ()
shuffleM xs = forM_ [0..VM.length xs - 1] $ \ i -> do
j <- getRandomR (0, VM.length xs - 1)
lift $ VM.swap xs i j
tSortBench strings = toList (fromList strings)
tIntersectBench (strings, revs) = size (intersection strings revs)
tUnionBench (strings, revs) = size strings + size revs - size (union strings revs)
tDiffBench (strings, revs) = size strings - size (difference strings revs)
tFilterBench strings = size (filter (\ str -> not (BS.null str) && BS.last str /= 's') strings)
tSplitBench strings = case split (BS.pack "logical") strings of
(l, r) -> size l - size r
tEnds strings = case deleteFindMin strings of
(l, strs') -> case deleteFindMax strs' of
(r, strs'') -> size strs'' + BS.length l - BS.length r
tFromList strings = size (fromList strings)
tToList strs = sum [BS.length str | str <- toList strs]
tInsert strs = size (insert (BS.pack "scientifitude") strs)
tIndex strs = M.elemAt (31415926 `rem` M.size strs) strs
tNeighborhood (strs, str) = case splitMember str strs of
(l, x, r) -> (findMax l, x, findMin r)
nf' f a = f a `deepseq` nf f a
tLookupBench (strings, s1, s2) = (s1 `member` strings, s2 `member` strings)
tBenches strings revs = bgroup ""
[bench "Lookup" (nf' tLookupBench (strSet, someStr1,someStr2)),
revSet `seq` bench "Intersect" (nf' tIntersectBench (strSet, revSet)),
bench "Sort" (nf' tSortBench strings),
bench "Union" (nf' tUnionBench (strSet, revSet)),
bench "Difference" (nf' tDiffBench (strSet, revSet)),
bench "Filter" (nf' tFilterBench strSet),
bench "Split" (nf' tSplitBench strSet),
bench "Neighborhood" (nf' tNeighborhood (strSet, someStr2)),
bench "Index" (nf' tIndex strMap),
bench "Min/Max" (nf' tEnds strSet),
bench "FromList" (nf' tFromList strings),
bench "ToList" (nf' tToList strSet),
bench "Insert" (nf' tInsert strSet)]
where !strSet = fromList strings; !revSet = fromList revs; !strMap = M.fromList [(str, str) | str <- strings]
someStr1 = strings !! (314159 `rem` n); someStr2 = revs !! (314159 `rem` n)
n = length strings
main :: IO ()
main = do
strings <- liftM BS.lines (BS.readFile "dictionary.txt")
let !strings' = V.toList (shuffle (V.fromList strings))
let !revs' = Prelude.map BS.reverse strings'
let benches = tBenches strings' revs'
strings' `deepseq` revs' `deepseq` P.defaultMain benches
|
lowasser/TrieMap
|
SetBench.hs
|
bsd-3-clause
| 3,194 | 5 | 15 | 567 | 1,331 | 697 | 634 | -1 | -1 |
-- |
-- Module : Simulation.Aivika.Branch
-- Copyright : Copyright (c) 2016-2017, David Sorokin <[email protected]>
-- License : BSD3
-- Maintainer : David Sorokin <[email protected]>
-- Stability : experimental
-- Tested with: GHC 7.10.3
--
-- This module re-exports the library functionality related to branching computations.
--
module Simulation.Aivika.Branch
(-- * Modules
module Simulation.Aivika.Branch.BR,
module Simulation.Aivika.Branch.Event,
module Simulation.Aivika.Branch.Generator,
module Simulation.Aivika.Branch.QueueStrategy,
module Simulation.Aivika.Branch.Ref.Base) where
import Simulation.Aivika.Branch.BR
import Simulation.Aivika.Branch.Event
import Simulation.Aivika.Branch.Generator
import Simulation.Aivika.Branch.QueueStrategy
import Simulation.Aivika.Branch.Ref.Base
|
dsorokin/aivika-branches
|
Simulation/Aivika/Branch.hs
|
bsd-3-clause
| 863 | 0 | 5 | 130 | 97 | 74 | 23 | 12 | 0 |
{-# language
DuplicateRecordFields
, GeneralizedNewtypeDeriving
, TypeApplications
#-}
module Zoid where
import Clash.Prelude hiding (read)
import Config
import Instructions
import Types
class Arithmetic n where
add :: BitVector n -> BitVector n -> Processor (BitVector n)
-- class Logic a where
-- a == a :: Bool
-- a /= a :: Bool
-- not a :: a
-- class Control a where
-- jump :: a
-- class Load a where
-- load :: a
-- class Store a where
-- store :: a
decode :: BitVector 32 -> ISA
decode = undefined
-- instance Instruction IInstr where
-- type Args IInstr = IFormat
-- class Instruction i where
-- type Args i :: * -> Format
-- execute :: i -> ProcM ()
topEntity
:: ISA
-> Processor ()
topEntity = execute
|
jkopanski/zoid
|
src/Zoid.hs
|
bsd-3-clause
| 763 | 0 | 11 | 185 | 113 | 68 | 45 | -1 | -1 |
{-# LANGUAGE RecursiveDo #-}
module Main where
import qualified Control.Monad.RevState as Rev
import System.Exit (exitFailure)
import System.Timeout (timeout)
-- An example usage of reverse state
-- http://stackoverflow.com/questions/34030388/retrocausality-in-haskell-from-tardis-to-revstate/34287498
lastOccurrence :: Int -> Rev.State [Int] Bool
lastOccurrence x = mdo
Rev.put (x : xs)
xs <- Rev.get
return (not (elem x xs))
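-- State flows backwards here: 'Rev.get' returns the list built by the *later*
-- elements of the traversal, and 'Rev.put' hands the extended list back to the
-- *earlier* ones, so the result is True exactly when x never occurs again.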
lastOccurrences :: [Int] -> Rev.State [Int] [Bool]
lastOccurrences xs = mapM lastOccurrence xs
exampleValue :: [Bool]
exampleValue = flip Rev.evalState [] $ lastOccurrences [3,4,6,7,4,3,5,7]
expectedResult :: [Bool]
expectedResult = [False,False,True,False,True,True,True,True]
-- TODO: use a proper testing framework
main :: IO ()
main = do
b <- timeout 1000000 $ return $! exampleValue == expectedResult
if b == Just True
then return ()
else exitFailure
|
DanBurton/rev-state
|
test/Main.hs
|
bsd-3-clause
| 909 | 0 | 11 | 140 | 296 | 164 | 132 | 22 | 2 |
-----------------------------------------------------------------------------
-- |
-- Module : TestSuite.Basics.TOut
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Test the basic timeout mechanism
-----------------------------------------------------------------------------
module TestSuite.Basics.TOut(tests) where
import Data.SBV.Examples.Puzzles.Euler185
import Utils.SBVTestFramework
-- Test suite
tests :: TestTree
tests =
testGroup "Basics.timeout"
[ goldenVsStringShow "timeout1" $ sat $ setTimeOut 1000 >> euler185
]
|
josefs/sbv
|
SBVTestSuite/TestSuite/Basics/TOut.hs
|
bsd-3-clause
| 630 | 0 | 10 | 95 | 72 | 46 | 26 | 7 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Numeral.ES.PE.Corpus (allExamples) where
import Data.String
import Prelude
import Duckling.Numeral.Types
import Duckling.Testing.Types
allExamples :: [Example]
allExamples =
concat
[ examples (NumeralValue 1) ["1"]
, examples (NumeralValue 33) ["33"]
, examples (NumeralValue 1.1) ["1,1", "1,10", "01,10"]
, examples (NumeralValue 0.77) ["0,77", ",77"]
, examples (NumeralValue 100000) ["100.000", "100000"]
, examples (NumeralValue 243) ["243"]
, examples (NumeralValue 3000000) ["3000000", "3.000.000"]
, examples (NumeralValue 1200000) ["1.200.000", "1200000"]
, examples
(NumeralValue (-1200000))
["- 1.200.000", "menos 1.200.000", "-1,2M", "-,0012G"]
, examples (NumeralValue 1.5) ["1,5"]
]
|
facebookincubator/duckling
|
Duckling/Numeral/ES/PE/Corpus.hs
|
bsd-3-clause
| 1,019 | 0 | 11 | 186 | 258 | 149 | 109 | 21 | 1 |
-- Copyright (c) 2014, Dmitry Zuikov
-- All rights reserved.
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
-- * Redistributions of source code must retain the above copyright notice, this
-- list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright notice,
-- this list of conditions and the following disclaimer in the documentation
-- and/or other materials provided with the distribution.
-- * Neither the name of emufat nor the names of its
-- contributors may be used to endorse or promote products derived from
-- this software without specific prior written permission.
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
module Encode where
import Data.Word (Word8, Word32, Word64)
import qualified Data.ByteString.Lazy as BS
import Control.Monad.Writer
import Control.Monad.State
import Data.Binary.Put
import Data.List
import qualified Data.Map as M
import Text.Printf
import Util
data Rule = REQ Word64 [Chunk] | RANGE Word64 Word64 [Chunk] deriving Show
data Chunk = SEQ BS.ByteString
| RLE Word64 Word8
| SER Word32 Word32
| NSER Word32 Word64 Word32 -- base offset step
| CALLBACK Word8
deriving (Eq, Ord)
instance Show Chunk where
show (SEQ bs) = printf "SEQ [%s]" (intercalate " " $ hexDump 128 bs)
show (RLE a n) = printf "RLE %d %d" a n
show (SER a b) = printf "SER %d %d" a b
show (NSER a b c) = printf "NSER %d %d %d" a b c
show (CALLBACK n) = printf "CALLBACK %d" n
data CmpTree = GEQ Word64 CmpTree CmpTree | CODE [Rule]
deriving (Show)
mkCmpTree :: [Rule] -> CmpTree
mkCmpTree r = mkTree' rulemap
where rulemap = M.fromList $ map (\x -> (fsect x, x)) r
avg :: Int -> Int -> Int
avg a b = a + ((b - a) `div` 2)
splitGeq n m =
let (a, b, c) = M.splitLookup n m
in (a, c `M.union` (maybe M.empty (M.singleton n) b))
mkTree' xs | M.null xs = CODE []
| M.size xs < 3 = CODE (map snd (M.toList xs))
| otherwise =
let ks = map fst $ M.toAscList xs
n = ks !! (length ks `div` 2)
(le, geq) = splitGeq n xs
in GEQ n (mkTree' le) (mkTree' geq)
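-- For example (with arbitrary chunk lists c1..c4), the rules
-- [REQ 1 c1, REQ 5 c2, REQ 9 c3, REQ 12 c4] become
-- GEQ 9 (CODE [REQ 1 c1, REQ 5 c2]) (CODE [REQ 9 c3, REQ 12 c4]):
-- sectors below 9 go to the left branch, all others to the right.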
rsect :: Rule -> Word64
rsect (REQ n _) = n
rsect (RANGE _ n _) = n
fsect :: Rule -> Word64
fsect (REQ n _) = n
fsect (RANGE n _ _) = n
encodeBlock :: BS.ByteString -> [Chunk]
encodeBlock bs = {-# SCC "encodeBlock" #-}eat [] [] groups
where groups = group (BS.unpack bs)
eat :: [Chunk] -> [Word8] -> [[Word8]] -> [Chunk]
eat acc seq (x:xs) | length x == 1 = eat acc (head x:seq) xs
| length x > 1 = eat (packRle (RLE (fromIntegral (length x)) (head x)) seq acc) [] xs
eat acc [] [] = reverse acc
eat acc seq [] = reverse (packseq seq : acc)
packRle r [] acc = r : acc
packRle r seq acc = r : packseq seq : acc
packseq seq = SEQ (BS.pack (reverse seq))
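-- For example, encodeBlock (BS.pack [0,0,0,1,2]) yields
-- [RLE 3 0, SEQ (BS.pack [1,2])]: runs longer than one byte become RLE chunks,
-- and the pending single bytes are flushed as a SEQ chunk.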
encodeRaw :: Word64 -> Word64 -> BS.ByteString -> [Rule]
encodeRaw blocklen from bs = mergeRules $ evalState (execWriterT (eat bs)) from
where eat bs | BS.null bs = return ()
| otherwise = msplit bs block eat
block chunk = do
i <- get
tell [REQ i (encodeBlock chunk)] >> modify succ
msplit xs f1 f2 = let (a, b) = BS.splitAt fsl xs in f1 a >> f2 b
fsl = fromIntegral blocklen
decodeBlock :: [Chunk] -> BS.ByteString
decodeBlock cs = runPut $ mapM_ chunk cs
where chunk (SEQ bs) = putLazyByteString bs
chunk (RLE n w) = replicateM_ (fromIntegral n) (putWord8 w)
chunk _ = undefined
mergeRules :: [Rule] -> [Rule]
mergeRules r = execWriter (eat r)
where eat (REQ a c : REQ b c' : xs) | a+1 == b && c == c' = eat (RANGE a b c : xs)
eat (REQ a c : RANGE a' b c' : xs) | a+1 == a' && c == c' = eat (RANGE a b c' : xs)
eat (RANGE a b c : RANGE a' b' c' : xs) | b+1 == a' && c == c' = eat (RANGE a b' c' : xs)
eat (RANGE a b c : REQ a' c' : xs) | b+1 == a' && c == c' = eat (RANGE a a' c : xs)
eat (x:y:xs) = tell [x] >> eat (y:xs)
eat x = tell x
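-- For example, three consecutive sectors with identical chunks collapse into a
-- single range: mergeRules [REQ 0 c, REQ 1 c, REQ 2 c] yields [RANGE 0 2 c].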
chunks :: Rule -> [Chunk]
chunks (REQ _ c) = c
chunks (RANGE _ _ c) = c
|
voidlizard/emufat
|
src/Encode.hs
|
bsd-3-clause
| 5,081 | 0 | 16 | 1,364 | 1,742 | 889 | 853 | 82 | 6 |
-- #hide
--------------------------------------------------------------------------------
-- |
-- Module : Sound.OpenAL.AL.String
-- Copyright : (c) Sven Panne 2005
-- License : BSD-style (see the file libraries/OpenAL/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
--------------------------------------------------------------------------------
module Sound.OpenAL.AL.String (
withALString, peekALString
) where
import Foreign.C.String ( withCString, peekCString )
import Foreign.Ptr ( Ptr, castPtr )
import Sound.OpenAL.AL.BasicTypes ( ALchar )
--------------------------------------------------------------------------------
-- AL uses "Ptr ALchar" instead of "CString" for strings, so some wrappers
-- are quite handy.
withALString :: String -> (Ptr ALchar -> IO a) -> IO a
withALString str action = withCString str (action . castPtr)
peekALString :: Ptr ALchar -> IO String
peekALString = peekCString . castPtr
|
FranklinChen/hugs98-plus-Sep2006
|
packages/OpenAL/Sound/OpenAL/AL/String.hs
|
bsd-3-clause
| 1,005 | 0 | 9 | 144 | 145 | 88 | 57 | 9 | 1 |
module Input
( initInput
) where
import Control.Monad (when)
import Data.IORef (IORef, modifyIORef)
import qualified Graphics.LWGL as GL
import Graphics.UI.GLFW (Key (..), KeyState (..), ModifierKeys,
Window)
import qualified Graphics.UI.GLFW as GLFW
import Camera (Navigation (..))
import Helpers (makeProjection)
import RenderState (RenderState (..))
initInput :: Window -> IORef RenderState -> IO ()
initInput window ref = do
GLFW.setKeyCallback window $ Just (keyCallback ref)
GLFW.setWindowSizeCallback window $ Just (windowSizeCallback ref)
keyCallback :: IORef RenderState -> Window -> Key -> Int
-> KeyState -> ModifierKeys -> IO ()
keyCallback ref _window key _scan keyState _modKeys = do
-- Toggle the wireframe rendering mode.
when (key == Key'R && keyState == KeyState'Pressed) $
modifyIORef ref $ \state ->
state { renderWireframe = not $ renderWireframe state }
-- Camera turning left.
when (key == Key'Left) $
modifyIORef ref $ \state ->
let nav = navigation state
in state { navigation = nav { left = activeKey keyState } }
-- Camera turning right.
when (key == Key'Right) $
modifyIORef ref $ \state ->
let nav = navigation state
in state { navigation = nav { right = activeKey keyState } }
-- Camera go forward.
when (key == Key'Up) $
modifyIORef ref $ \state ->
let nav = navigation state
in state { navigation = nav { forward = activeKey keyState } }
-- Camera go backward.
when (key == Key'Down) $
modifyIORef ref $ \state ->
let nav = navigation state
in state { navigation = nav { backward = activeKey keyState } }
-- Camera go up.
when (key == Key'A) $
modifyIORef ref $ \state ->
let nav = navigation state
in state { navigation = nav { up = activeKey keyState } }
-- Camera go down.
when (key == Key'Z) $
modifyIORef ref $ \state ->
let nav = navigation state
in state { navigation = nav { down = activeKey keyState } }
activeKey :: KeyState -> Bool
activeKey keyState =
keyState == KeyState'Pressed || keyState == KeyState'Repeating
windowSizeCallback :: IORef RenderState -> Window -> Int -> Int -> IO ()
windowSizeCallback ref _window width height = do
GL.glViewport 0 0 width height
modifyIORef ref $ \state ->
state { perspective = makeProjection width height}
|
psandahl/outdoor-terrain
|
src/Input.hs
|
bsd-3-clause
| 2,651 | 0 | 15 | 827 | 782 | 408 | 374 | 53 | 1 |
module Y2015.Day20 (answer1, answer2) where
import Data.List (nub, sort)
import Data.Numbers.Primes
answer1 :: IO ()
answer1 =
let presents = map numberOfPresents [1 ..]
pairs = zip [1 ..] presents
in print $ fst . head $ dropWhile ((>=) targetPresent . snd) pairs
answer2 :: IO ()
answer2 =
let presents = map numberOfPresents' [1 ..]
pairs = zip [1 ..] presents
in print $ fst . head $ dropWhile ((>=) targetPresent . snd) pairs
targetPresent = 36000000
numberOfPresents :: Integer -> Integer
numberOfPresents houseNumber = sum $ map (*10) $ divisors houseNumber
numberOfPresents' houseNumber =
let limit = houseNumber `quot` 50
actualElves = dropWhile (<=limit) $ divisors houseNumber
in sum $ map (*11) actualElves
divisors = sort . combine . primeFactors
combine :: [Integer] -> [Integer]
combine [] = [1]
combine (x:xs) = let next = combine xs in nub $ x : map (*x) next ++ next
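-- e.g. divisors 12 = sort (combine (primeFactors 12)) = [1,2,3,4,6,12]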
|
geekingfrog/advent-of-code
|
src/Y2015/Day20.hs
|
bsd-3-clause
| 960 | 0 | 11 | 224 | 377 | 202 | 175 | 24 | 1 |
-- #hide
module Text.XHtml.Extras where
import Text.XHtml.Internals
import Text.XHtml.Strict.Elements
import Text.XHtml.Strict.Attributes
--
-- * Converting strings to HTML
--
-- | Convert a 'String' to 'Html', converting
-- characters that need to be escaped to HTML entities.
stringToHtml :: String -> Html
stringToHtml = primHtml . stringToHtmlString
-- | This converts a string, but keeps spaces as non-line-breakable.
lineToHtml :: String -> Html
lineToHtml = primHtml . concatMap htmlizeChar2 . stringToHtmlString
where
         htmlizeChar2 ' ' = "&nbsp;"
         htmlizeChar2 c = [c]
-- | This converts a string, but keeps spaces as non-line-breakable,
-- and adds line breaks between each of the strings in the input list.
linesToHtml :: [String] -> Html
linesToHtml [] = noHtml
linesToHtml (x:[]) = lineToHtml x
linesToHtml (x:xs) = lineToHtml x +++ br +++ linesToHtml xs
--
-- * Html abbreviations
--
primHtmlChar :: String -> Html
-- | Copyright sign.
copyright :: Html
-- | Non-breaking space.
spaceHtml :: Html
bullet :: Html
primHtmlChar = \ x -> primHtml ("&" ++ x ++ ";")
copyright = primHtmlChar "copy"
spaceHtml = primHtmlChar "nbsp"
bullet = primHtmlChar "#149"
-- | Same as 'paragraph'.
p :: Html -> Html
p = paragraph
--
-- * Hotlinks
--
type URL = String
data HotLink = HotLink {
hotLinkURL :: URL,
hotLinkContents :: Html,
hotLinkAttributes :: [HtmlAttr]
} deriving Show
instance HTML HotLink where
toHtml hl = anchor ! (href (hotLinkURL hl) : hotLinkAttributes hl)
<< hotLinkContents hl
hotlink :: URL -> Html -> HotLink
hotlink url h = HotLink {
hotLinkURL = url,
hotLinkContents = h,
hotLinkAttributes = [] }
--
-- * Lists
--
-- (Abridged from Erik Meijer's Original Html library)
ordList :: (HTML a) => [a] -> Html
ordList items = olist << map (li <<) items
unordList :: (HTML a) => [a] -> Html
unordList items = ulist << map (li <<) items
defList :: (HTML a,HTML b) => [(a,b)] -> Html
defList items
= dlist << [ [ dterm << dt, ddef << dd ] | (dt,dd) <- items ]
--
-- * Forms
--
widget :: String -> String -> [HtmlAttr] -> Html
widget w n markupAttrs = input ! ([thetype w,name n] ++ markupAttrs)
checkbox :: String -> String -> Html
hidden :: String -> String -> Html
radio :: String -> String -> Html
reset :: String -> String -> Html
submit :: String -> String -> Html
password :: String -> Html
textfield :: String -> Html
afile :: String -> Html
clickmap :: String -> Html
checkbox n v = widget "checkbox" n [value v]
hidden n v = widget "hidden" n [value v]
radio n v = widget "radio" n [value v]
reset n v = widget "reset" n [value v]
submit n v = widget "submit" n [value v]
password n = widget "password" n []
textfield n = widget "text" n []
afile n = widget "file" n []
clickmap n = widget "image" n []
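-- For instance, checkbox "agree" "yes" renders roughly as
-- <input type="checkbox" name="agree" value="yes" /> (the exact attribute
-- order and spacing depend on the renderer).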
menu :: String -> [Html] -> Html
menu n choices
= select ! [name n] << [ option << p << choice | choice <- choices ]
gui :: String -> Html -> Html
gui act = form ! [action act,method "post"]
|
FranklinChen/hugs98-plus-Sep2006
|
packages/xhtml/Text/XHtml/Extras.hs
|
bsd-3-clause
| 3,187 | 0 | 12 | 803 | 998 | 544 | 454 | 70 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
-- | This module is where all the routes and handlers are defined for your
-- site. The 'app' function is the initializer that combines everything
-- together and is exported by this module.
module Site
( app
) where
import Control.Concurrent (forkIO)
import Control.Concurrent.STM
import Control.Exception (Handler (..),
SomeException,
catches)
import Control.Monad (foldM)
import Control.Monad.IO.Class (liftIO)
import Data.ByteString.Char8 as B
import Data.Configurator as C
import Data.HashMap.Strict as H
import Data.Pool
import Database.Groundhog.Postgresql
import Heist.Interpreted
import Snap.Snaplet
import Snap.Snaplet.Auth
import Snap.Snaplet.Auth.Backends.PostgresqlSimple
import Snap.Snaplet.Ekg (ekgInit)
import Snap.Snaplet.Heist
import Snap.Snaplet.P2PPicks
import Snap.Snaplet.PostgresqlSimple
import Snap.Snaplet.SES
import Snap.Snaplet.Session.Backends.CookieSession
import Snap.Util.FileServe
import Logging
import P2PPicks.Types (P2PPicksType)
import Prosper as P
import Prosper.Monad
import Application
import PeerTrader
import PeerTrader.Account.Account
import PeerTrader.Account.Web
import PeerTrader.Admin
import PeerTrader.Admin.Database
import PeerTrader.Admin.Statistics
import PeerTrader.AutoFilter.AutoFilter
import PeerTrader.Investment.Database
import PeerTrader.NewUser (NewUser)
import PeerTrader.P2PPicks.Account (P2PPicksAccount, getP2PPicksAccount)
import PeerTrader.P2PPicks.Result (P2PPicksResult)
import PeerTrader.Prosper.Account as A
import PeerTrader.Prosper.Listing
import PeerTrader.Schedule.Web
import PeerTrader.Socket.Activity
import PeerTrader.Socket.Web
import PeerTrader.Splices
import PeerTrader.Strategy.Strategy
import PeerTrader.StrategyManagement.ActiveStrategy
import PeerTrader.Types
import PeerTrader.Route.AutoFilter
import PeerTrader.Route.P2PPicks
import PeerTrader.Route.P2PPicksAccount
import PeerTrader.Route.ProsperAccount
import PeerTrader.Route.Statistics
import PeerTrader.Route.StrategyManagement
import PeerTrader.Route.StreamData
import PeerTrader.Route.User
initializeDatabase
:: Pool Postgresql
-> IO (TVar (HashMap UserLogin AccountData))
initializeDatabase g = do
ptUsers <- runDbConn selectUser g
hm <- foldM newAccount H.empty ptUsers
newTVarIO hm
where
selectUser = select $ ProsperEnabledField ==. True
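-- | 'maybe' with its arguments reordered so that the continuation comes last.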
maybe' :: Maybe a -> b -> (a -> b) -> b
maybe' m d f = maybe d f m
newAccount
:: HashMap UserLogin AccountData
-> PeerTraderAccount
-> IO (HashMap UserLogin AccountData)
newAccount m (PeerTraderAccount n _ (Just userKey) _ _) = do
mUser <- flip runDbConn g $ get userKey
-- Get the most recent account from the API or database
maybe' mUser (return m) $ \user -> do
pAcct <- initProsperAccount user
picksAccount <- flip runDbConn g $ getP2PPicksAccount n
acct <- accountData user pAcct picksAccount
return $ H.insert n acct m
where
-- Attempt to get Prosper account from Prosper API.
        -- If that fails, log the error and return the latest result from the database.
initProsperAccount ui =
P.account ui
`catches`
[ Handler unauthorizedAccount
, Handler failProsperAcct ]
unauthorizedAccount (UnauthorizedException msg) = do
debugM "Initialization" $
"Stale account for user " ++ show n ++ ": " ++ show msg
flip runDbConn g $ latestProsperAccount n
failProsperAcct e = do
debugM "Initialization" $
"Could not retrieve Prosper account for user " ++
show n ++ ": " ++ show (e :: SomeException)
flip runDbConn g $ latestProsperAccount n
newAccount m _ = return m
-- | The application's routes.
routes :: [(ByteString, AppHandler ())]
routes =
[ ("/login", with auth handleLoginSubmit)
, ("/logout", with auth handleLogout)
, ("/changepassword", with auth handleChangePassword)
, ("/sendForgotEmail", sendForgotPasswordEmail)
, ("/resetPassword", handleResetPassword)
, ("/forgotPassword", render "forgot")
, ("/new_user", handleNewUser)
, ("/verifynewuser", handleVerify)
, ("/checkterms", requireLogIn handleCheckTerms)
-- Data
, ("/streamdata", requireLogIn handleStreamData) -- TODO add channel route
, ("/prosperaccount", requireLogIn accountHandler)
, ("/prosperaccountlatest", requireLogIn accountDataHandler)
, ("/statistics", requireLogIn statisticsHandler)
, ("/prosperaccounttimeseries", requireLogIn accountTimeSeriesHandler)
, ("/activity", requireLogIn activity)
, ("/p2ppicksaccount", requireLogIn p2paccountHandler)
-- Strategies
, ("/autofilter", requireLogIn autoFilterHandler)
, ("/p2ppicks", requireLogIn p2ppicksHandler)
-- Control
, ("/prosperactivate", requireLogIn prosperAccountController)
, ("/strategymanagement", requireLogIn strategyManagement)
-- Admin
, ("/peertraderusers", adminJSON peertraderUsers)
, ("/adminstatistics", adminJSON mostRecentStats)
, ("/updatestats", withAdminUser handleUpdateStats)
, ("", serveDirectory "static")
]
-- | The application initializer.
app :: SnapletInit App App
app = makeSnaplet "app" "PeerTrader" Nothing $ do
h <- nestSnaplet "" heist $ heistInit "templates"
s <- nestSnaplet "sess" sess $
initCookieSessionManager "site_key.txt" "sess" (Just 604800) -- One week
k <- nestSnaplet "awsKeys" awsKeys initAWSKeys
d <- nestSnaplet "db" db pgsInit
a <- nestSnaplet "auth" auth $ initPostgresAuth sess d
ek <- nestSnaplet "ekg" ekg ekgInit
p2p <- nestSnaplet "p2ppicks" p2ppicks p2ppicksInit
addRoutes routes
addAuthSplices h auth
-- Add extra splices
modifyHeistState $ bindSplice "ifAdmin" ifAdmin
config <- getSnapletUserConfig
dbName <- configLookup defaultDBName config "groundhog.name"
dbUser <- configLookup defaultDBUser config "groundhog.user"
dbHost <- configLookup defaultDBHost config "groundhog.host"
let connString = "dbname=" ++ dbName ++ " user=" ++ dbUser ++ " host=" ++ dbHost
g <- createPostgresqlPool connString 3
liftIO $ withPostgresqlConn connString $ runDbConn $ runMigration $ do
-- Migrations go here
migrate (undefined :: AutoFilter)
migrate (undefined :: Strategy AutoFilter)
migrate (undefined :: Strategy P2PPicksType)
migrate (undefined :: InvestState)
migrate (undefined :: StrategyState AutoFilter)
migrate (undefined :: StrategyState P2PPicksType)
migrate (undefined :: Investment)
migrate (undefined :: Statistics)
migrate (undefined :: ProsperAccount)
migrate (undefined :: ListingResult)
migrate (undefined :: ActiveStrategy AutoFilter)
migrate (undefined :: ActiveStrategy P2PPicksType)
migrate (undefined :: P2PPicksAccount)
migrate (undefined :: User)
migrate (undefined :: PeerTraderAccount)
migrate (undefined :: P2PPicksResult)
migrate (undefined :: NewUser)
ps <- initializePeerTrader
accts <- liftIO $ initializeDatabase g
commandChan <- liftIO $ atomically newTChan
liftIO $ startClient commandChan accts
let ptApp = App h s a d ek k p2p g ps accts commandChan
_ <- liftIO . forkIO $ scheduleLoop ptApp
return ptApp
where
configLookup d c n = liftIO (C.lookupDefault d c n)
defaultDBHost = "localhost"
defaultDBName = "peertrader"
defaultDBUser = "peertrader"
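-- A minimal sketch of wiring this snaplet into an executable (illustration only;
-- it assumes a hypothetical Main module importing Snap.Snaplet and Snap.Http.Server,
-- whose 'serveSnaplet' and 'defaultConfig' are the standard Snap entry points):
--
-- > main :: IO ()
-- > main = serveSnaplet defaultConfig app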
|
WraithM/peertrader-backend
|
src/Site.hs
|
bsd-3-clause
| 8,647 | 0 | 15 | 2,580 | 1,815 | 964 | 851 | 170 | 2 |
{-# LANGUAGE CPP #-}
module Bead.Persistence.Persist (
Persist
, Config(..)
, defaultConfig
, configToPersistConfig
, Interpreter
, createPersistInit
, createPersistInterpreter
, runPersist
, saveUser
, personalInfo
, filterUsers
, loadUser
, updateUser
, doesUserExist
, userDescription
, userSubmissions
, administratedCourses
, administratedGroups
, scoresOfUser
, attachNotificationToUser
, notificationsOfUser
-- Users file upload
  , copyFile -- Copies the given file with the given filename to the user's data directory
, listFiles -- List all the user's files
, getFile -- Get the current path for the user's file
#ifndef SSO
-- Registration
, saveUserReg
, loadUserReg
#endif
-- Course
, saveCourse
, courseKeys
, filterCourses
, loadCourse
, groupKeysOfCourse
, isUserInCourse
, userCourses
, createCourseAdmin
, courseAdmins
, subscribedToCourse
, unsubscribedFromCourse
, testScriptsOfCourse
, assessmentsOfCourse
-- Group
, saveGroup
, loadGroup
, courseOfGroup
, filterGroups
, isUserInGroup
, userGroups
, subscribe
, unsubscribe
, groupAdmins
, createGroupAdmin
, subscribedToGroup
, unsubscribedFromGroup
, assessmentsOfGroup
-- Test Scripts
, saveTestScript
, loadTestScript
, courseOfTestScript
, modifyTestScript
-- Test Cases
, saveTestCase
, loadTestCase
, testScriptOfTestCase
, modifyTestCase
, removeTestCaseAssignment
, copyTestCaseFile
, modifyTestScriptOfTestCase
-- Test Jobs
, saveTestJob -- Saves the test job for the test daemon
-- Test Feedback
, insertTestFeedback
, finalizeTestFeedback
, testFeedbacks
  , deleteTestFeedbacks -- Deletes the test daemon's feedbacks from the test-incoming
-- Assignment Persistence
, assignmentKeys
, saveAssignment
, loadAssignment
, modifyAssignment
, courseAssignments
, groupAssignments
, saveCourseAssignment
, saveGroupAssignment
, courseOfAssignment
, groupOfAssignment
, submissionsForAssignment
, assignmentCreatedTime
, testCaseOfAssignment
-- Submission
, saveSubmission
, loadSubmission
, assignmentOfSubmission
, usernameOfSubmission
, submissionKeys
, evaluationOfSubmission
, commentsOfSubmission
, feedbacksOfSubmission
, lastSubmission
, removeFromOpened
, openedSubmissions
, openedSubmissionSubset
, usersOpenedSubmissions
-- Feedback
, saveFeedback
, loadFeedback
, submissionOfFeedback
-- Notification
, saveCommentNotification
, saveFeedbackNotification
, saveSystemNotification
, loadNotification
, commentOfNotification
, feedbackOfNotification
, usersOfNotification
-- Evaluation
, saveSubmissionEvaluation
, saveScoreEvaluation
, loadEvaluation
, modifyEvaluation
, submissionOfEvaluation
, scoreOfEvaluation
-- Comment
, saveComment
, loadComment
, submissionOfComment
-- Assessment
, saveCourseAssessment
, saveGroupAssessment
, loadAssessment
, modifyAssessment
, courseOfAssessment
, groupOfAssessment
, scoresOfAssessment
-- Score
, saveScore
, loadScore
, assessmentOfScore
, usernameOfScore
, evaluationOfScore
, testIncomingDataDir
#ifdef TEST
, persistTests
#endif
) where
import Data.Time (UTCTime)
import Data.Set (Set)
import qualified Bead.Config as Config
import Bead.Domain.Types (Erroneous)
import Bead.Domain.Entities
import Bead.Domain.Entity.Notification (Notification)
import qualified Bead.Domain.Entity.Notification as Notif
import Bead.Domain.Relationships
import qualified Bead.Persistence.Initialization as Init
#ifdef MYSQL
import qualified Bead.Persistence.SQL as PersistImpl
#else
import qualified Bead.Persistence.NoSQLDir as PersistImpl
#endif
#ifdef TEST
import Test.Tasty.TestSet (TestSet)
#endif
type Persist a = PersistImpl.Persist a
type Config = PersistImpl.Config
-- Converts Bead config into persists config
configToPersistConfig :: Config.Config -> Config
configToPersistConfig = PersistImpl.configToPersistConfig
-- Save the current user
saveUser :: User -> Persist ()
saveUser = PersistImpl.saveUser
-- Calculates the personal information about the user
personalInfo :: Username -> Persist PersonalInfo
personalInfo = PersistImpl.personalInfo
-- Select users who satisfy the given predicate
filterUsers :: (User -> Bool) -> Persist [User]
filterUsers = PersistImpl.filterUsers
-- Loads the user information for the given username
loadUser :: Username -> Persist User
loadUser = PersistImpl.loadUser
-- Updates the user information
updateUser :: User -> Persist ()
updateUser = PersistImpl.updateUser
-- Checks if the user is already in the database
doesUserExist :: Username -> Persist Bool
doesUserExist = PersistImpl.doesUserExist
-- Creates a description for the given username
userDescription :: Username -> Persist UserDesc
userDescription = PersistImpl.userDescription
-- Lists all the submission keys for the submissions that were submitted by the user
-- for the given assignment
userSubmissions :: Username -> AssignmentKey -> Persist [SubmissionKey]
userSubmissions = PersistImpl.userSubmissions
-- Lists all the courses that are administrated by the user
administratedCourses :: Username -> Persist [(CourseKey, Course)]
administratedCourses = PersistImpl.administratedCourses
-- Lists all the groups that are administrated by the user
administratedGroups :: Username -> Persist [(GroupKey, Group)]
administratedGroups = PersistImpl.administratedGroups
attachNotificationToUser :: Username -> NotificationKey -> Persist ()
attachNotificationToUser = PersistImpl.attachNotificationToUser
notificationsOfUser :: Username -> Persist [NotificationKey]
notificationsOfUser = PersistImpl.notificationsOfUser
-- Lists all the scores submitted for the user
scoresOfUser :: Username -> Persist [ScoreKey]
scoresOfUser = PersistImpl.scoresOfUser
-- * Users file upload
copyFile :: Username -> FilePath -> UsersFile -> Persist () -- Copies the given file with the given filename to the user's data directory
copyFile = PersistImpl.copyFile
listFiles :: Username -> Persist [(UsersFile, FileInfo)] -- List all the user's files
listFiles = PersistImpl.listFiles
getFile :: Username -> UsersFile -> Persist FilePath -- Get the current path for the user's file
getFile = PersistImpl.getFile
#ifndef SSO
-- * Registration
-- Save the user registration information which is created at the time when the
-- user starts a new registration
saveUserReg :: UserRegistration -> Persist UserRegKey
saveUserReg = PersistImpl.saveUserReg
-- Loads the user registration
loadUserReg :: UserRegKey -> Persist UserRegistration
loadUserReg = PersistImpl.loadUserReg
#endif
-- * Course Persistence
-- Saves a Course into the database
saveCourse :: Course -> Persist CourseKey
saveCourse = PersistImpl.saveCourse
-- Lists all the course keys saved in the database
courseKeys :: Persist [CourseKey]
courseKeys = PersistImpl.courseKeys
-- Selects all the courses which satisfy the given property
filterCourses :: (CourseKey -> Course -> Bool) -> Persist [(CourseKey, Course)]
filterCourses = PersistImpl.filterCourses
-- Load the course from the database
loadCourse :: CourseKey -> Persist Course
loadCourse = PersistImpl.loadCourse
-- Lists all the group keys for the given course; the listed groups
-- are the groups under the given course
groupKeysOfCourse :: CourseKey -> Persist [GroupKey]
groupKeysOfCourse = PersistImpl.groupKeysOfCourse
-- Checks if the user attends the given course
isUserInCourse :: Username -> CourseKey -> Persist Bool
isUserInCourse = PersistImpl.isUserInCourse
-- Lists all the courses which the user attends
userCourses :: Username -> Persist [CourseKey]
userCourses = PersistImpl.userCourses
-- Set the given user as an administrator for the course
createCourseAdmin :: Username -> CourseKey -> Persist ()
createCourseAdmin = PersistImpl.createCourseAdmin
-- Lists all the users which are administrators of the given course
courseAdmins :: CourseKey -> Persist [Username]
courseAdmins = PersistImpl.courseAdmins
-- Lists all the users who attend the given course as students
subscribedToCourse :: CourseKey -> Persist [Username]
subscribedToCourse = PersistImpl.subscribedToCourse
-- Lists all the users who have unsubscribed from the given course at least once
unsubscribedFromCourse :: CourseKey -> Persist [Username]
unsubscribedFromCourse = PersistImpl.unsubscribedFromCourse
-- Lists all the test scripts that are connected with the course
testScriptsOfCourse :: CourseKey -> Persist [TestScriptKey]
testScriptsOfCourse = PersistImpl.testScriptsOfCourse
-- Lists all the assessments defined for the given course
assessmentsOfCourse :: CourseKey -> Persist [AssessmentKey]
assessmentsOfCourse = PersistImpl.assessmentsOfCourse
-- * Group Persistence
-- Save the group under the given course
saveGroup :: CourseKey -> Group -> Persist GroupKey
saveGroup = PersistImpl.saveGroup
-- Load the group from the database
loadGroup :: GroupKey -> Persist Group
loadGroup = PersistImpl.loadGroup
-- Returns the course of the given group
courseOfGroup :: GroupKey -> Persist CourseKey
courseOfGroup = PersistImpl.courseOfGroup
-- Lists all the groups from the database that satisfy the given predicate
filterGroups :: (GroupKey -> Group -> Bool) -> Persist [(GroupKey, Group)]
filterGroups = PersistImpl.filterGroups
-- Returns True if the user is registered in the group, otherwise False
isUserInGroup :: Username -> GroupKey -> Persist Bool
isUserInGroup = PersistImpl.isUserInGroup
-- Lists all the groups that the user attends
userGroups :: Username -> Persist [GroupKey]
userGroups = PersistImpl.userGroups
-- Subscribe the user for the given course and group
subscribe :: Username -> CourseKey -> GroupKey -> Persist ()
subscribe = PersistImpl.subscribe
-- Unsubscribe the user from the given course and group,
-- if the user is not subscribed nothing happens
unsubscribe :: Username -> CourseKey -> GroupKey -> Persist ()
unsubscribe = PersistImpl.unsubscribe
-- Lists all the admins of the given group
groupAdmins :: GroupKey -> Persist [Username]
groupAdmins = PersistImpl.groupAdmins
-- Set the given user as an admin of the given group
createGroupAdmin :: Username -> GroupKey -> Persist ()
createGroupAdmin = PersistImpl.createGroupAdmin
-- Lists all the users that are subscribed to the given group
subscribedToGroup :: GroupKey -> Persist [Username]
subscribedToGroup = PersistImpl.subscribedToGroup
-- Lists all the users who have unsubscribed from the given group at least once
unsubscribedFromGroup :: GroupKey -> Persist [Username]
unsubscribedFromGroup = PersistImpl.unsubscribedFromGroup
-- Lists all the assessments defined for the given group
assessmentsOfGroup :: GroupKey -> Persist [AssessmentKey]
assessmentsOfGroup = PersistImpl.assessmentsOfGroup
-- * Test Scripts
-- Saves the test script for the given course
saveTestScript :: CourseKey -> TestScript -> Persist TestScriptKey
saveTestScript = PersistImpl.saveTestScript
-- Load the test script from the database
loadTestScript :: TestScriptKey -> Persist TestScript
loadTestScript = PersistImpl.loadTestScript
-- Returns the course of the test script
courseOfTestScript :: TestScriptKey -> Persist CourseKey
courseOfTestScript = PersistImpl.courseOfTestScript
-- Updates the test script for the given test script key
modifyTestScript :: TestScriptKey -> TestScript -> Persist ()
modifyTestScript = PersistImpl.modifyTestScript
-- * Test Cases
-- Saves the test case for the given assignment and given test script
saveTestCase :: TestScriptKey -> AssignmentKey -> TestCase -> Persist TestCaseKey
saveTestCase = PersistImpl.saveTestCase
-- Loads the test case from the database
loadTestCase :: TestCaseKey -> Persist TestCase
loadTestCase = PersistImpl.loadTestCase
-- Returns the test script of the given test case
testScriptOfTestCase :: TestCaseKey -> Persist TestScriptKey
testScriptOfTestCase = PersistImpl.testScriptOfTestCase
-- Updates the test case for the given test case key
modifyTestCase :: TestCaseKey -> TestCase -> Persist ()
modifyTestCase = PersistImpl.modifyTestCase
-- Deletes the link from the test case connected to an assignment
removeTestCaseAssignment :: TestCaseKey -> AssignmentKey -> Persist ()
removeTestCaseAssignment = PersistImpl.removeTestCaseAssignment
copyTestCaseFile :: TestCaseKey -> Username -> UsersFile -> Persist ()
copyTestCaseFile = PersistImpl.copyTestCaseFile
modifyTestScriptOfTestCase :: TestCaseKey -> TestScriptKey -> Persist ()
modifyTestScriptOfTestCase = PersistImpl.modifyTestScriptOfTestCase
-- * Test Jobs
saveTestJob :: SubmissionKey -> Persist () -- Saves the test job for the test daemon
saveTestJob = PersistImpl.saveTestJob
-- * Test Feedbacks
-- | Inserts a test feedback for the incoming test comment directory,
-- this function is mainly for testing of this functionality.
-- It creates a test feedback in a locked state. Use finalizeTestFeedback
-- to unlock it.
insertTestFeedback :: SubmissionKey -> FeedbackInfo -> Persist ()
insertTestFeedback = PersistImpl.insertTestFeedback
-- | Unlocks the test feedback, this functionality is mainly
-- for supporting testing.
finalizeTestFeedback :: SubmissionKey -> Persist ()
finalizeTestFeedback = PersistImpl.finalizeTestFeedback
-- | List the feedbacks that the test daemon left in the test-incoming,
-- comments for the group's admin, and comments for the student, and
-- the final test result.
testFeedbacks :: Persist [(SubmissionKey, Feedback)]
testFeedbacks = PersistImpl.testFeedbacks
-- Deletes the test daemon's feedbacks from the test-incoming
deleteTestFeedbacks :: SubmissionKey -> Persist ()
deleteTestFeedbacks = PersistImpl.deleteTestFeedbacks
-- * Assignment
-- Lists all the assignments in the database
assignmentKeys :: Persist [AssignmentKey]
assignmentKeys = PersistImpl.assignmentKeys
-- Save the assignment into the database
saveAssignment :: Assignment -> Persist AssignmentKey
saveAssignment = PersistImpl.saveAssignment
-- Load the assignment from the database
loadAssignment :: AssignmentKey -> Persist Assignment
loadAssignment = PersistImpl.loadAssignment
-- Modify the assignment in the database for the given key
modifyAssignment :: AssignmentKey -> Assignment -> Persist ()
modifyAssignment = PersistImpl.modifyAssignment
-- Lists all the assignments that are created for the given course
courseAssignments :: CourseKey -> Persist [AssignmentKey]
courseAssignments = PersistImpl.courseAssignments
-- Lists all the assignments that are created for the given group
groupAssignments :: GroupKey -> Persist [AssignmentKey]
groupAssignments = PersistImpl.groupAssignments
-- Save the assignment for the given course
saveCourseAssignment :: CourseKey -> Assignment -> Persist AssignmentKey
saveCourseAssignment = PersistImpl.saveCourseAssignment
-- Save the assignment for the given group
saveGroupAssignment :: GroupKey -> Assignment -> Persist AssignmentKey
saveGroupAssignment = PersistImpl.saveGroupAssignment
-- Returns (Just courseKey), the course key of the assignment, if the assignment
-- is a course assignment; otherwise Nothing
courseOfAssignment :: AssignmentKey -> Persist (Maybe CourseKey)
courseOfAssignment = PersistImpl.courseOfAssignment
-- Returns (Just groupKey), the group key of the assignment, if the assignment
-- is a group assignment; otherwise Nothing
groupOfAssignment :: AssignmentKey -> Persist (Maybe GroupKey)
groupOfAssignment = PersistImpl.groupOfAssignment
-- Returns all the submissions for the given assignment
submissionsForAssignment :: AssignmentKey -> Persist [SubmissionKey]
submissionsForAssignment = PersistImpl.submissionsForAssignment
-- Returns when the assignment was first saved; modifying the assignment afterwards
-- does not change the time stamp
assignmentCreatedTime :: AssignmentKey -> Persist UTCTime
assignmentCreatedTime = PersistImpl.assignmentCreatedTime
-- Returns the test case of the assignment if there is any attached:
-- (Just key) if there is, otherwise Nothing
testCaseOfAssignment :: AssignmentKey -> Persist (Maybe TestCaseKey)
testCaseOfAssignment = PersistImpl.testCaseOfAssignment
-- * Submission
-- Saves the submission for a given assignment, submitted by the given user
saveSubmission :: AssignmentKey -> Username -> Submission -> Persist SubmissionKey
saveSubmission = PersistImpl.saveSubmission
-- Loads the given submission from the database
loadSubmission :: SubmissionKey -> Persist Submission
loadSubmission = PersistImpl.loadSubmission
-- Returns the assignment for the submission
assignmentOfSubmission :: SubmissionKey -> Persist AssignmentKey
assignmentOfSubmission = PersistImpl.assignmentOfSubmission
-- Returns the username for the submission
usernameOfSubmission :: SubmissionKey -> Persist Username
usernameOfSubmission = PersistImpl.usernameOfSubmission
-- Lists all the submissions stored in the database
submissionKeys :: Persist [SubmissionKey]
submissionKeys = PersistImpl.submissionKeys
-- Returns the evaluation for the submission if the evaluation exists, otherwise Nothing
evaluationOfSubmission :: SubmissionKey -> Persist (Maybe EvaluationKey)
evaluationOfSubmission = PersistImpl.evaluationOfSubmission
-- Returns all the comments for the given submission
commentsOfSubmission :: SubmissionKey -> Persist [CommentKey]
commentsOfSubmission = PersistImpl.commentsOfSubmission
-- Return all the feedbacks for the given submission
feedbacksOfSubmission :: SubmissionKey -> Persist [FeedbackKey]
feedbacksOfSubmission = PersistImpl.feedbacksOfSubmission
-- Returns the last submission of an assignment submitted by the given user if the
-- user has submitted something, otherwise Nothing
lastSubmission :: AssignmentKey -> Username -> Persist (Maybe SubmissionKey)
lastSubmission = PersistImpl.lastSubmission
-- Remove the submission from the opened queue (submissions which still need to be evaluated)
removeFromOpened :: AssignmentKey -> Username -> SubmissionKey -> Persist ()
removeFromOpened = PersistImpl.removeFromOpened
-- Returns all the opened submissions
openedSubmissions :: Persist [SubmissionKey]
openedSubmissions = PersistImpl.openedSubmissions
-- Returns the opened submissions that are associated with the given assignments or users
openedSubmissionSubset :: Set AssignmentKey -> Set Username -> Persist [SubmissionKey]
openedSubmissionSubset = PersistImpl.openedSubmissionSubset
-- Calculates all the opened submissions for a given user and a given assignment
usersOpenedSubmissions :: AssignmentKey -> Username -> Persist [SubmissionKey]
usersOpenedSubmissions = PersistImpl.usersOpenedSubmissions
-- * Feedback
-- Saves the feedback
saveFeedback :: SubmissionKey -> Feedback -> Persist FeedbackKey
saveFeedback = PersistImpl.saveFeedback
-- Loads the feedback
loadFeedback :: FeedbackKey -> Persist Feedback
loadFeedback = PersistImpl.loadFeedback
-- Returns the submission of the feedback
submissionOfFeedback :: FeedbackKey -> Persist SubmissionKey
submissionOfFeedback = PersistImpl.submissionOfFeedback
-- * Notification
saveCommentNotification :: CommentKey -> Notification -> Persist NotificationKey
saveCommentNotification = PersistImpl.saveCommentNotification
saveFeedbackNotification :: FeedbackKey -> Notification -> Persist NotificationKey
saveFeedbackNotification = PersistImpl.saveFeedbackNotification
saveSystemNotification :: Notification -> Persist NotificationKey
saveSystemNotification = PersistImpl.saveSystemNotification
loadNotification :: NotificationKey -> Persist Notification
loadNotification = PersistImpl.loadNotification
commentOfNotification :: NotificationKey -> Persist (Maybe CommentKey)
commentOfNotification = PersistImpl.commentOfNotification
feedbackOfNotification :: NotificationKey -> Persist (Maybe FeedbackKey)
feedbackOfNotification = PersistImpl.feedbackOfNotification
usersOfNotification :: NotificationKey -> Persist [Username]
usersOfNotification = PersistImpl.usersOfNotification
-- * Evaluation
-- Save the evaluation for the given submission
saveSubmissionEvaluation :: SubmissionKey -> Evaluation -> Persist EvaluationKey
saveSubmissionEvaluation = PersistImpl.saveSubmissionEvaluation
-- Save the evaluation for the given score entry
saveScoreEvaluation :: ScoreKey -> Evaluation -> Persist EvaluationKey
saveScoreEvaluation = PersistImpl.saveScoreEvaluation
-- Load the evaluation from the database
loadEvaluation :: EvaluationKey -> Persist Evaluation
loadEvaluation = PersistImpl.loadEvaluation
-- Modify the evaluation for the given key in the database
modifyEvaluation :: EvaluationKey -> Evaluation -> Persist ()
modifyEvaluation = PersistImpl.modifyEvaluation
-- Returns the submission of the given evaluation
submissionOfEvaluation :: EvaluationKey -> Persist (Maybe SubmissionKey)
submissionOfEvaluation = PersistImpl.submissionOfEvaluation
-- Returns the score entry of the given evaluation
scoreOfEvaluation :: EvaluationKey -> Persist (Maybe ScoreKey)
scoreOfEvaluation = PersistImpl.scoreOfEvaluation
-- * Comment
-- Saves the comment for the given submission
saveComment :: SubmissionKey -> Comment -> Persist CommentKey
saveComment = PersistImpl.saveComment
-- Loads the comment from the database
loadComment :: CommentKey -> Persist Comment
loadComment = PersistImpl.loadComment
-- Returns the submission of the comment
submissionOfComment :: CommentKey -> Persist SubmissionKey
submissionOfComment = PersistImpl.submissionOfComment
-- * Assessment
saveCourseAssessment :: CourseKey -> Assessment -> Persist AssessmentKey
saveCourseAssessment = PersistImpl.saveCourseAssessment
saveGroupAssessment :: GroupKey -> Assessment -> Persist AssessmentKey
saveGroupAssessment = PersistImpl.saveGroupAssessment
loadAssessment :: AssessmentKey -> Persist Assessment
loadAssessment = PersistImpl.loadAssessment
modifyAssessment :: AssessmentKey -> Assessment -> Persist ()
modifyAssessment = PersistImpl.modifyAssessment
courseOfAssessment :: AssessmentKey -> Persist (Maybe CourseKey)
courseOfAssessment = PersistImpl.courseOfAssessment
groupOfAssessment :: AssessmentKey -> Persist (Maybe GroupKey)
groupOfAssessment = PersistImpl.groupOfAssessment
scoresOfAssessment :: AssessmentKey -> Persist [ScoreKey]
scoresOfAssessment = PersistImpl.scoresOfAssessment
-- * Score
saveScore :: Username -> AssessmentKey -> Score -> Persist ScoreKey
saveScore = PersistImpl.saveScore
loadScore :: ScoreKey -> Persist Score
loadScore = PersistImpl.loadScore
assessmentOfScore :: ScoreKey -> Persist AssessmentKey
assessmentOfScore = PersistImpl.assessmentOfScore
usernameOfScore :: ScoreKey -> Persist Username
usernameOfScore = PersistImpl.usernameOfScore
evaluationOfScore :: ScoreKey -> Persist (Maybe EvaluationKey)
evaluationOfScore = PersistImpl.evaluationOfScore
-- * Incoming dir for the test results
testIncomingDataDir :: FilePath
testIncomingDataDir = PersistImpl.testIncomingDataDir
-- * Persistence initialization
-- | Creates a persist initialization structure.
createPersistInit :: Config -> IO (Init.PersistInit)
createPersistInit = PersistImpl.createPersistInit
type Interpreter = PersistImpl.Interpreter
-- | Creates an interpreter for the persistent computation
createPersistInterpreter :: Config -> IO Interpreter
createPersistInterpreter = PersistImpl.createPersistInterpreter
-- | Parses the configuration string
parseConfig :: String -> Config
parseConfig = PersistImpl.parseConfig
-- | Default configuration for the Persistence layer.
-- This is only a placeholder.
defaultConfig :: Config
defaultConfig = PersistImpl.defaultConfig
-- | Run the given persist command with the interpreter
runPersist :: Interpreter -> Persist a -> IO (Erroneous a)
runPersist = PersistImpl.runInterpreter
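-- A minimal usage sketch (illustration only, not part of the original API): build an
-- interpreter from the placeholder 'defaultConfig' above and run a single query with
-- it. The 'Username' value is assumed to be constructed elsewhere from the domain
-- entities.
--
-- > checkUser :: Username -> IO (Erroneous Bool)
-- > checkUser u = do
-- >   interpreter <- createPersistInterpreter defaultConfig
-- >   runPersist interpreter (doesUserExist u)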
#ifdef TEST
persistTests :: TestSet ()
persistTests = PersistImpl.tests
#endif
|
pgj/bead
|
src/Bead/Persistence/Persist.hs
|
bsd-3-clause
| 23,919 | 0 | 9 | 3,348 | 3,520 | 2,021 | 1,499 | 389 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
-- |
-- Module : Simulation.Aivika.Experiment.Chart.TimeSeriesView
-- Copyright : Copyright (c) 2012-2017, David Sorokin <[email protected]>
-- License : BSD3
-- Maintainer : David Sorokin <[email protected]>
-- Stability : experimental
-- Tested with: GHC 8.0.1
--
-- The module defines 'TimeSeriesView' that plots the time series charts.
--
module Simulation.Aivika.Experiment.Chart.TimeSeriesView
(TimeSeriesView(..),
defaultTimeSeriesView) where
import Control.Monad
import Control.Monad.Trans
import Control.Lens
import qualified Data.Map as M
import Data.IORef
import Data.Maybe
import Data.Either
import Data.Array
import Data.List
import Data.Monoid
import Data.Default.Class
import System.IO
import System.FilePath
import Graphics.Rendering.Chart
import Simulation.Aivika
import Simulation.Aivika.Experiment
import Simulation.Aivika.Experiment.Base
import Simulation.Aivika.Experiment.Chart.Types
import Simulation.Aivika.Experiment.Chart.Utils (colourisePlotLines)
-- | Defines the 'View' that plots the time series charts.
data TimeSeriesView =
TimeSeriesView { timeSeriesTitle :: String,
-- ^ This is a title used in HTML.
timeSeriesDescription :: String,
-- ^ This is a description used in HTML.
timeSeriesWidth :: Int,
-- ^ The width of the chart.
timeSeriesHeight :: Int,
-- ^ The height of the chart.
timeSeriesGridSize :: Maybe Int,
-- ^ The size of the grid, where the series data are processed.
timeSeriesFileName :: ExperimentFilePath,
-- ^ It defines the file name with optional extension for each image to be saved.
-- It may include special variables @$TITLE@, @$RUN_INDEX@ and @$RUN_COUNT@.
--
-- An example is
--
-- @
-- timeSeriesFileName = UniqueFilePath \"$TITLE - $RUN_INDEX\"
-- @
timeSeriesPredicate :: Event Bool,
-- ^ It specifies the predicate that defines
-- when we plot data in the chart.
timeSeriesTransform :: ResultTransform,
-- ^ The transform applied to the results before receiving series.
timeSeriesLeftYSeries :: ResultTransform,
-- ^ It defines the series plotted basing on the left Y axis.
timeSeriesRightYSeries :: ResultTransform,
-- ^ It defines the series plotted basing on the right Y axis.
timeSeriesPlotTitle :: String,
-- ^ This is a title used in the chart when
-- simulating a single run. It may include
-- special variable @$TITLE@.
--
-- An example is
--
-- @
-- timeSeriesPlotTitle = \"$TITLE\"
-- @
timeSeriesRunPlotTitle :: String,
-- ^ The run title for the chart. It is used
-- when simulating multiple runs and it may
-- include special variables @$RUN_INDEX@,
-- @$RUN_COUNT@ and @$PLOT_TITLE@.
--
-- An example is
--
-- @
-- timeSeriesRunPlotTitle = \"$PLOT_TITLE / Run $RUN_INDEX of $RUN_COUNT\"
-- @
timeSeriesPlotLines :: [PlotLines Double Double ->
PlotLines Double Double],
-- ^ Probably, an infinite sequence of plot
-- transformations based on which the plot
-- is constructed for each series. Generally,
-- it must not coincide with a sequence of
-- labels as one label may denote a whole list
-- or an array of data providers.
--
-- Here you can define a colour or style of
-- the plot lines.
timeSeriesBottomAxis :: LayoutAxis Double ->
LayoutAxis Double,
-- ^ A transformation of the bottom axis,
-- after title @time@ is added.
timeSeriesLayout :: LayoutLR Double Double Double ->
LayoutLR Double Double Double
-- ^ A transformation of the plot layout,
-- where you can redefine the axes, for example.
}
-- | The default time series view.
defaultTimeSeriesView :: TimeSeriesView
defaultTimeSeriesView =
TimeSeriesView { timeSeriesTitle = "Time Series",
timeSeriesDescription = "It shows the Time Series chart(s).",
timeSeriesWidth = 640,
timeSeriesHeight = 480,
timeSeriesGridSize = Just (2 * 640),
timeSeriesFileName = UniqueFilePath "TimeSeries($RUN_INDEX)",
timeSeriesPredicate = return True,
timeSeriesTransform = id,
timeSeriesLeftYSeries = const mempty,
timeSeriesRightYSeries = const mempty,
timeSeriesPlotTitle = "$TITLE",
timeSeriesRunPlotTitle = "$PLOT_TITLE / Run $RUN_INDEX of $RUN_COUNT",
timeSeriesPlotLines = colourisePlotLines,
timeSeriesBottomAxis = id,
timeSeriesLayout = id }
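-- A minimal customisation sketch (illustration only): reports typically start from
-- 'defaultTimeSeriesView' and override a few fields. The selector 'resultByName' is
-- assumed to be the name-based series selector from the Aivika results API; any
-- 'ResultTransform' value works for the Y-series fields.
--
-- @
-- queueSizeView :: TimeSeriesView
-- queueSizeView =
--   defaultTimeSeriesView { timeSeriesTitle = \"Queue Size\",
--                           timeSeriesLeftYSeries = resultByName \"queueCount\" }
-- @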
instance ChartRendering r => ExperimentView TimeSeriesView (WebPageRenderer r) where
outputView v =
let reporter exp (WebPageRenderer renderer _) dir =
do st <- newTimeSeries v exp renderer dir
let context =
WebPageContext $
WebPageWriter { reporterWriteTOCHtml = timeSeriesTOCHtml st,
reporterWriteHtml = timeSeriesHtml st }
return ExperimentReporter { reporterInitialise = return (),
reporterFinalise = return (),
reporterSimulate = simulateTimeSeries st,
reporterContext = context }
in ExperimentGenerator { generateReporter = reporter }
instance ChartRendering r => ExperimentView TimeSeriesView (FileRenderer r) where
outputView v =
let reporter exp (FileRenderer renderer _) dir =
do st <- newTimeSeries v exp renderer dir
return ExperimentReporter { reporterInitialise = return (),
reporterFinalise = return (),
reporterSimulate = simulateTimeSeries st,
reporterContext = FileContext }
in ExperimentGenerator { generateReporter = reporter }
-- | The state of the view.
data TimeSeriesViewState r =
TimeSeriesViewState { timeSeriesView :: TimeSeriesView,
timeSeriesExperiment :: Experiment,
timeSeriesRenderer :: r,
timeSeriesDir :: FilePath,
timeSeriesMap :: M.Map Int FilePath }
-- | Create a new state of the view.
newTimeSeries :: ChartRendering r => TimeSeriesView -> Experiment -> r -> FilePath -> ExperimentWriter (TimeSeriesViewState r)
newTimeSeries view exp renderer dir =
do let n = experimentRunCount exp
fs <- forM [0..(n - 1)] $ \i ->
resolveFilePath dir $
mapFilePath (flip replaceExtension $ renderableChartExtension renderer) $
expandFilePath (timeSeriesFileName view) $
M.fromList [("$TITLE", timeSeriesTitle view),
("$RUN_INDEX", show $ i + 1),
("$RUN_COUNT", show n)]
liftIO $ forM_ fs $ flip writeFile [] -- reserve the file names
let m = M.fromList $ zip [0..(n - 1)] fs
return TimeSeriesViewState { timeSeriesView = view,
timeSeriesExperiment = exp,
timeSeriesRenderer = renderer,
timeSeriesDir = dir,
timeSeriesMap = m }
-- | Plot the time series chart within simulation.
simulateTimeSeries :: ChartRendering r => TimeSeriesViewState r -> ExperimentData -> Composite ()
simulateTimeSeries st expdata =
do let view = timeSeriesView st
loc = localisePathResultTitle $
experimentLocalisation $
timeSeriesExperiment st
rs1 = timeSeriesLeftYSeries view $
timeSeriesTransform view $
experimentResults expdata
rs2 = timeSeriesRightYSeries view $
timeSeriesTransform view $
experimentResults expdata
exts1 = resultsToDoubleValues rs1
exts2 = resultsToDoubleValues rs2
signals = experimentPredefinedSignals expdata
n = experimentRunCount $ timeSeriesExperiment st
width = timeSeriesWidth view
height = timeSeriesHeight view
predicate = timeSeriesPredicate view
title = timeSeriesTitle view
plotTitle = timeSeriesPlotTitle view
runPlotTitle = timeSeriesRunPlotTitle view
plotLines = timeSeriesPlotLines view
plotBottomAxis = timeSeriesBottomAxis view
plotLayout = timeSeriesLayout view
renderer = timeSeriesRenderer st
i <- liftParameter simulationIndex
let file = fromJust $ M.lookup (i - 1) (timeSeriesMap st)
plotTitle' =
replace "$TITLE" title
plotTitle
runPlotTitle' =
if n == 1
then plotTitle'
else replace "$RUN_INDEX" (show i) $
replace "$RUN_COUNT" (show n) $
replace "$PLOT_TITLE" plotTitle'
runPlotTitle
inputSignal ext =
case timeSeriesGridSize view of
Just m ->
liftEvent $
fmap (mapSignal $ const ()) $
newSignalInTimeGrid m
Nothing ->
return $
pureResultSignal signals $
resultValueSignal ext
inputHistory exts =
forM exts $ \ext ->
do let transform () =
do x <- predicate
if x
then resultValueData ext
else return (1/0) -- the infinite values will be ignored then
s <- inputSignal ext
newSignalHistory $
mapSignalM transform s
hs1 <- inputHistory exts1
hs2 <- inputHistory exts2
disposableComposite $
DisposableEvent $
do let plots hs exts plotLineTails =
do ps <-
forM (zip3 hs exts (head plotLineTails)) $
\(h, ext, plotLines) ->
do (ts, xs) <- readSignalHistory h
return $
toPlot $
plotLines $
plot_lines_values .~ filterPlotLinesValues (zip (elems ts) (elems xs)) $
plot_lines_title .~ (loc $ resultValueIdPath ext) $
def
return (ps, drop (length hs) plotLineTails)
(ps1, plotLineTails) <- plots hs1 exts1 (tails plotLines)
(ps2, plotLineTails) <- plots hs2 exts2 plotLineTails
let ps1' = map Left ps1
ps2' = map Right ps2
ps' = ps1' ++ ps2'
axis = plotBottomAxis $
laxis_title .~ "time" $
def
updateLeftAxis =
if null ps1
then layoutlr_left_axis_visibility .~ AxisVisibility False False False
else id
updateRightAxis =
if null ps2
then layoutlr_right_axis_visibility .~ AxisVisibility False False False
else id
chart = plotLayout .
renderingLayoutLR renderer .
updateLeftAxis . updateRightAxis $
layoutlr_x_axis .~ axis $
layoutlr_title .~ runPlotTitle' $
layoutlr_plots .~ ps' $
def
liftIO $
do renderChart renderer (width, height) file (toRenderable chart)
when (experimentVerbose $ timeSeriesExperiment st) $
putStr "Generated file " >> putStrLn file
-- | Remove the NaN and infinity values.
filterPlotLinesValues :: [(Double, Double)] -> [[(Double, Double)]]
filterPlotLinesValues =
filter (not . null) .
divideBy (\(t, x) -> isNaN x || isInfinite x)
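-- For example, assuming 'divideBy' splits the list at the points matching the
-- predicate (dropping them), a series [(0,1), (1,1/0), (2,3)] becomes
-- [[(0,1)], [(2,3)]], so the infinite placeholder values produced by the
-- predicate check above are never plotted.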
-- | Get the HTML code.
timeSeriesHtml :: TimeSeriesViewState r -> Int -> HtmlWriter ()
timeSeriesHtml st index =
let n = experimentRunCount $ timeSeriesExperiment st
in if n == 1
then timeSeriesHtmlSingle st index
else timeSeriesHtmlMultiple st index
-- | Get the HTML code for a single run.
timeSeriesHtmlSingle :: TimeSeriesViewState r -> Int -> HtmlWriter ()
timeSeriesHtmlSingle st index =
do header st index
let f = fromJust $ M.lookup 0 (timeSeriesMap st)
writeHtmlParagraph $
writeHtmlImage (makeRelative (timeSeriesDir st) f)
-- | Get the HTML code for multiple runs.
timeSeriesHtmlMultiple :: TimeSeriesViewState r -> Int -> HtmlWriter ()
timeSeriesHtmlMultiple st index =
do header st index
let n = experimentRunCount $ timeSeriesExperiment st
forM_ [0..(n - 1)] $ \i ->
let f = fromJust $ M.lookup i (timeSeriesMap st)
in writeHtmlParagraph $
writeHtmlImage (makeRelative (timeSeriesDir st) f)
header :: TimeSeriesViewState r -> Int -> HtmlWriter ()
header st index =
do writeHtmlHeader3WithId ("id" ++ show index) $
writeHtmlText (timeSeriesTitle $ timeSeriesView st)
let description = timeSeriesDescription $ timeSeriesView st
unless (null description) $
writeHtmlParagraph $
writeHtmlText description
-- | Get the TOC item.
timeSeriesTOCHtml :: TimeSeriesViewState r -> Int -> HtmlWriter ()
timeSeriesTOCHtml st index =
writeHtmlListItem $
writeHtmlLink ("#id" ++ show index) $
writeHtmlText (timeSeriesTitle $ timeSeriesView st)
|
dsorokin/aivika-experiment-chart
|
Simulation/Aivika/Experiment/Chart/TimeSeriesView.hs
|
bsd-3-clause
| 14,833 | 0 | 28 | 5,672 | 2,566 | 1,349 | 1,217 | 242 | 6 |
{-# LANGUAGE DeriveDataTypeable, OverloadedStrings, ScopedTypeVariables, TypeSynonymInstances, FlexibleContexts, FlexibleInstances, TemplateHaskell, PatternGuards #-}
-- | useful types and simple accessor functions
module Web.MangoPay.Types where
import Control.Applicative
import Control.Exception.Lifted (Exception, throwIO)
import Control.Monad.Base (MonadBase)
import Data.Text as T hiding (singleton, map, toLower)
import Data.Text.Read as T
import Data.Typeable (Typeable)
import Data.ByteString as BS (ByteString)
import Data.Time.Clock.POSIX (POSIXTime)
import Data.Aeson
import Data.Aeson.Types (Pair,Parser)
import Data.Default
import qualified Data.Text.Encoding as TE
import qualified Data.ByteString.UTF8 as UTF8
import Data.Maybe (listToMaybe)
import qualified Network.HTTP.Conduit as H
import qualified Network.HTTP.Types as HT
import Control.Monad.Logger
import Data.Aeson.Encode (encodeToTextBuilder)
import Data.Text.Lazy.Builder (fromText, toLazyText, singleton)
import Data.Monoid ((<>), mempty)
import Data.Text.Lazy (toStrict)
import Data.String (fromString, IsString)
import qualified Data.Vector as V (length)
import Language.Haskell.TH
import Language.Haskell.TH.Syntax (qLocation)
import Text.Printf (printf)
import qualified Data.ByteString.Lazy as BS (toStrict)
import Data.Char (toLower)
-- | the MangoPay access point
data AccessPoint = Sandbox | Production | Custom ByteString
deriving (Show,Read,Eq,Ord,Typeable)
-- | get the real url for the given access point
getAccessPointURL :: AccessPoint -> ByteString
getAccessPointURL Sandbox="api.sandbox.mangopay.com"
getAccessPointURL Production="api.mangopay.com"
getAccessPointURL (Custom bs)=bs
-- | the app credentials
data Credentials = Credentials {
cClientId :: Text -- ^ client id
,cName :: Text -- ^ the name
,cEmail :: Text -- ^ the email
        ,cClientSecret :: Maybe Text -- ^ client secret, may be Nothing if we haven't generated it
}
deriving (Show,Read,Eq,Ord,Typeable)
-- | to json as per MangoPay format
instance ToJSON Credentials where
toJSON c=objectSN ["ClientId" .= cClientId c, "Name" .= cName c , "Email" .= cEmail c,"Passphrase" .= cClientSecret c]
-- | from json as per MangoPay format
instance FromJSON Credentials where
parseJSON (Object v) =Credentials <$>
v .: "ClientId" <*>
v .: "Name" <*>
v .: "Email" <*>
v .: "Passphrase"
parseJSON _= fail "Credentials"
-- | get client id in ByteString form
clientIdBS :: Credentials -> ByteString
clientIdBS=TE.encodeUtf8 . cClientId
-- | the access token is simply a ByteString
newtype AccessToken=AccessToken ByteString
deriving (Eq, Ord, Read, Show, Typeable)
-- | the oauth token returned after authentication
data OAuthToken = OAuthToken {
oaAccessToken :: Text -- ^ the access token
,oaTokenType :: Text -- ^ the token type
,oaExpires :: Int -- ^ expiration
}
deriving (Show,Read,Eq,Ord,Typeable)
-- | to json as per MangoPay format
instance ToJSON OAuthToken where
toJSON oa=objectSN ["access_token" .= oaAccessToken oa, "token_type" .= oaTokenType oa, "expires_in" .= oaExpires oa]
-- | from json as per MangoPay format
instance FromJSON OAuthToken where
parseJSON (Object v) =OAuthToken <$>
v .: "access_token" <*>
v .: "token_type" <*>
v .: "expires_in"
parseJSON _= fail "OAuthToken"
-- | build the access token from the OAuthToken
toAccessToken :: OAuthToken -> AccessToken
toAccessToken oa=AccessToken $ TE.encodeUtf8 $ T.concat [oaTokenType oa, " ",oaAccessToken oa]
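-- For example, @toAccessToken (OAuthToken "abc" "Bearer" 3600)@ yields
-- @AccessToken "Bearer abc"@: the token type and the token itself separated by a space.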
-- | an exception that a call to MangoPay may throw
data MpException = MpJSONException String -- ^ JSON parsing error
| MpAppException MpError -- ^ application exception
| MpHttpException H.HttpException (Maybe Value) -- ^ HTTP level exception, maybe with some JSON payload
| MpHttpExceptionS String (Maybe Value) -- ^ HTTP level exception for which we only have a string (no Read instance)
-- , maybe with some JSON payload
deriving (Show,Typeable)
-- | make our exception type a normal exception
instance Exception MpException
-- | to json
instance ToJSON MpException where
toJSON (MpJSONException j) = objectSN ["Type" .= ("MpJSONException"::Text), "Error" .= j]
toJSON (MpAppException mpe) = objectSN ["Type" .= ("MpAppException"::Text), "Error" .= toJSON mpe]
toJSON (MpHttpException e v) = objectSN ["Type" .= ("MpHttpException"::Text), "Error" .= (show e), "Value" .= v]
toJSON (MpHttpExceptionS e v) = objectSN ["Type" .= ("MpHttpException"::Text), "Error" .= e, "Value" .= v]
instance FromJSON MpException where
parseJSON (Object v) = do
typ::String <- v .: "Type"
case typ of
"MpJSONException" -> MpJSONException <$> v .: "Error"
"MpAppException" -> MpAppException <$> v .: "Error"
"MpHttpException" -> MpHttpExceptionS <$> v .: "Error" <*> v .:? "Value"
_ -> fail $ "MpException:" ++ typ
parseJSON _= fail "MpException"
-- | an error returned to us by MangoPay
data MpError = MpError {
igeId :: Text
,igeType :: Text
,igeMessage :: Text
,igeDate :: Maybe MpTime
}
deriving (Show,Eq,Ord,Typeable)
-- | to json as per MangoPay format
instance ToJSON MpError where
toJSON mpe=objectSN ["Id" .= igeId mpe, "Type" .= igeType mpe, "Message" .= igeMessage mpe, "Date" .= igeDate mpe]
-- | from json as per MangoPay format
instance FromJSON MpError where
parseJSON (Object v) = MpError <$>
v .: "Id" <*>
v .: "Type" <*>
v .: "Message" <*>
v .: "Date"
parseJSON _= fail "MpError"
-- | @newtype@ of 'POSIXTime' with MangoPay's JSON format.
newtype MpTime = MpTime { unMpTime :: POSIXTime } deriving (Eq, Ord, Show)
-- | from json as per MangoPay format
instance FromJSON MpTime where
parseJSON n@(Number _) = (MpTime . fromIntegral . (round::Double -> Integer)) <$> parseJSON n
parseJSON o = fail $ "MpTime: " ++ show o
-- | to json as per MangoPay format
instance ToJSON MpTime where
toJSON (MpTime pt) = toJSON (round pt :: Integer)
-- | Pagination info for searches
-- <http://docs.mangopay.com/api-references/pagination/>
data Pagination = Pagination {
pPage :: Integer
,pPerPage :: Integer
}
deriving (Show,Read,Eq,Ord,Typeable)
instance Default Pagination where
def=Pagination 1 10
-- | get pagination attributes for query
paginationAttributes :: Maybe Pagination -> [(ByteString,Maybe ByteString)]
paginationAttributes (Just p)=["page" ?+ pPage p, "per_page" ?+ pPerPage p]
paginationAttributes _=[]
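-- For example, @paginationAttributes (Just (Pagination 2 25))@ yields
-- @[("page", Just "2"), ("per_page", Just "25")]@, ready to be added to the query string.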
-- | A partial list with pagination information.
data PagedList a= PagedList {
plData :: [a]
,plItemCount :: Integer
,plPageCount :: Integer
}
deriving (Show,Read,Eq,Ord,Typeable)
-- | Id of a card
type CardId=Text
-- | alias for Currency
type Currency=Text
-- | the expiration date of a card
data CardExpiration = CardExpiration {
ceMonth :: Int
,ceYear :: Int
}
deriving (Show,Read,Eq,Ord,Typeable)
-- | currency amount
data Amount=Amount {
aCurrency :: Currency
,aAmount :: Integer -- ^ all amounts should be in cents!
}
deriving (Show,Read,Eq,Ord,Typeable)
-- | to json as per MangoPay format
instance ToJSON Amount where
toJSON b=objectSN ["Currency" .= aCurrency b,"Amount" .= aAmount b]
-- | from json as per MangoPay format
instance FromJSON Amount where
parseJSON (Object v) =Amount <$>
v .: "Currency" <*>
v .: "Amount"
parseJSON _=fail "Amount"
-- | supported income ranges
data IncomeRange=IncomeRange1 | IncomeRange2 | IncomeRange3 | IncomeRange4 | IncomeRange5 | IncomeRange6
deriving (Show,Read,Eq,Ord,Bounded, Enum, Typeable)
-- | to json as per MangoPay format
-- the samples do show string format when writing, integer format when reading...
instance ToJSON IncomeRange where
toJSON IncomeRange1="1"
toJSON IncomeRange2="2"
toJSON IncomeRange3="3"
toJSON IncomeRange4="4"
toJSON IncomeRange5="5"
toJSON IncomeRange6="6"
-- | from json as per MangoPay format
-- the samples do show string format when writing, integer format when reading...
instance FromJSON IncomeRange where
parseJSON (String "1") =pure IncomeRange1
parseJSON (String "2") =pure IncomeRange2
parseJSON (String "3") =pure IncomeRange3
parseJSON (String "4") =pure IncomeRange4
parseJSON (String "5") =pure IncomeRange5
parseJSON (String "6") =pure IncomeRange6
parseJSON (Number 1) =pure IncomeRange1
parseJSON (Number 2) =pure IncomeRange2
parseJSON (Number 3) =pure IncomeRange3
parseJSON (Number 4) =pure IncomeRange4
parseJSON (Number 5) =pure IncomeRange5
parseJSON (Number 6) =pure IncomeRange6
parseJSON _= fail "IncomeRange"
-- | bounds in euros for income range
incomeBounds :: IncomeRange -> (Amount,Amount)
incomeBounds IncomeRange1 = (kEuros 0,kEuros 18)
incomeBounds IncomeRange2 = (kEuros 18,kEuros 30)
incomeBounds IncomeRange3 = (kEuros 30,kEuros 50)
incomeBounds IncomeRange4 = (kEuros 50,kEuros 80)
incomeBounds IncomeRange5 = (kEuros 80,kEuros 120)
incomeBounds IncomeRange6 = (kEuros 120,kEuros (-1))
-- | get Income Range for given Euro amount
incomeRange :: Amount -> IncomeRange
incomeRange (Amount "EUR" cents)
| cents < kCents 18 = IncomeRange1
| cents < kCents 30 = IncomeRange2
| cents < kCents 50 = IncomeRange3
| cents < kCents 80 = IncomeRange4
| cents < kCents 120 = IncomeRange5
| otherwise = IncomeRange6
incomeRange (Amount _ _) = error "Amount should be given in euros"
-- | convert an amount of kilo-euros into an 'Amount'
kEuros :: Integer -> Amount
kEuros = Amount "EUR" . kCents -- amount is in cents
kCents :: Integer -> Integer
kCents ke = ke * 1000 * 100
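-- For example, @kEuros 25@ is @Amount "EUR" 2500000@ (25 thousand euros expressed in
-- cents), and @incomeRange (kEuros 25)@ is 'IncomeRange2' (between 18 and 30 thousand euros).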
-- | read Card Expiration from text representation (MMYY)
readCardExpiration :: T.Reader CardExpiration
readCardExpiration t |
4 == T.length t,
(m,y)<-T.splitAt 2 t=do
im<-T.decimal m
iy<-T.decimal y
return (CardExpiration (fst im) (fst iy), "")
readCardExpiration _ =Left "Incorrect length"
-- | write card expiration
writeCardExpiration :: CardExpiration -> Text
writeCardExpiration (CardExpiration m y)=let
-- yes I know about text-format, but I don't think performance is that critical here to warrant another dependency
sm=printf "%02d" $ checkRng m
sy=printf "%02d" $ checkRng y
in T.concat [pack sm, pack sy]
where
-- | check range fits in two digits
checkRng :: Int -> Int
checkRng i=if i > 99 then i `mod` 100 else i
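-- For example, @readCardExpiration "0917"@ yields @Right (CardExpiration 9 17, "")@,
-- and @writeCardExpiration (CardExpiration 9 17)@ yields @"0917"@.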
-- | read Card Expiration from JSON string (MMYY)
instance FromJSON CardExpiration where
parseJSON (String s) |
Right (ce,"")<- readCardExpiration s=pure ce
parseJSON _=fail "CardExpiration"
-- | show Card Expiration to JSON string (MMYY)
instance ToJSON CardExpiration where
toJSON = toJSON . writeCardExpiration
instance IsString CardExpiration where
fromString s
| Right (ce,"")<-readCardExpiration $ fromString s=ce
fromString _=error "CardExpiration"
-- | the kind of authentication data the user has provided
data KindOfAuthentication =
Light
| Regular
| Strong
deriving (Eq, Ord, Show, Read, Bounded, Enum, Typeable)
instance ToJSON KindOfAuthentication where
toJSON =toJSON . show
instance FromJSON KindOfAuthentication where
parseJSON = jsonRead "KindOfAuthentication"
-- | a structure holding the information of an API call
data CallRecord a = CallRecord {
crReq :: H.Request -- ^ the request to MangoPay
,crResult :: Either MpException (Value,a) -- ^ the error or the JSON value and parsed result
}
-- | which level should we log the call
recordLogLevel :: CallRecord a-> LogLevel
recordLogLevel cr
| HT.methodGet == H.method (crReq cr)=LevelDebug
| otherwise = LevelInfo
-- | the log message from a call
recordLogMessage :: CallRecord a-> Text
recordLogMessage (CallRecord req res)=let
-- we log the method
methB=fromString $ show $ H.method req
-- we log the uri path
pathB=fromText $ TE.decodeUtf8 $ H.path req
-- log the query string if any
qsB=fromText $ TE.decodeUtf8 $ H.queryString req
postB=if H.method req==HT.methodPost
then case H.requestBody req of
(H.RequestBodyBS b)->fromText (TE.decodeUtf8 b) <> " -> "
(H.RequestBodyLBS b)->fromText $ TE.decodeUtf8 $ BS.toStrict b <> " -> "
_->mempty
else mempty
resB=case res of
-- log error
Left e->fromString $ show e
Right (v,_)->case v of
-- we have a list, just log the number of results to avoid polluting the log with too much info
Array arr->fromString (show $ V.length arr) <> " values"
-- we have a simple value we can log it
_->encodeToTextBuilder v
in toStrict . toLazyText $ methB <> singleton ' ' <> pathB <> qsB <> ": " <> postB <> resB
-- | the result
-- if we have a proper result we return it
-- if we have an error we throw it
recordResult :: MonadBase IO m => CallRecord a -> m a
recordResult (CallRecord _ (Left err))=throwIO err
recordResult (CallRecord _ (Right (_,a)))=return a
-- | log a CallRecord
-- MonadLogger doesn't expose a function with a dynamic log level...
logCall :: Q Exp
logCall = [|\a -> monadLoggerLog $(qLocation >>= liftLoc) "mangopay" (recordLogLevel a) (recordLogMessage a)|]
-- | simple class used to hide the serialization of parameters and simplify the calling code
class ToHtQuery a where
(?+) :: ByteString -> a -> (ByteString,Maybe ByteString)
instance ToHtQuery Double where
n ?+ d=n ?+ show d
instance ToHtQuery (Maybe Double) where
n ?+ d=n ?+ fmap show d
instance ToHtQuery Integer where
n ?+ d=n ?+ show d
instance ToHtQuery (Maybe Integer) where
n ?+ d=n ?+ fmap show d
instance ToHtQuery (Maybe MpTime) where
n ?+ d=n ?+ fmap (show . (round :: POSIXTime -> Integer) . unMpTime) d
instance ToHtQuery (Maybe T.Text) where
n ?+ d=(n,fmap TE.encodeUtf8 d)
instance ToHtQuery T.Text where
n ?+ d=(n,Just $ TE.encodeUtf8 d)
instance ToHtQuery (Maybe String) where
n ?+ d=(n,fmap UTF8.fromString d)
instance ToHtQuery String where
n ?+ d=(n,Just $ UTF8.fromString d)
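-- For example, @"page" ?+ (2 :: Integer)@ is @("page", Just "2")@, while
-- @"BeforeDate" ?+ (Nothing :: Maybe Text)@ is @("BeforeDate", Nothing)@ (the key name
-- is only illustrative), so mandatory and optional parameters can be written down uniformly.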
-- | find in assoc list
findAssoc :: Eq a=> [(a,b)] -> a -> Maybe b
findAssoc xs n=listToMaybe $ Prelude.map snd $ Prelude.filter ((n==) . fst) xs
-- | read an object or return Nothing
maybeRead :: Read a => String -> Maybe a
maybeRead = fmap fst . listToMaybe . reads
-- | Remove pairs whose value is null.
-- <https://github.com/bos/aeson/issues/77>
stripNulls :: [Pair] -> [Pair]
stripNulls xs = Prelude.filter (\(_,v) -> v /= Null) xs
-- | Same as 'object', but using 'stripNulls' as well.
objectSN :: [Pair] -> Value
objectSN = object . stripNulls
-- | Read instance from a JSON string.
-- We used to just call "read", which would cause a Prelude.read: no parse error
-- instead of a proper exception.
jsonRead :: (Read a) => String -> Value -> Parser a
jsonRead name (String s) = do
let ss = unpack s
case maybeRead ss of
Just r -> pure r
_ -> fail $ name ++ ": " ++ ss
jsonRead name _ = fail name
-- | Sort direction for list retrieval
data SortDirection = ASC | DESC
deriving (Show,Read,Eq,Ord,Bounded, Enum, Typeable)
-- | Sort transactions
data GenericSort = NoSort | ByCreationDate SortDirection
deriving (Show,Eq,Ord,Typeable)
-- | Default sort
instance Default GenericSort where
def = NoSort
-- | get sort attributes for transaction query
sortAttributes :: GenericSort -> [(ByteString,Maybe ByteString)]
sortAttributes NoSort = []
sortAttributes (ByCreationDate dir)=["Sort" ?+ ("CreationDate:" ++ (map toLower $ show dir))]
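-- For example, @sortAttributes (ByCreationDate DESC)@ yields
-- @[("Sort", Just "CreationDate:desc")]@, and @sortAttributes NoSort@ yields @[]@.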
|
prowdsponsor/mangopay
|
mangopay/src/Web/MangoPay/Types.hs
|
bsd-3-clause
| 15,821 | 0 | 20 | 3,279 | 4,332 | 2,300 | 2,032 | 292 | 6 |
module Aws.S3.Commands.Multipart
where
import Aws.Aws
import Aws.Core
import Aws.S3.Core
import Control.Applicative
import Control.Arrow (second)
import Control.Monad.IO.Class
import Control.Monad.Trans.Resource
import Crypto.Hash
import Data.ByteString.Char8 ({- IsString -})
import Data.Conduit
import qualified Data.Conduit.List as CL
import Data.Maybe
import Text.XML.Cursor (($/))
import qualified Data.ByteString.Char8 as B8
import qualified Data.ByteString.Lazy as BL
import qualified Data.CaseInsensitive as CI
import qualified Data.Map as M
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Network.HTTP.Conduit as HTTP
import qualified Network.HTTP.Types as HTTP
import qualified Text.XML as XML
{-
Aws supports the following 6 APIs for Multipart Upload.
Currently this code does not support numbers 3 and 6.
1. Initiate Multipart Upload
2. Upload Part
3. Upload Part - Copy
4. Complete Multipart Upload
5. Abort Multipart Upload
6. List Parts
-}
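{-
A minimal sketch of the supported flow (illustration only, not part of this module).
The request values are built with the smart constructors defined below; 'run' stands
for whatever Aws runner the caller uses to execute a Transaction, e.g. 'simpleAws cfg
s3cfg' (assumed here, not shown):

  1. imur <- run (postInitiateMultipartUpload bucket object)
  2. upr  <- run (uploadPart bucket object 1 (imurUploadId imur) requestBody)
  3. _    <- run (postCompleteMultipartUpload bucket object (imurUploadId imur)
                    [(1, uprETag upr)])

Steps 2 and 3 reuse the upload id returned by step 1; step 3 needs each part number
paired with the ETag returned for that part.
-}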
data InitiateMultipartUpload
= InitiateMultipartUpload {
imuBucket :: Bucket
, imuObjectName :: Object
, imuCacheControl :: Maybe T.Text
, imuContentDisposition :: Maybe T.Text
, imuContentEncoding :: Maybe T.Text
, imuContentType :: Maybe T.Text
, imuExpires :: Maybe Int
, imuMetadata :: [(T.Text,T.Text)]
, imuStorageClass :: Maybe StorageClass
, imuWebsiteRedirectLocation :: Maybe T.Text
, imuAcl :: Maybe CannedAcl
, imuServerSideEncryption :: Maybe ServerSideEncryption
, imuAutoMakeBucket :: Bool -- ^ Internet Archive S3 nonstandard extension
}
deriving (Show)
postInitiateMultipartUpload :: Bucket -> T.Text -> InitiateMultipartUpload
postInitiateMultipartUpload b o =
InitiateMultipartUpload
b o
Nothing Nothing Nothing Nothing Nothing
[] Nothing Nothing Nothing Nothing
False
data InitiateMultipartUploadResponse
= InitiateMultipartUploadResponse {
imurBucket :: !Bucket
, imurKey :: !T.Text
, imurUploadId :: !T.Text
}
-- | ServiceConfiguration: 'S3Configuration'
instance SignQuery InitiateMultipartUpload where
type ServiceConfiguration InitiateMultipartUpload = S3Configuration
signQuery InitiateMultipartUpload {..} = s3SignQuery S3Query {
s3QMethod = Post
, s3QBucket = Just $ T.encodeUtf8 imuBucket
, s3QObject = Just $ T.encodeUtf8 $ imuObjectName
, s3QSubresources = HTTP.toQuery[ ("uploads" :: B8.ByteString , Nothing :: Maybe B8.ByteString)]
, s3QQuery = []
, s3QContentType = T.encodeUtf8 <$> imuContentType
, s3QContentMd5 = Nothing
, s3QAmzHeaders = map (second T.encodeUtf8) $ catMaybes [
("x-amz-acl",) <$> writeCannedAcl <$> imuAcl
, ("x-amz-storage-class",) <$> writeStorageClass <$> imuStorageClass
, ("x-amz-website-redirect-location",) <$> imuWebsiteRedirectLocation
, ("x-amz-server-side-encryption",) <$> writeServerSideEncryption <$> imuServerSideEncryption
, if imuAutoMakeBucket then Just ("x-amz-auto-make-bucket", "1") else Nothing
] ++ map( \x -> (CI.mk . T.encodeUtf8 $ T.concat ["x-amz-meta-", fst x], snd x)) imuMetadata
, s3QOtherHeaders = map (second T.encodeUtf8) $ catMaybes [
("Expires",) . T.pack . show <$> imuExpires
, ("Cache-Control",) <$> imuCacheControl
, ("Content-Disposition",) <$> imuContentDisposition
, ("Content-Encoding",) <$> imuContentEncoding
]
, s3QRequestBody = Nothing
}
instance ResponseConsumer r InitiateMultipartUploadResponse where
type ResponseMetadata InitiateMultipartUploadResponse = S3Metadata
responseConsumer _ = s3XmlResponseConsumer parse
where parse cursor
= do bucket <- force "Missing Bucket Name" $ cursor $/ elContent "Bucket"
key <- force "Missing Key" $ cursor $/ elContent "Key"
uploadId <- force "Missing UploadID" $ cursor $/ elContent "UploadId"
return InitiateMultipartUploadResponse{
imurBucket = bucket
, imurKey = key
, imurUploadId = uploadId
}
instance Transaction InitiateMultipartUpload InitiateMultipartUploadResponse
instance AsMemoryResponse InitiateMultipartUploadResponse where
type MemoryResponse InitiateMultipartUploadResponse = InitiateMultipartUploadResponse
loadToMemory = return
----------------------------------
data UploadPart = UploadPart {
upObjectName :: T.Text
, upBucket :: Bucket
, upPartNumber :: Integer
, upUploadId :: T.Text
, upContentType :: Maybe B8.ByteString
, upContentMD5 :: Maybe (Digest MD5)
, upServerSideEncryption :: Maybe ServerSideEncryption
, upRequestBody :: HTTP.RequestBody
, upExpect100Continue :: Bool -- ^ Note: Requires http-client >= 0.4.10
}
uploadPart :: Bucket -> T.Text -> Integer -> T.Text -> HTTP.RequestBody -> UploadPart
uploadPart bucket obj p i body =
UploadPart obj bucket p i
Nothing Nothing Nothing body False
data UploadPartResponse
= UploadPartResponse {
uprVersionId :: !(Maybe T.Text),
uprETag :: !T.Text
}
deriving (Show)
-- | ServiceConfiguration: 'S3Configuration'
instance SignQuery UploadPart where
type ServiceConfiguration UploadPart = S3Configuration
signQuery UploadPart {..} = s3SignQuery S3Query {
s3QMethod = Put
, s3QBucket = Just $ T.encodeUtf8 upBucket
, s3QObject = Just $ T.encodeUtf8 upObjectName
, s3QSubresources = HTTP.toQuery[
("partNumber" :: B8.ByteString , Just (T.pack (show upPartNumber)) :: Maybe T.Text)
, ("uploadId" :: B8.ByteString, Just upUploadId :: Maybe T.Text)
]
, s3QQuery = []
, s3QContentType = upContentType
, s3QContentMd5 = upContentMD5
, s3QAmzHeaders = map (second T.encodeUtf8) $ catMaybes [
("x-amz-server-side-encryption",) <$> writeServerSideEncryption <$> upServerSideEncryption
]
, s3QOtherHeaders = catMaybes [
if upExpect100Continue
then Just ("Expect", "100-continue")
else Nothing
]
, s3QRequestBody = Just upRequestBody
}
instance ResponseConsumer UploadPart UploadPartResponse where
type ResponseMetadata UploadPartResponse = S3Metadata
responseConsumer _ = s3ResponseConsumer $ \resp -> do
let vid = T.decodeUtf8 `fmap` lookup "x-amz-version-id" (HTTP.responseHeaders resp)
let etag = fromMaybe "" $ T.decodeUtf8 `fmap` lookup "ETag" (HTTP.responseHeaders resp)
return $ UploadPartResponse vid etag
instance Transaction UploadPart UploadPartResponse
instance AsMemoryResponse UploadPartResponse where
type MemoryResponse UploadPartResponse = UploadPartResponse
loadToMemory = return
----------------------------
data CompleteMultipartUpload
= CompleteMultipartUpload {
cmuBucket :: Bucket
, cmuObjectName :: Object
, cmuUploadId :: T.Text
, cmuPartNumberAndEtags :: [(Integer,T.Text)]
, cmuExpiration :: Maybe T.Text
, cmuServerSideEncryption :: Maybe T.Text
, cmuServerSideEncryptionCustomerAlgorithm :: Maybe T.Text
, cmuVersionId :: Maybe T.Text
}
deriving (Show)
postCompleteMultipartUpload :: Bucket -> T.Text -> T.Text -> [(Integer,T.Text)]-> CompleteMultipartUpload
postCompleteMultipartUpload b o i p = CompleteMultipartUpload b o i p Nothing Nothing Nothing Nothing
data CompleteMultipartUploadResponse
= CompleteMultipartUploadResponse {
cmurLocation :: !T.Text
, cmurBucket :: !Bucket
, cmurKey :: !T.Text
, cmurETag :: !T.Text
}
-- | ServiceConfiguration: 'S3Configuration'
instance SignQuery CompleteMultipartUpload where
type ServiceConfiguration CompleteMultipartUpload = S3Configuration
signQuery CompleteMultipartUpload {..} = s3SignQuery S3Query {
s3QMethod = Post
, s3QBucket = Just $ T.encodeUtf8 cmuBucket
, s3QObject = Just $ T.encodeUtf8 cmuObjectName
, s3QSubresources = HTTP.toQuery[
("uploadId" :: B8.ByteString, Just cmuUploadId :: Maybe T.Text)
]
, s3QQuery = []
, s3QContentType = Nothing
, s3QContentMd5 = Nothing
, s3QAmzHeaders = catMaybes [ ("x-amz-expiration",) <$> (T.encodeUtf8 <$> cmuExpiration)
, ("x-amz-server-side-encryption",) <$> (T.encodeUtf8 <$> cmuServerSideEncryption)
, ("x-amz-server-side-encryption-customer-algorithm",)
<$> (T.encodeUtf8 <$> cmuServerSideEncryptionCustomerAlgorithm)
, ("x-amz-version-id",) <$> (T.encodeUtf8 <$> cmuVersionId)
]
, s3QOtherHeaders = []
, s3QRequestBody = Just $ HTTP.RequestBodyLBS reqBody
}
where reqBody = XML.renderLBS XML.def XML.Document {
XML.documentPrologue = XML.Prologue [] Nothing []
, XML.documentRoot = root
, XML.documentEpilogue = []
}
root = XML.Element {
XML.elementName = "CompleteMultipartUpload"
, XML.elementAttributes = M.empty
, XML.elementNodes = (partNode <$> cmuPartNumberAndEtags)
}
partNode (partNumber, etag) = XML.NodeElement XML.Element {
XML.elementName = "Part"
, XML.elementAttributes = M.empty
, XML.elementNodes = [keyNode (T.pack (show partNumber)),etagNode etag]
}
etagNode = toNode "ETag"
keyNode = toNode "PartNumber"
toNode name content = XML.NodeElement XML.Element {
XML.elementName = name
, XML.elementAttributes = M.empty
, XML.elementNodes = [XML.NodeContent content]
}
instance ResponseConsumer r CompleteMultipartUploadResponse where
type ResponseMetadata CompleteMultipartUploadResponse = S3Metadata
responseConsumer _ = s3XmlResponseConsumer parse
where parse cursor
= do location <- force "Missing Location" $ cursor $/ elContent "Location"
bucket <- force "Missing Bucket Name" $ cursor $/ elContent "Bucket"
key <- force "Missing Key" $ cursor $/ elContent "Key"
etag <- force "Missing ETag" $ cursor $/ elContent "ETag"
return CompleteMultipartUploadResponse{
cmurLocation = location
, cmurBucket = bucket
, cmurKey = key
, cmurETag = etag
}
instance Transaction CompleteMultipartUpload CompleteMultipartUploadResponse
instance AsMemoryResponse CompleteMultipartUploadResponse where
type MemoryResponse CompleteMultipartUploadResponse = CompleteMultipartUploadResponse
loadToMemory = return
----------------------------
data AbortMultipartUpload
= AbortMultipartUpload {
amuBucket :: Bucket
, amuObjectName :: Object
, amuUploadId :: T.Text
}
deriving (Show)
postAbortMultipartUpload :: Bucket -> T.Text -> T.Text -> AbortMultipartUpload
postAbortMultipartUpload b o i = AbortMultipartUpload b o i
data AbortMultipartUploadResponse
= AbortMultipartUploadResponse {
}
-- | ServiceConfiguration: 'S3Configuration'
instance SignQuery AbortMultipartUpload where
type ServiceConfiguration AbortMultipartUpload = S3Configuration
signQuery AbortMultipartUpload {..} = s3SignQuery S3Query {
s3QMethod = Delete
, s3QBucket = Just $ T.encodeUtf8 amuBucket
, s3QObject = Just $ T.encodeUtf8 amuObjectName
, s3QSubresources = HTTP.toQuery[
("uploadId" :: B8.ByteString, Just amuUploadId :: Maybe T.Text)
]
, s3QQuery = []
, s3QContentType = Nothing
, s3QContentMd5 = Nothing
, s3QAmzHeaders = []
, s3QOtherHeaders = []
, s3QRequestBody = Nothing
}
instance ResponseConsumer r AbortMultipartUploadResponse where
type ResponseMetadata AbortMultipartUploadResponse = S3Metadata
responseConsumer _ = s3XmlResponseConsumer parse
where parse _cursor
= return AbortMultipartUploadResponse {}
instance Transaction AbortMultipartUpload AbortMultipartUploadResponse
instance AsMemoryResponse AbortMultipartUploadResponse where
type MemoryResponse AbortMultipartUploadResponse = AbortMultipartUploadResponse
loadToMemory = return
----------------------------
getUploadId ::
Configuration
-> S3Configuration NormalQuery
-> HTTP.Manager
-> T.Text
-> T.Text
-> IO T.Text
getUploadId cfg s3cfg mgr bucket object = do
InitiateMultipartUploadResponse {
imurBucket = _bucket
, imurKey = _object'
, imurUploadId = uploadId
} <- memoryAws cfg s3cfg mgr $ postInitiateMultipartUpload bucket object
return uploadId
sendEtag ::
Configuration
-> S3Configuration NormalQuery
-> HTTP.Manager
-> T.Text
-> T.Text
-> T.Text
-> [T.Text]
-> IO ()
sendEtag cfg s3cfg mgr bucket object uploadId etags = do
_ <- memoryAws cfg s3cfg mgr $
postCompleteMultipartUpload bucket object uploadId (zip [1..] etags)
return ()
putConduit ::
MonadResource m =>
Configuration
-> S3Configuration NormalQuery
-> HTTP.Manager
-> T.Text
-> T.Text
-> T.Text
-> Conduit BL.ByteString m T.Text
putConduit cfg s3cfg mgr bucket object uploadId = loop 1
where
loop n = do
v' <- await
case v' of
Just v -> do
UploadPartResponse _ etag <- memoryAws cfg s3cfg mgr $
uploadPart bucket object n uploadId (HTTP.RequestBodyLBS v)
yield etag
loop (n+1)
Nothing -> return ()
chunkedConduit :: (MonadResource m) => Integer -> Conduit B8.ByteString m BL.ByteString
chunkedConduit size = loop 0 []
where
loop :: Monad m => Integer -> [B8.ByteString] -> Conduit B8.ByteString m BL.ByteString
loop cnt str = await >>= maybe (yieldChunk str) go
where
go :: Monad m => B8.ByteString -> Conduit B8.ByteString m BL.ByteString
go line
| size <= len = yieldChunk newStr >> loop 0 []
| otherwise = loop len newStr
where
len = fromIntegral (B8.length line) + cnt
newStr = line:str
yieldChunk :: Monad m => [B8.ByteString] -> Conduit i m BL.ByteString
yieldChunk = yield . BL.fromChunks . reverse
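-- A worked example of the chunking behaviour (illustrative only): with
-- @size = 5@ and incoming strict chunks ["ab", "cd", "ef", "g"], the running
-- count reaches 6 bytes on "ef", so the conduit emits the lazy chunk
-- "abcdef" and then flushes the trailing "g" as its own chunk at end of input.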
multipartUpload ::
Configuration
-> S3Configuration NormalQuery
-> HTTP.Manager
-> T.Text
-> T.Text
-> Conduit () (ResourceT IO) B8.ByteString
-> Integer
-> ResourceT IO ()
multipartUpload cfg s3cfg mgr bucket object src chunkSize = do
uploadId <- liftIO $ getUploadId cfg s3cfg mgr bucket object
etags <- src
$= chunkedConduit chunkSize
$= putConduit cfg s3cfg mgr bucket object uploadId
$$ CL.consume
liftIO $ sendEtag cfg s3cfg mgr bucket object uploadId etags
multipartUploadSink :: MonadResource m
=> Configuration
-> S3Configuration NormalQuery
-> HTTP.Manager
-> T.Text -- ^ Bucket name
-> T.Text -- ^ Object name
-> Integer -- ^ chunkSize (minimum: 5MB)
-> Sink B8.ByteString m ()
multipartUploadSink cfg s3cfg = multipartUploadSinkWithInitiator cfg s3cfg postInitiateMultipartUpload
multipartUploadWithInitiator ::
Configuration
-> S3Configuration NormalQuery
-> (Bucket -> T.Text -> InitiateMultipartUpload)
-> HTTP.Manager
-> T.Text
-> T.Text
-> Conduit () (ResourceT IO) B8.ByteString
-> Integer
-> ResourceT IO ()
multipartUploadWithInitiator cfg s3cfg initiator mgr bucket object src chunkSize = do
uploadId <- liftIO $ imurUploadId <$> memoryAws cfg s3cfg mgr (initiator bucket object)
etags <- src
$= chunkedConduit chunkSize
$= putConduit cfg s3cfg mgr bucket object uploadId
$$ CL.consume
liftIO $ sendEtag cfg s3cfg mgr bucket object uploadId etags
multipartUploadSinkWithInitiator :: MonadResource m
=> Configuration
-> S3Configuration NormalQuery
-> (Bucket -> T.Text -> InitiateMultipartUpload) -- ^ Initiator
-> HTTP.Manager
-> T.Text -- ^ Bucket name
-> T.Text -- ^ Object name
-> Integer -- ^ chunkSize (minimum: 5MB)
-> Sink B8.ByteString m ()
multipartUploadSinkWithInitiator cfg s3cfg initiator mgr bucket object chunkSize = do
uploadId <- liftIO $ imurUploadId <$> memoryAws cfg s3cfg mgr (initiator bucket object)
etags <- chunkedConduit chunkSize
$= putConduit cfg s3cfg mgr bucket object uploadId
$= CL.consume
liftIO $ sendEtag cfg s3cfg mgr bucket object uploadId etags
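-- A usage sketch (illustrative only, under assumptions): given a
-- 'Configuration', a default 'S3Configuration' and an 'HTTP.Manager', a file
-- could be streamed to S3 in 10MB parts.  'CB.sourceFile' comes from
-- Data.Conduit.Binary and 'runResourceT' from Control.Monad.Trans.Resource;
-- neither is necessarily imported by this module, and the bucket, object and
-- file names are made up.
--
-- > uploadBigFile :: Configuration -> S3Configuration NormalQuery -> HTTP.Manager -> IO ()
-- > uploadBigFile cfg s3cfg mgr =
-- >   runResourceT $
-- >     multipartUpload cfg s3cfg mgr "my-bucket" "backups/2015-01-01.tar"
-- >       (CB.sourceFile "backups/2015-01-01.tar") (10 * 1024 * 1024)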
|
romanb/aws
|
Aws/S3/Commands/Multipart.hs
|
bsd-3-clause
| 17,779 | 1 | 19 | 5,141 | 4,026 | 2,147 | 1,879 | -1 | -1 |
-- | This is a template pakej.hs file, provided with
-- the intention to be customized by Pakej users
{-# LANGUAGE OverloadedStrings #-} -- This extension is not required, but users will probably need it
module Main (main) where
-- All Pakej functionality is exported by this single module
import Pakej
-- We need (Category..) and Category.id, exported from the Pakej module, to compose widgets
import Prelude hiding ((.), id)
-- This is not a very interesting widget (it does exactly nothing), but it's the basis for extension
main :: IO ()
main = pakej id
|
supki/pakej
|
data/pakej.hs
|
bsd-3-clause
| 555 | 0 | 6 | 94 | 51 | 34 | 17 | 6 | 1 |
module Core.Syntax where
import Core.Value
import Core.Variable
data Expression
= Sequence Expression Value Expression
| Case Variable [(Value, Expression)]
| Application Variable [Value]
| Unit Value
| Store Value
| Fetch Variable (Maybe Offset)
| Update Variable Value
data Declaration
= Declaration Name [Variable] Expression
|
farre/grin
|
Core/Syntax.hs
|
bsd-3-clause
| 348 | 0 | 8 | 64 | 101 | 59 | 42 | 13 | 0 |
-- Copyright (c) 2014-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is distributed under the terms of a BSD license,
-- found in the LICENSE file. An additional grant of patent rights can
-- be found in the PATENTS file.
{-# LANGUAGE CPP #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | The implementation of the 'Haxl' monad.
module Haxl.Core.Monad (
-- * The monad
GenHaxl, runHaxl,
env,
-- * Env
Env(..), caches, initEnvWithData, initEnv, emptyEnv,
-- * Exceptions
throw, catch, catchIf, try, tryToHaxlException,
-- * Data fetching and caching
dataFetch, uncachedRequest,
cacheRequest, cacheResult, cachedComputation,
dumpCacheAsHaskell,
-- * Unsafe operations
unsafeLiftIO, unsafeToHaxlException,
) where
import Haxl.Core.Types
import Haxl.Core.Show1
import Haxl.Core.StateStore
import Haxl.Core.Exception
import Haxl.Core.RequestStore
import Haxl.Core.Util
import Haxl.Core.DataCache as DataCache
import qualified Data.Text as Text
import Control.Exception (Exception(..), SomeException)
#if __GLASGOW_HASKELL__ >= 708
import Control.Exception (SomeAsyncException(..))
#endif
#if __GLASGOW_HASKELL__ >= 710
import Control.Exception (AllocationLimitExceeded(..))
#endif
import Control.Monad
import qualified Control.Exception as Exception
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative hiding (Const)
#endif
import Control.DeepSeq
import GHC.Exts (IsString(..))
#if __GLASGOW_HASKELL__ < 706
import Prelude hiding (catch)
#endif
import Data.IORef
import Data.List
import Data.Monoid
import Data.Time
import qualified Data.HashMap.Strict as HashMap
import Text.Printf
import Text.PrettyPrint hiding ((<>))
import Control.Arrow (left)
#ifdef EVENTLOG
import Control.Exception (bracket_)
import Debug.Trace (traceEventIO)
#endif
import Data.TASequence.FastCatQueue
-- -----------------------------------------------------------------------------
-- The environment
-- | The data we carry around in the Haxl monad.
data Env u = Env
{ cacheRef :: IORef (DataCache ResultVar) -- cached data fetches
, memoRef :: IORef (DataCache (MemoVar u)) -- memoized computations
, flags :: Flags
, userEnv :: u
, statsRef :: IORef Stats
, states :: StateStore
-- ^ Data sources and other components can store their state in
-- here. Items in this store must be instances of 'StateKey'.
}
type Caches u = (IORef (DataCache ResultVar), IORef (DataCache (MemoVar u)))
caches :: Env u -> Caches u
caches env = (cacheRef env, memoRef env)
-- | Initialize an environment with a 'StateStore', a user environment, and a
-- pair of preexisting 'DataCache's (see 'Caches').
initEnvWithData :: StateStore -> u -> Caches u -> IO (Env u)
initEnvWithData states e (cref, mref) = do
sref <- newIORef emptyStats
return Env
{ cacheRef = cref
, memoRef = mref
, flags = defaultFlags
, userEnv = e
, states = states
, statsRef = sref
}
-- | Initializes an environment with a 'StateStore' and a user environment.
initEnv :: StateStore -> u -> IO (Env u)
initEnv states e = do
cref <- newIORef DataCache.empty
mref <- newIORef DataCache.empty
initEnvWithData states e (cref,mref)
-- | A new, empty environment.
emptyEnv :: u -> IO (Env u)
emptyEnv = initEnv stateEmpty
-- -----------------------------------------------------------------------------
-- | The Haxl monad, which does several things:
--
-- * It is a reader monad for 'Env' and 'IORef' 'RequestStore'.  The
-- latter is the current batch of unsubmitted data fetch requests.
--
-- * It is a concurrency, or resumption, monad. A computation may run
-- partially and return 'Blocked', in which case the framework should
-- perform the outstanding requests in the 'RequestStore', and then
-- resume the computation.
--
-- * The Applicative combinator '<*>' explores /both/ branches in the
-- event that the left branch is 'Blocked', so that we can collect
-- multiple requests and submit them as a batch.
--
-- * It contains IO, so that we can perform real data fetching.
--
newtype GenHaxl u a = GenHaxl
{ unHaxl :: Env u -> IORef (RequestStore u) -> IO (Result u a) }
-- A Haxl continuation.
newtype HaxlCont u a b = HaxlCont { unCont :: a -> GenHaxl u b }
-- A type-aligned catenable queue of Haxl computations.
type Haxls u a b = FastTCQueue (HaxlCont u) a b
-- | The result of a computation is either 'Done' with a value, 'Throw'
-- with an exception, or 'Blocked' on the result of a data fetch with
-- a continuation.
data Result u a
= Done a
| Throw SomeException
  | Blocked (Haxls u () a)  -- input is () because we are just waiting for the fetched data to be put into the Ref
instance (Show a) => Show (Result u a) where
show (Done a) = printf "Done(%s)" $ show a
show (Throw e) = printf "Throw(%s)" $ show e
show Blocked{} = "Blocked"
instance Monad (GenHaxl u) where
return a = GenHaxl $ \_env _ref -> return (Done a)
GenHaxl m >>= k = GenHaxl $ \env ref -> do
e <- m env ref
case e of
Done a -> unHaxl (k a) env ref
Throw e -> return (Throw e)
Blocked ks -> return (Blocked (ks |> HaxlCont k))
instance Functor (GenHaxl u) where
fmap f (GenHaxl h) = GenHaxl $ \ env ref -> do
r <- h env ref
case r of
Done a -> return (Done (f a))
Throw e -> return (Throw e)
Blocked ks -> return (Blocked (ks |> HaxlCont (return . f)))
instance Applicative (GenHaxl u) where
pure = return
GenHaxl f <*> GenHaxl a = GenHaxl $ \env ref -> do
r <- f env ref
case r of
Throw e -> return (Throw e)
Done f' -> do
ra <- a env ref
case ra of
Done a' -> return (Done (f' a'))
Throw e -> return (Throw e)
Blocked as -> return (Blocked (as |> HaxlCont (return . f')))
Blocked fs -> do
ra <- a env ref -- left is blocked, explore the right
case ra of
Done a' -> return (Blocked (fs |> HaxlCont (return . ($ a'))))
Throw e -> return (Blocked (fs |> HaxlCont (\f -> f <$> throw e)))
Blocked as -> return (Blocked (haxlToHaxls ((haxlsToHaxl fs) <*> (haxlsToHaxl as))))
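-- An illustrative sketch of why this matters for batching: because '<*>'
-- keeps exploring the right branch even when the left one is 'Blocked', the
-- two fetches below end up in the same round and can be batched, whereas the
-- equivalent '>>=' chain would need two rounds.  'FriendsOf' and 'Id' are
-- hypothetical names standing in for any 'DataSource' request and its result.
--
-- > pair :: GenHaxl u ([Id], [Id])
-- > pair = (,) <$> dataFetch (FriendsOf 1) <*> dataFetch (FriendsOf 2)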
-- | Runs a 'Haxl' computation in an 'Env'.
runHaxl :: Env u -> GenHaxl u a -> IO a
#ifdef EVENTLOG
runHaxl env h = do
let go !n env (GenHaxl haxl) = do
traceEventIO "START computation"
ref <- newIORef noRequests
e <- haxl env ref
traceEventIO "STOP computation"
case e of
Done a -> return a
Throw e -> Exception.throw e
Blocked cont -> do
bs <- readIORef ref
writeIORef ref noRequests -- Note [RoundId]
traceEventIO "START performFetches"
n' <- performFetches n env bs
traceEventIO "STOP performFetches"
go n' env cont
traceEventIO "START runHaxl"
r <- go 0 env h
traceEventIO "STOP runHaxl"
return r
#else
runHaxl env = runHaxls env . haxlToHaxls
runHaxls :: Env u -> Haxls u () a -> IO a
runHaxls env hs = do
ref <- newIORef noRequests
r <- runQueue env ref hs
case r of
Done a -> return a
Throw e -> Exception.throw e
Blocked ks -> do
bs <- readIORef ref
writeIORef ref noRequests -- Note [RoundId]
void (performFetches 0 env bs)
runHaxls env ks
runQueue :: forall u a. Env u -> IORef (RequestStore u) -> Haxls u () a -> IO (Result u a)
runQueue env ref hs = go hs ()
where go :: Haxls u y z -> y -> IO (Result u z)
go hs x =
case tviewl hs of
TAEmptyL -> return (Done x)
HaxlCont f :< hs' -> do
r <- unHaxl (f x) env ref
case r of
Done x' -> go hs' x'
Throw e -> return (Throw e)
Blocked ks -> return (Blocked (ks >< hs'))
haxlToHaxls :: GenHaxl u a -> Haxls u () a
haxlToHaxls = tsingleton . HaxlCont . const
haxlsToHaxl :: Haxls u () a -> GenHaxl u a
haxlsToHaxl hs = GenHaxl $ \ env ref -> runQueue env ref hs
#endif
-- | Extracts data from the 'Env'.
env :: (Env u -> a) -> GenHaxl u a
env f = GenHaxl $ \env _ref -> return (Done (f env))
-- -----------------------------------------------------------------------------
-- Exceptions
-- | Throw an exception in the Haxl monad
throw :: (Exception e) => e -> GenHaxl u a
throw e = GenHaxl $ \_env _ref -> raise e
raise :: (Exception e) => e -> IO (Result u a)
raise = return . Throw . toException
-- | Catch an exception in the Haxl monad
catch :: Exception e => GenHaxl u a -> (e -> GenHaxl u a) -> GenHaxl u a
catch (GenHaxl m) h = GenHaxl $ \env ref -> do
r <- m env ref
case r of
Done a -> return (Done a)
Throw e | Just e' <- fromException e -> unHaxl (h e') env ref
| otherwise -> return (Throw e)
Blocked ks -> return (Blocked (haxlToHaxls (catch (haxlsToHaxl ks) h)))
-- | Catch exceptions that satisfy a predicate
catchIf
:: Exception e => (e -> Bool) -> GenHaxl u a -> (e -> GenHaxl u a)
-> GenHaxl u a
catchIf cond haxl handler =
catch haxl $ \e -> if cond e then handler e else throw e
-- | Returns @'Left' e@ if the computation throws an exception @e@, or
-- @'Right' a@ if it returns a result @a@.
try :: Exception e => GenHaxl u a -> GenHaxl u (Either e a)
try haxl = (Right <$> haxl) `catch` (return . Left)
-- -----------------------------------------------------------------------------
-- Unsafe operations
-- | Under ordinary circumstances this is unnecessary; users of the Haxl
-- monad should generally /not/ perform arbitrary IO.
unsafeLiftIO :: IO a -> GenHaxl u a
unsafeLiftIO m = GenHaxl $ \_env _ref -> Done <$> m
-- | Convert exceptions in the underlying IO monad to exceptions in
-- the Haxl monad. This is morally unsafe, because you could then
-- catch those exceptions in Haxl and observe the underlying execution
-- order. Not to be exposed to user code.
unsafeToHaxlException :: GenHaxl u a -> GenHaxl u a
unsafeToHaxlException (GenHaxl m) = GenHaxl $ \env ref -> do
r <- m env ref `Exception.catch` \e -> return (Throw e)
case r of
Blocked ks -> return (Blocked (haxlToHaxls (unsafeToHaxlException (haxlsToHaxl ks))))
other -> return other
-- | Like 'try', but lifts all exceptions into the 'HaxlException'
-- hierarchy. Uses 'unsafeToHaxlException' internally. Typically
-- this is used at the top level of a Haxl computation, to ensure that
-- all exceptions are caught.
tryToHaxlException :: GenHaxl u a -> GenHaxl u (Either HaxlException a)
tryToHaxlException h = left asHaxlException <$> try (unsafeToHaxlException h)
-- -----------------------------------------------------------------------------
-- Data fetching and caching
-- | Possible responses when checking the cache.
data CacheResult a
-- | The request hadn't been seen until now.
= Uncached (ResultVar a)
-- | The request has been seen before, but its result has not yet been
-- fetched.
| CachedNotFetched (ResultVar a)
-- | The request has been seen before, and its result has already been
-- fetched.
| Cached (Either SomeException a)
-- | Checks the data cache for the result of a request.
cached :: (Request r a) => Env u -> r a -> IO (CacheResult a)
cached env req = do
cache <- readIORef (cacheRef env)
let
do_fetch = do
rvar <- newEmptyResult
writeIORef (cacheRef env) $! DataCache.insert req rvar cache
return (Uncached rvar)
case DataCache.lookup req cache of
Nothing -> do_fetch
Just rvar -> do
mb <- tryReadResult rvar
case mb of
Nothing -> return (CachedNotFetched rvar)
-- Use the cached result, even if it was an error.
Just r -> do
ifTrace (flags env) 3 $ putStrLn $ case r of
Left _ -> "Cached error: " ++ show req
Right _ -> "Cached request: " ++ show req
return (Cached r)
-- | Performs actual fetching of data for a 'Request' from a 'DataSource'.
dataFetch :: (DataSource u r, Request r a) => r a -> GenHaxl u a
dataFetch req = GenHaxl $ \env ref -> do
-- First, check the cache
res <- cached env req
case res of
-- Not seen before: add the request to the RequestStore, so it
-- will be fetched in the next round.
Uncached rvar -> do
modifyIORef' ref $ \bs -> addRequest (BlockedFetch req rvar) bs
return $ Blocked (haxlToHaxls (continueFetch req rvar))
-- Seen before but not fetched yet. We're blocked, but we don't have
-- to add the request to the RequestStore.
CachedNotFetched rvar -> return
$ Blocked (haxlToHaxls (continueFetch req rvar))
-- Cached: either a result, or an exception
Cached (Left ex) -> return (Throw ex)
Cached (Right a) -> return (Done a)
-- | A data request that is not cached. This is not what you want for
-- normal read requests, because then multiple identical requests may
-- return different results, and this invalidates some of the
-- properties that we expect Haxl computations to respect: that data
-- fetches can be arbitrarily reordered, and identical requests can be
-- commoned up, for example.
--
-- 'uncachedRequest' is useful for performing writes, provided those
-- are done in a safe way - that is, not mixed with reads that might
-- conflict in the same Haxl computation.
--
uncachedRequest :: (DataSource u r, Request r a) => r a -> GenHaxl u a
uncachedRequest req = GenHaxl $ \_env ref -> do
rvar <- newEmptyResult
modifyIORef' ref $ \bs -> addRequest (BlockedFetch req rvar) bs
return $ Blocked (haxlToHaxls (continueFetch req rvar))
continueFetch
:: (DataSource u r, Request r a, Show a)
=> r a -> ResultVar a -> GenHaxl u a
continueFetch req rvar = GenHaxl $ \_env _ref -> do
m <- tryReadResult rvar
case m of
Nothing -> raise . DataSourceError $
textShow req <> " did not set contents of result var"
Just r -> done r
-- | Transparently provides caching. Useful for datasources that can
-- return immediately, but also caches values. Exceptions thrown by
-- the IO operation (except for asynchronous exceptions) are
-- propagated into the Haxl monad and can be caught by 'catch' and
-- 'try'.
cacheResult :: (Request r a) => r a -> IO a -> GenHaxl u a
cacheResult req val = GenHaxl $ \env _ref -> do
cachedResult <- cached env req
case cachedResult of
Uncached rvar -> do
result <- Exception.try val
putResult rvar result
case result of
Left e -> do rethrowAsyncExceptions e; done result
_other -> done result
Cached result -> done result
CachedNotFetched _ -> corruptCache
where
corruptCache = raise . DataSourceError $ Text.concat
[ textShow req
, " has a corrupted cache value: these requests are meant to"
, " return immediately without an intermediate value. Either"
, " the cache was updated incorrectly, or you're calling"
, " cacheResult on a query that involves a blocking fetch."
]
-- We must be careful about turning IO monad exceptions into Haxl
-- exceptions. An IO monad exception will normally propagate right
-- out of runHaxl and terminate the whole computation, whereas a Haxl
-- exception can get dropped on the floor, if it is on the right of
-- <*> and the left side also throws, for example. So turning an IO
-- monad exception into a Haxl exception is a dangerous thing to do.
-- In particular, we never want to do it for an asynchronous exception
-- (AllocationLimitExceeded, ThreadKilled, etc.), because these are
-- supposed to unconditionally terminate the computation.
--
-- There are three places where we take an arbitrary IO monad exception and
-- turn it into a Haxl exception:
--
-- * wrapFetchInCatch. Here we want to propagate a failure of the
-- data source to the callers of the data source, but if the
-- failure came from elsewhere (an asynchronous exception), then we
-- should just propagate it
--
-- * cacheResult (cache the results of IO operations): again,
-- failures of the IO operation should be visible to the caller as
-- a Haxl exception, but we exclude asynchronous exceptions from
-- this.
-- * unsafeToHaxlException: assume the caller knows what they're
-- doing, and just wrap all exceptions.
--
rethrowAsyncExceptions :: SomeException -> IO ()
rethrowAsyncExceptions e
#if __GLASGOW_HASKELL__ >= 708
| Just SomeAsyncException{} <- fromException e = Exception.throw e
#endif
#if __GLASGOW_HASKELL__ >= 710
| Just AllocationLimitExceeded{} <- fromException e = Exception.throw e
-- AllocationLimitExceeded is not a child of SomeAsyncException,
-- but it should be.
#endif
| otherwise = return ()
-- | Inserts a request/result pair into the cache. Throws an exception
-- if the request has already been issued, either via 'dataFetch' or
-- 'cacheRequest'.
--
-- This can be used to pre-populate the cache when running tests, to
-- avoid going to the actual data source and ensure that results are
-- deterministic.
--
cacheRequest
:: (Request req a) => req a -> Either SomeException a -> GenHaxl u ()
cacheRequest request result = GenHaxl $ \env _ref -> do
res <- cached env request
case res of
Uncached rvar -> do
-- request was not in the cache: insert the result and continue
putResult rvar result
return $ Done ()
-- It is an error if the request is already in the cache. We can't test
-- whether the cached result is the same without adding an Eq constraint,
-- and we don't necessarily have Eq for all results.
_other -> raise $
DataSourceError "cacheRequest: request is already in the cache"
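-- A sketch of the test-fixture pattern this enables (illustrative only;
-- 'GetUserName' is a hypothetical request type):
--
-- > loadFixtures :: GenHaxl u ()
-- > loadFixtures = do
-- >   cacheRequest (GetUserName 1) (Right "alice")
-- >   cacheRequest (GetUserName 2) (Right "bob")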
instance IsString a => IsString (GenHaxl u a) where
fromString s = return (fromString s)
-- | Issues a batch of fetches in a 'RequestStore'. After
-- 'performFetches', all the requests in the 'RequestStore' are
-- complete, and all of the 'ResultVar's are full.
performFetches :: forall u. Int -> Env u -> RequestStore u -> IO Int
performFetches n env reqs = do
let f = flags env
sref = statsRef env
jobs = contents reqs
!n' = n + length jobs
t0 <- getCurrentTime
let
roundstats =
[ (dataSourceName (getReq reqs), length reqs)
| BlockedFetches reqs <- jobs ]
where
getReq :: [BlockedFetch r] -> r a
getReq = undefined
ifTrace f 1 $
printf "Batch data fetch (%s)\n" $
intercalate (", "::String) $
map (\(name,num) -> printf "%d %s" num (Text.unpack name)) roundstats
ifTrace f 3 $
forM_ jobs $ \(BlockedFetches reqs) ->
forM_ reqs $ \(BlockedFetch r _) -> putStrLn (show1 r)
let
applyFetch (i, BlockedFetches (reqs :: [BlockedFetch r])) =
case stateGet (states env) of
Nothing ->
return (SyncFetch (mapM_ (setError (const e)) reqs))
where req :: r a; req = undefined
e = DataSourceError $
"data source not initialized: " <> dataSourceName req
Just state ->
return $ wrapFetchInTrace i (length reqs)
(dataSourceName (undefined :: r a))
$ wrapFetchInCatch reqs
$ fetch state f (userEnv env) reqs
fetches <- mapM applyFetch $ zip [n..] jobs
times <-
if report f >= 2
then do
(refs, timedfetches) <- mapAndUnzipM wrapFetchInTimer fetches
scheduleFetches timedfetches
mapM (fmap Just . readIORef) refs
else do
scheduleFetches fetches
return $ repeat Nothing
let dsroundstats = HashMap.fromList
[ (name, DataSourceRoundStats { dataSourceFetches = fetches
, dataSourceTime = time
})
| ((name, fetches), time) <- zip roundstats times]
t1 <- getCurrentTime
let roundtime = realToFrac (diffUTCTime t1 t0) :: Double
ifReport f 1 $
modifyIORef' sref $ \(Stats rounds) -> roundstats `deepseq`
Stats (RoundStats (microsecs roundtime) dsroundstats: rounds)
ifTrace f 1 $
printf "Batch data fetch done (%.2fs)\n" (realToFrac roundtime :: Double)
return n'
-- Catch exceptions arising from the data source and stuff them into
-- the appropriate requests. We don't want any exceptions propagating
-- directly from the data sources, because we want the exception to be
-- thrown by dataFetch instead.
--
wrapFetchInCatch :: [BlockedFetch req] -> PerformFetch -> PerformFetch
wrapFetchInCatch reqs fetch =
case fetch of
SyncFetch io ->
SyncFetch (io `Exception.catch` handler)
AsyncFetch fio ->
AsyncFetch (\io -> fio io `Exception.catch` handler)
where
handler :: SomeException -> IO ()
handler e = do
rethrowAsyncExceptions e
mapM_ (forceError e) reqs
-- Set the exception even if the request already had a result.
-- Otherwise we could be discarding an exception.
forceError e (BlockedFetch _ rvar) = do
void $ tryTakeResult rvar
putResult rvar (except e)
wrapFetchInTimer :: PerformFetch -> IO (IORef Microseconds, PerformFetch)
wrapFetchInTimer f = do
r <- newIORef 0
case f of
SyncFetch io -> return (r, SyncFetch (time io >>= writeIORef r))
AsyncFetch f -> do
inner_r <- newIORef 0
return (r, AsyncFetch $ \inner -> do
total <- time (f (time inner >>= writeIORef inner_r))
inner_t <- readIORef inner_r
writeIORef r (total - inner_t))
wrapFetchInTrace :: Int -> Int -> Text.Text -> PerformFetch -> PerformFetch
#ifdef EVENTLOG
wrapFetchInTrace i n dsName f =
case f of
SyncFetch io -> SyncFetch (wrapF "Sync" io)
AsyncFetch fio -> AsyncFetch (wrapF "Async" . fio . unwrapF "Async")
where
d = Text.unpack dsName
wrapF :: String -> IO a -> IO a
wrapF ty = bracket_ (traceEventIO $ printf "START %d %s (%d %s)" i d n ty)
(traceEventIO $ printf "STOP %d %s (%d %s)" i d n ty)
unwrapF :: String -> IO a -> IO a
unwrapF ty = bracket_ (traceEventIO $ printf "STOP %d %s (%d %s)" i d n ty)
(traceEventIO $ printf "START %d %s (%d %s)" i d n ty)
#else
wrapFetchInTrace _ _ _ f = f
#endif
time :: IO () -> IO Microseconds
time io = do
t0 <- getCurrentTime
io
t1 <- getCurrentTime
return . microsecs . realToFrac $ t1 `diffUTCTime` t0
microsecs :: Double -> Microseconds
microsecs t = round (t * 10^(6::Int))
-- | Start all the async fetches first, then perform the sync fetches before
-- getting the results of the async fetches.
scheduleFetches :: [PerformFetch] -> IO()
scheduleFetches fetches = async_fetches sync_fetches
where
async_fetches :: IO () -> IO ()
async_fetches = compose [f | AsyncFetch f <- fetches]
sync_fetches :: IO ()
sync_fetches = sequence_ [io | SyncFetch io <- fetches]
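-- For example (illustrative only): with
-- fetches = [AsyncFetch a1, SyncFetch s1, AsyncFetch a2]
-- this runs a1 (a2 s1); both async fetches are started first, the sync fetch
-- runs while they are in flight, and the async results are collected as the
-- wrappers unwind.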
-- -----------------------------------------------------------------------------
-- Memoization
-- | A variable in the cache representing the state of a memoized computation
newtype MemoVar u a = MemoVar (IORef (MemoStatus u a))
-- | The state of a memoized computation
data MemoStatus u a
= MemoInProgress (RoundId u) (Haxls u () a)
-- ^ Under evaluation in the given round, here is the latest
-- continuation. The continuation might be a little out of
-- date, but that's fine, the worst that can happen is we do a
-- little extra work.
| MemoDone (Either SomeException a)
-- fully evaluated, here is the result.
type RoundId u = IORef (RequestStore u)
{-
Note [RoundId]
A token representing the round. This needs to be unique per round,
and it needs to support Eq. Fortunately the IORef RequestStore is
exactly what we need: IORef supports Eq, and we make a new one for
each round. There's a danger that storing this in the DataCache could
cause a space leak, so we stub out the contents after each round (see
runHaxl).
-}
-- | 'cachedComputation' memoizes a Haxl computation. The key is a
-- request.
--
-- /Note:/ These cached computations will /not/ be included in the output
-- of 'dumpCacheAsHaskell'.
--
cachedComputation
:: forall req u a. (Request req a)
=> req a -> GenHaxl u a -> GenHaxl u a
cachedComputation req haxl = GenHaxl $ \env ref -> do
cache <- readIORef (memoRef env)
case DataCache.lookup req cache of
Nothing -> do
memovar <- newIORef (MemoInProgress ref (haxlToHaxls haxl))
writeIORef (memoRef env) $! DataCache.insert req (MemoVar memovar) cache
run memovar (haxlToHaxls haxl) env ref
Just (MemoVar memovar) -> do
status <- readIORef memovar
case status of
MemoDone r -> done r
MemoInProgress round ks
| round == ref -> return (Blocked (haxlToHaxls (retryMemo req)))
| otherwise -> run memovar ks env ref
-- was blocked in a previous round; run the saved continuation to
-- make more progress.
where
-- If we got blocked on this memo in the current round, this is the
-- continuation: just try to evaluate the memo again. We know it is
-- already in the cache (because we just checked), so the computation
-- will never be used.
retryMemo req =
cachedComputation req (throw (CriticalError "retryMemo"))
-- Run the memoized computation and store the result (complete or
-- partial) back in the MemoVar afterwards.
--
-- We don't attempt to catch IO monad exceptions here. That may seem
-- dangerous, because if an IO exception is raised we'll leave the
-- MemoInProgress in the MemoVar. But we always want to just
-- propagate an IO monad exception (it should kill the whole runHaxl,
-- unless there's an unsafeToHaxlException), so we should never be
-- looking at the MemoVar again anyway. Furthermore, storing the
-- exception in the MemoVar is wrong, because that will turn it into
-- a Haxl exception (see rethrowAsyncExceptions).
run memovar ks env ref = do
e <- runQueue env ref ks
case e of
Done a -> complete memovar (Right a)
Throw e -> complete memovar (Left e)
Blocked ks -> do
writeIORef memovar (MemoInProgress ref ks)
return (Blocked (haxlToHaxls (retryMemo req)))
-- We're finished: store the final result
complete memovar r = do
writeIORef memovar (MemoDone r)
done r
-- | Lifts an 'Either' into either 'Throw' or 'Done'.
done :: Either SomeException a -> IO (Result u a)
done = return . either Throw Done
-- -----------------------------------------------------------------------------
-- | Dump the contents of the cache as Haskell code that, when
-- compiled and run, will recreate the same cache contents. For
-- example, the generated code looks something like this:
--
-- > loadCache :: GenHaxl u ()
-- > loadCache = do
-- > cacheRequest (ListWombats 3) (Right ([1,2,3]))
-- > cacheRequest (CountAardvarks "abcabc") (Right (2))
--
dumpCacheAsHaskell :: GenHaxl u String
dumpCacheAsHaskell = do
ref <- env cacheRef -- NB. cacheRef, not memoRef. We ignore memoized
-- results when dumping the cache.
entries <- unsafeLiftIO $ readIORef ref >>= showCache
let
mk_cr (req, res) =
text "cacheRequest" <+> parens (text req) <+> parens (result res)
result (Left e) = text "except" <+> parens (text (show e))
result (Right s) = text "Right" <+> parens (text s)
return $ show $
text "loadCache :: GenHaxl u ()" $$
text "loadCache = do" $$
nest 2 (vcat (map mk_cr (concatMap snd entries))) $$
text "" -- final newline
|
xich/Haxl-1
|
Haxl/Core/Monad.hs
|
bsd-3-clause
| 27,669 | 0 | 27 | 6,592 | 6,197 | 3,148 | 3,049 | 404 | 5 |
module Week4 where
import Data.List
fun1' :: [Integer] -> Integer
--fun1' xs = foldr (*) 1 $ map((-)2) $ filter (even) xs
fun1' = product . map (subtract 2) . filter even
fun2' :: Integer -> Integer
fun2' = sum
      . filter even
      . takeWhile (/= 1)
      . iterate (\n -> if even n then n `div` 2 else 3 * n + 1)
data Tree a = Leaf | Node Integer (Tree a) a (Tree a) deriving (Show, Eq)
foldTree :: [a] -> Tree a
foldTree = foldr insert Leaf
-- insert into a Leaf: make a singleton Node of height 0
-- insert into a Node: descend right when the left subtree is at least as
-- tall, otherwise descend left, keeping the two subtrees within one level
-- of each other; afterwards recompute the height from the two children
  where
    insert :: a -> Tree a -> Tree a
    insert x Leaf = Node 0 Leaf x Leaf
    insert x (Node _ left val right)
      | treeHeight left >= treeHeight right =
          let new_right = insert x right
          in Node (max (treeHeight left) (treeHeight new_right) + 1) left val new_right
      | otherwise =
          let new_left = insert x left
          in Node (max (treeHeight new_left) (treeHeight right) + 1) new_left val right
    treeHeight :: Tree a -> Integer
    treeHeight Leaf = -1
    treeHeight (Node height _ _ _) = height
xor :: [Bool] -> Bool
xor = foldr eachOne False
where
eachOne :: Bool -> Bool -> Bool
eachOne True False = True
eachOne True True = False
eachOne False True = True
eachOne False False = False
xor2 :: Bool -> Bool -> Bool
xor2 a b = (a || b) && not (a && b)
map' :: (a -> b) -> [a] -> [b]
map' f = foldr (\x y -> f x : y) []
sieveSundaram :: Integer -> [Integer]
sieveSundaram n = map ((+1) . (*2)) $ [1..n] \\ sieve
where sieve = map (\(i, j) -> i + j + 2*i*j)
. filter (\(i, j) -> i + j + 2*i*j <= n)
$ cartProd [1..n] [1..n]
cartProd :: [a] -> [b] -> [(a, b)]
cartProd xs ys = [(x,y) | x <- xs, y <- ys]
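-- A few usage checks for the functions above (illustrative only; they rely
-- solely on the definitions in this module and the Prelude).
week4Examples :: Bool
week4Examples =
     fun1' [2, 4, 6] == 0 -- (2-2) * (4-2) * (6-2)
  && fun2' 5 == 30 -- 16 + 8 + 4 + 2
  && xor [True, False, True, True] -- an odd number of Trues
  && map' (* 2) [1, 2, 3] == ([2, 4, 6] :: [Integer])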
|
zach007/cis194
|
src/Week4.hs
|
bsd-3-clause
| 1,852 | 4 | 16 | 510 | 799 | 422 | 377 | 42 | 4 |
-- |
-- Module : Data.Git.Storage.PackIndex
-- License : BSD-style
-- Maintainer : Vincent Hanquez <[email protected]>
-- Stability : experimental
-- Portability : unix
--
{-# LANGUAGE OverloadedStrings, BangPatterns #-}
module Data.Git.Storage.PackIndex
( PackIndexHeader(..)
, PackIndex(..)
-- * handles and enumeration
, packIndexOpen
, packIndexClose
, withPackIndex
, packIndexEnumerate
-- * read from packIndex
, packIndexHeaderGetNbWithPrefix
, packIndexGetReferenceLocation
, packIndexGetReferencesWithPrefix
, packIndexReadHeader
, packIndexRead
, packIndexGetHeader
) where
import Filesystem
import Filesystem.Path
import qualified Filesystem.Path.Rules as Rules
import Data.List
import Data.Bits
import Data.Word
import Data.String
import Data.Vector (Vector, (!))
import qualified Data.Vector as V
import Data.Git.Internal
import Data.Git.Imports
import Data.Git.Storage.FileReader
import Data.Git.Path
import Data.Git.Ref
import qualified Data.Git.Parser as P
import Prelude hiding (FilePath)
-- | represent an packIndex header with the version and the fanout table
data PackIndexHeader = PackIndexHeader !Word32 !(Vector Word32)
deriving (Show,Eq)
data PackIndex = PackIndex
{ packIndexSha1s :: Vector Ref
, packIndexCRCs :: Vector Word32
, packIndexPackoffs :: Vector Word32
, packIndexPackChecksum :: Ref
, packIndexChecksum :: Ref
}
-- | enumerate every indexes file in the pack directory
packIndexEnumerate repoPath = map onlyHash . filter isPackFile . map (Rules.encodeString Rules.posix . filename) <$> listDirectory (repoPath </> "objects" </> "pack")
where
isPackFile :: String -> Bool
isPackFile x = ".idx" `isSuffixOf` x && "pack-" `isPrefixOf` x
onlyHash = fromHexString . takebut 4 . drop 5
takebut n l = take (length l - n) l
-- | open an index
packIndexOpen :: FilePath -> Ref -> IO FileReader
packIndexOpen repoPath indexRef = openFile (indexPath repoPath indexRef) ReadMode >>= fileReaderNew False
-- | close an index
packIndexClose :: FileReader -> IO ()
packIndexClose = fileReaderClose
-- | variant of withFile on the index file and with a FileReader
withPackIndex repoPath indexRef = withFileReader (indexPath repoPath indexRef)
-- | returns the number of references, referenced in this index.
packIndexHeaderGetSize :: PackIndexHeader -> Word32
packIndexHeaderGetSize (PackIndexHeader _ indexes) = indexes ! 255
-- | byte size of an packIndex header.
packIndexHeaderByteSize :: Int
packIndexHeaderByteSize = 2*4 {- header -} + 256*4 {- fanout table -}
-- | get the number of reference in this index with a specific prefix
packIndexHeaderGetNbWithPrefix :: PackIndexHeader -> Int -> Word32
packIndexHeaderGetNbWithPrefix (PackIndexHeader _ indexes) n
| n < 0 || n > 255 = 0
| n == 0 = indexes ! 0
| otherwise = (indexes ! n) - (indexes ! (n-1))
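-- For example (with made-up numbers): if indexes ! 0x29 == 120 and
-- indexes ! 0x2a == 124, then 4 references start with byte 0x2a, and they
-- occupy positions 120..123 of the sorted sha1 table.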
-- | fold on refs with a specific prefix
packIndexHeaderFoldRef :: PackIndexHeader -> FileReader -> Int -> (a -> Word32 -> Ref -> (a, Bool)) -> a -> IO a
packIndexHeaderFoldRef idxHdr@(PackIndexHeader _ indexes) fr refprefix f initAcc
| nb == 0 = return initAcc
| otherwise = do
let spos = (indexes ! refprefix) - nb
fileReaderSeek fr (fromIntegral (sha1Offset + spos * 20))
loop nb initAcc
where
loop 0 acc = return acc
loop n acc = do
b <- fromBinary <$> fileReaderGetBS 20 fr
let (!nacc, terminate) = f acc (nb-n) b
if terminate
then return nacc
else loop (n-1) nacc
nb = packIndexHeaderGetNbWithPrefix idxHdr refprefix
(sha1Offset,_,_) = packIndexOffsets idxHdr
-- | return the reference offset in the packfile if found
packIndexGetReferenceLocation :: PackIndexHeader -> FileReader -> Ref -> IO (Maybe Word64)
packIndexGetReferenceLocation idxHdr@(PackIndexHeader _ indexes) fr ref = do
mrpos <- packIndexHeaderFoldRef idxHdr fr refprefix f Nothing
case mrpos of
Nothing -> return Nothing
Just rpos -> do
let spos = (indexes ! refprefix) - nb
fileReaderSeek fr (fromIntegral (packOffset + 4 * (spos+rpos)))
Just . fromIntegral . be32 <$> fileReaderGetBS 4 fr
where
f acc rpos rref = if ref == rref then (Just rpos,True) else (acc,False)
refprefix = refPrefix ref
nb = packIndexHeaderGetNbWithPrefix idxHdr refprefix
(_,_,packOffset) = packIndexOffsets idxHdr
-- | get all references that start by prefix.
packIndexGetReferencesWithPrefix :: PackIndexHeader -> FileReader -> String -> IO [Ref]
packIndexGetReferencesWithPrefix idxHdr fr prefix =
packIndexHeaderFoldRef idxHdr fr refprefix f []
where
f acc _ ref = case cmpPrefix prefix ref of
GT -> (acc ,False)
EQ -> (ref:acc,False)
LT -> (acc ,True)
refprefix = read ("0x" ++ take 2 prefix)
-- | returns absolute offset in the index file of the sha1s, the crcs and the packfiles offset.
packIndexOffsets idx = (packIndexSha1sOffset, packIndexCRCsOffset, packIndexPackOffOffset)
where
packIndexPackOffOffset = packIndexCRCsOffset + crcsTableSz
packIndexCRCsOffset = packIndexSha1sOffset + sha1TableSz
packIndexSha1sOffset = fromIntegral packIndexHeaderByteSize
crcsTableSz = 4 * sz
sha1TableSz = 20 * sz
sz = packIndexHeaderGetSize idx
-- | parse index header
parsePackIndexHeader = do
magic <- P.word32
when (magic /= 0xff744f63) $ error "wrong magic number for packIndex"
ver <- P.word32
when (ver /= 2) $ error "unsupported packIndex version"
fanouts <- V.replicateM 256 P.word32
return $ PackIndexHeader ver fanouts
-- | read index header from an index filereader
packIndexReadHeader :: FileReader -> IO PackIndexHeader
packIndexReadHeader fr = fileReaderSeek fr 0 >> fileReaderParse fr parsePackIndexHeader
-- | get index header from an index reference
packIndexGetHeader :: FilePath -> Ref -> IO PackIndexHeader
packIndexGetHeader repoPath indexRef = withPackIndex repoPath indexRef $ packIndexReadHeader
-- | read all index
packIndexRead repoPath indexRef = do
withPackIndex repoPath indexRef $ \fr -> do
idx <- fileReaderParse fr parsePackIndexHeader
liftM2 (,) (return idx) (fileReaderParse fr (parsePackIndex $ packIndexHeaderGetSize idx))
where parsePackIndex sz = do
sha1s <- V.replicateM (fromIntegral sz) P.referenceBin
crcs <- V.replicateM (fromIntegral sz) P.word32
packoffs <- V.replicateM (fromIntegral sz) P.word32
let nbLarge = length $ filter (== True) $ map (\packoff -> packoff `testBit` 31) $ V.toList packoffs
largeoffs <- replicateM nbLarge (P.takeBytes 4)
packfileChecksum <- P.referenceBin
idxfileChecksum <- P.referenceBin
-- large packfile offsets
-- trailer
return (sha1s, crcs, packoffs, largeoffs, packfileChecksum, idxfileChecksum)
|
NicolasDP/hit
|
Data/Git/Storage/PackIndex.hs
|
bsd-3-clause
| 7,735 | 0 | 18 | 2,246 | 1,788 | 937 | 851 | 129 | 3 |
-- |
-- Support for source code annotation feature of GHC. That is the ANN pragma.
--
-- (c) The University of Glasgow 2006
-- (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
--
{-# LANGUAGE DeriveFunctor #-}
module Annotations (
-- * Main Annotation data types
Annotation(..), AnnPayload,
AnnTarget(..), CoreAnnTarget,
getAnnTargetName_maybe,
-- * AnnEnv for collecting and querying Annotations
AnnEnv,
mkAnnEnv, extendAnnEnvList, plusAnnEnv, emptyAnnEnv,
findAnns, findAnnsByTypeRep,
deserializeAnns
) where
import GhcPrelude
import Binary
import Module ( Module
, ModuleEnv, emptyModuleEnv, extendModuleEnvWith
, plusModuleEnv_C, lookupWithDefaultModuleEnv
, mapModuleEnv )
import NameEnv
import Name
import Outputable
import GHC.Serialized
import Control.Monad
import Data.Maybe
import Data.Typeable
import Data.Word ( Word8 )
-- | Represents an annotation after it has been sufficiently desugared from
-- its initial form of 'HsDecls.AnnDecl'
data Annotation = Annotation {
ann_target :: CoreAnnTarget, -- ^ The target of the annotation
ann_value :: AnnPayload
}
type AnnPayload = Serialized -- ^ The "payload" of an annotation
-- allows recovery of its value at a given type,
-- and can be persisted to an interface file
-- | An annotation target
data AnnTarget name
= NamedTarget name -- ^ We are annotating something with a name:
-- a type or identifier
| ModuleTarget Module -- ^ We are annotating a particular module
deriving (Functor)
-- | The kind of annotation target found in the middle end of the compiler
type CoreAnnTarget = AnnTarget Name
-- | Get the 'name' of an annotation target if it exists.
getAnnTargetName_maybe :: AnnTarget name -> Maybe name
getAnnTargetName_maybe (NamedTarget nm) = Just nm
getAnnTargetName_maybe _ = Nothing
instance Outputable name => Outputable (AnnTarget name) where
ppr (NamedTarget nm) = text "Named target" <+> ppr nm
ppr (ModuleTarget mod) = text "Module target" <+> ppr mod
instance Binary name => Binary (AnnTarget name) where
put_ bh (NamedTarget a) = do
putByte bh 0
put_ bh a
put_ bh (ModuleTarget a) = do
putByte bh 1
put_ bh a
get bh = do
h <- getByte bh
case h of
0 -> liftM NamedTarget $ get bh
_ -> liftM ModuleTarget $ get bh
instance Outputable Annotation where
ppr ann = ppr (ann_target ann)
-- | A collection of annotations
data AnnEnv = MkAnnEnv { ann_mod_env :: !(ModuleEnv [AnnPayload])
, ann_name_env :: !(NameEnv [AnnPayload])
}
-- | An empty annotation environment.
emptyAnnEnv :: AnnEnv
emptyAnnEnv = MkAnnEnv emptyModuleEnv emptyNameEnv
-- | Construct a new annotation environment that contains the list of
-- annotations provided.
mkAnnEnv :: [Annotation] -> AnnEnv
mkAnnEnv = extendAnnEnvList emptyAnnEnv
-- | Add the given annotation to the environment.
extendAnnEnvList :: AnnEnv -> [Annotation] -> AnnEnv
extendAnnEnvList env =
foldl' extendAnnEnv env
extendAnnEnv :: AnnEnv -> Annotation -> AnnEnv
extendAnnEnv (MkAnnEnv mod_env name_env) (Annotation tgt payload) =
case tgt of
NamedTarget name -> MkAnnEnv mod_env (extendNameEnv_C (++) name_env name [payload])
ModuleTarget mod -> MkAnnEnv (extendModuleEnvWith (++) mod_env mod [payload]) name_env
-- | Union two annotation environments.
plusAnnEnv :: AnnEnv -> AnnEnv -> AnnEnv
plusAnnEnv a b =
MkAnnEnv { ann_mod_env = plusModuleEnv_C (++) (ann_mod_env a) (ann_mod_env b)
, ann_name_env = plusNameEnv_C (++) (ann_name_env a) (ann_name_env b)
}
-- | Find the annotations attached to the given target as 'Typeable'
-- values of your choice. If no deserializer is specified,
-- only transient annotations will be returned.
findAnns :: Typeable a => ([Word8] -> a) -> AnnEnv -> CoreAnnTarget -> [a]
findAnns deserialize env
= mapMaybe (fromSerialized deserialize) . findAnnPayloads env
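-- An illustrative sketch: an annotation such as {-# ANN foo (42 :: Int) #-}
-- could typically be recovered in a compiler pass with something like
--
-- > findAnns deserializeWithData annEnv (NamedTarget fooName) :: [Int]
--
-- where 'deserializeWithData' comes from "GHC.Serialized", and 'annEnv' and
-- 'fooName' are assumed to be in scope.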
-- | Find the annotations attached to the given target as 'Typeable'
-- values of your choice. If no deserializer is specified,
-- only transient annotations will be returned.
findAnnsByTypeRep :: AnnEnv -> CoreAnnTarget -> TypeRep -> [[Word8]]
findAnnsByTypeRep env target tyrep
= [ ws | Serialized tyrep' ws <- findAnnPayloads env target
, tyrep' == tyrep ]
-- | Find payloads for the given 'CoreAnnTarget' in an 'AnnEnv'.
findAnnPayloads :: AnnEnv -> CoreAnnTarget -> [AnnPayload]
findAnnPayloads env target =
case target of
ModuleTarget mod -> lookupWithDefaultModuleEnv (ann_mod_env env) [] mod
NamedTarget name -> fromMaybe [] $ lookupNameEnv (ann_name_env env) name
-- | Deserialize all annotations of a given type. This happens lazily: no
-- deserialization will take place until the [a] is actually demanded, and the
-- [a] can also be empty (the UniqFM is not filtered).
deserializeAnns :: Typeable a => ([Word8] -> a) -> AnnEnv -> (ModuleEnv [a], NameEnv [a])
deserializeAnns deserialize env
= ( mapModuleEnv deserAnns (ann_mod_env env)
, mapNameEnv deserAnns (ann_name_env env)
)
where deserAnns = mapMaybe (fromSerialized deserialize)
|
sdiehl/ghc
|
compiler/main/Annotations.hs
|
bsd-3-clause
| 5,459 | 0 | 12 | 1,316 | 1,096 | 591 | 505 | 91 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Dampf.Nginx.Types
( Server(..)
, ServerDecl(..)
, pShowServer
, pShowFakeServer
) where
import Data.Text (Text)
import qualified Data.Text as T
import Text.PrettyPrint
newtype Server = Server [ServerDecl]
addDecl :: ServerDecl -> Server -> Server
addDecl d (Server ds) = Server (d : ds)
data ServerDecl
= Listen Int [String]
| ServerName [Text]
| Location Text [(Text, Text)]
| Include FilePath
| SSLCertificate FilePath
| SSLCertificateKey FilePath
| SSLTrustedCertificate FilePath
pShowServer :: Server -> String
pShowServer = render . pprServer
pShowFakeServer :: Server -> String
pShowFakeServer = render . addMoreThings
where addMoreThings doc =
text "events" <+> lbrace
$+$ nest 4 (text "worker_connections 512;")
$+$ rbrace
$+$ text "http" <+> lbrace
$+$ nest 4 (pprServer doc)
$+$ rbrace
pprServer :: Server -> Doc
pprServer (Server ds) =
text "server" <+> lbrace
$+$ nest 4 (vcat $ fmap pprServerDecl ds)
$+$ rbrace
pprServerDecl :: ServerDecl -> Doc
pprServerDecl (Listen p ss) = text "listen"
<+> int p <+> vcat (fmap text ss) <> semi
pprServerDecl (ServerName ns) = text "server_name"
<+> hsep (fmap (text . T.unpack) ns) <> semi
pprServerDecl (Location p kvs) = text "location"
<+> text (T.unpack p) <+> lbrace
$+$ nest 4 (vcat (fmap ppMap kvs))
$+$ rbrace
pprServerDecl (Include p) = text "include"
<+> text p <> semi
pprServerDecl (SSLCertificate p) = text "ssl_certificate"
<+> text p <> semi
pprServerDecl (SSLTrustedCertificate p) = text "ssl_trusted_certificate"
<+> text p <> semi
pprServerDecl (SSLCertificateKey p) = text "ssl_certificate_key"
<+> text p <> semi
ppMap :: (Text, Text) -> Doc
ppMap (k, v) = text (T.unpack k) <+> text (T.unpack v) <> semi
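-- An example rendering (illustrative only):
--
-- > pShowServer (Server [Listen 80 [], ServerName ["example.com"]])
--
-- produces roughly
--
-- > server {
-- >     listen 80;
-- >     server_name example.com;
-- > }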
|
filopodia/open
|
dampf/lib/Dampf/Nginx/Types.hs
|
mit
| 1,957 | 0 | 15 | 504 | 653 | 338 | 315 | 55 | 1 |
{-# LANGUAGE TypeApplications, DeriveGeneric #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE DataKinds, FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE StandaloneDeriving #-}
module Database.Persist.TH.SharedPrimaryKeySpec where
import TemplateTestImports
import Data.Time
import Data.Proxy
import Test.Hspec
import Database.Persist
import Database.Persist.EntityDef
import Database.Persist.Sql
import Database.Persist.Sql.Util
import Database.Persist.TH
share [ mkPersist sqlSettings ] [persistLowerCase|
User
name String
Profile
Id UserId
email String
Profile2
Id (Key User)
email String
DayKeyTable
Id Day
name Text
RefDayKey
dayKey DayKeyTableId
|]
spec :: Spec
spec = describe "Shared Primary Keys" $ do
let
getSqlType :: PersistEntity a => Proxy a -> SqlType
getSqlType p =
case getEntityId (entityDef p) of
EntityIdField fd ->
fieldSqlType fd
_ ->
SqlOther "Composite Key"
keyProxy :: Proxy a -> Proxy (Key a)
keyProxy _ = Proxy
sqlTypeEquivalent
:: (PersistFieldSql (Key a), PersistEntity a)
=> Proxy a
-> Expectation
sqlTypeEquivalent proxy =
sqlType (keyProxy proxy) `shouldBe` getSqlType proxy
testSqlTypeEquivalent
:: (PersistFieldSql (Key a), PersistEntity a)
=> Proxy a
-> Spec
testSqlTypeEquivalent prxy =
it "has equivalent SqlType from sqlType and entityId" $
sqlTypeEquivalent prxy
describe "PersistFieldSql" $ do
it "should match underlying key" $ do
sqlType (Proxy @UserId)
`shouldBe`
sqlType (Proxy @ProfileId)
describe "User" $ do
it "has default ID key, SqlInt64" $ do
sqlType (Proxy @UserId)
`shouldBe`
SqlInt64
testSqlTypeEquivalent (Proxy @User)
describe "Profile" $ do
it "has same ID key type as User" $ do
sqlType (Proxy @ProfileId)
`shouldBe`
sqlType (Proxy @UserId)
testSqlTypeEquivalent(Proxy @Profile)
describe "Profile2" $ do
it "has same ID key type as User" $ do
sqlType (Proxy @Profile2Id)
`shouldBe`
sqlType (Proxy @UserId)
testSqlTypeEquivalent (Proxy @Profile2)
describe "getEntityId FieldDef" $ do
it "should match underlying primary key" $ do
getSqlType (Proxy @User)
`shouldBe`
getSqlType (Proxy @Profile)
describe "DayKeyTable" $ do
testSqlTypeEquivalent (Proxy @DayKeyTable)
it "sqlType has Day type" $ do
sqlType (Proxy @Day)
`shouldBe`
sqlType (Proxy @DayKeyTableId)
it "getSqlType has Day type" $ do
sqlType (Proxy @Day)
`shouldBe`
getSqlType (Proxy @DayKeyTable)
describe "RefDayKey" $ do
let
[dayKeyField] =
getEntityFields (entityDef (Proxy @RefDayKey))
testSqlTypeEquivalent (Proxy @RefDayKey)
it "has same sqltype as underlying" $ do
fieldSqlType dayKeyField
`shouldBe`
sqlType (Proxy @Day)
it "has the right fieldType" $ do
fieldType dayKeyField
`shouldBe`
FTTypeCon Nothing "DayKeyTableId"
it "has the right type" $ do
let
_ =
refDayKeyDayKey
:: RefDayKey -> DayKeyTableId
_ =
RefDayKeyDayKey
:: EntityField RefDayKey DayKeyTableId
True `shouldBe` True
it "has a foreign ref" $ do
case fieldReference dayKeyField of
ForeignRef refName -> do
refName `shouldBe` EntityNameHS "DayKeyTable"
other ->
fail $ "expected foreign ref, got: " <> show other
|
yesodweb/persistent
|
persistent/test/Database/Persist/TH/SharedPrimaryKeySpec.hs
|
mit
| 4,478 | 0 | 20 | 1,628 | 921 | 443 | 478 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.MachineLearning.CreateRealtimeEndpoint
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Creates a real-time endpoint for the 'MLModel'. The endpoint contains the URI
-- of the 'MLModel'; that is, the location to send real-time prediction requests
-- for the specified 'MLModel'.
--
-- <http://docs.aws.amazon.com/machine-learning/latest/APIReference/API_CreateRealtimeEndpoint.html>
module Network.AWS.MachineLearning.CreateRealtimeEndpoint
(
-- * Request
CreateRealtimeEndpoint
-- ** Request constructor
, createRealtimeEndpoint
-- ** Request lenses
, creMLModelId
-- * Response
, CreateRealtimeEndpointResponse
-- ** Response constructor
, createRealtimeEndpointResponse
-- ** Response lenses
, crerMLModelId
, crerRealtimeEndpointInfo
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.MachineLearning.Types
import qualified GHC.Exts
newtype CreateRealtimeEndpoint = CreateRealtimeEndpoint
{ _creMLModelId :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'CreateRealtimeEndpoint' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'creMLModelId' @::@ 'Text'
--
createRealtimeEndpoint :: Text -- ^ 'creMLModelId'
-> CreateRealtimeEndpoint
createRealtimeEndpoint p1 = CreateRealtimeEndpoint
{ _creMLModelId = p1
}
-- | The ID assigned to the 'MLModel' during creation.
creMLModelId :: Lens' CreateRealtimeEndpoint Text
creMLModelId = lens _creMLModelId (\s a -> s { _creMLModelId = a })
data CreateRealtimeEndpointResponse = CreateRealtimeEndpointResponse
{ _crerMLModelId :: Maybe Text
, _crerRealtimeEndpointInfo :: Maybe RealtimeEndpointInfo
} deriving (Eq, Read, Show)
-- | 'CreateRealtimeEndpointResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'crerMLModelId' @::@ 'Maybe' 'Text'
--
-- * 'crerRealtimeEndpointInfo' @::@ 'Maybe' 'RealtimeEndpointInfo'
--
createRealtimeEndpointResponse :: CreateRealtimeEndpointResponse
createRealtimeEndpointResponse = CreateRealtimeEndpointResponse
{ _crerMLModelId = Nothing
, _crerRealtimeEndpointInfo = Nothing
}
-- | A user-supplied ID that uniquely identifies the 'MLModel'. This value should be
-- identical to the value of the 'MLModelId' in the request.
crerMLModelId :: Lens' CreateRealtimeEndpointResponse (Maybe Text)
crerMLModelId = lens _crerMLModelId (\s a -> s { _crerMLModelId = a })
-- | The endpoint information of the 'MLModel'
crerRealtimeEndpointInfo :: Lens' CreateRealtimeEndpointResponse (Maybe RealtimeEndpointInfo)
crerRealtimeEndpointInfo =
lens _crerRealtimeEndpointInfo
(\s a -> s { _crerRealtimeEndpointInfo = a })
instance ToPath CreateRealtimeEndpoint where
toPath = const "/"
instance ToQuery CreateRealtimeEndpoint where
toQuery = const mempty
instance ToHeaders CreateRealtimeEndpoint
instance ToJSON CreateRealtimeEndpoint where
toJSON CreateRealtimeEndpoint{..} = object
[ "MLModelId" .= _creMLModelId
]
instance AWSRequest CreateRealtimeEndpoint where
type Sv CreateRealtimeEndpoint = MachineLearning
type Rs CreateRealtimeEndpoint = CreateRealtimeEndpointResponse
request = post "CreateRealtimeEndpoint"
response = jsonResponse
instance FromJSON CreateRealtimeEndpointResponse where
parseJSON = withObject "CreateRealtimeEndpointResponse" $ \o -> CreateRealtimeEndpointResponse
<$> o .:? "MLModelId"
<*> o .:? "RealtimeEndpointInfo"
|
romanb/amazonka
|
amazonka-ml/gen/Network/AWS/MachineLearning/CreateRealtimeEndpoint.hs
|
mpl-2.0
| 4,587 | 0 | 11 | 900 | 527 | 318 | 209 | 64 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
--------------------------------------------------------------------
-- |
-- Module : System.Cron.Parser
-- Description : Attoparsec parser for cron formatted intervals
-- Copyright : (c) Michael Xavier 2012
-- License : MIT
--
-- Maintainer: Michael Xavier <[email protected]>
-- Portability: portable
--
-- Attoparsec parser combinator for cron schedules. See cron documentation for
-- how those are formatted.
--
-- > import Data.Attoparsec.Text (parseOnly)
-- > import System.Cron.Parser
-- >
-- > main :: IO ()
-- > main = do
-- > print $ parseOnly cronSchedule "*/2 * 3 * 4,5,6"
--
--------------------------------------------------------------------
module System.Cron.Parser (cronSchedule,
cronScheduleLoose,
crontab,
crontabEntry) where
import System.Cron
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative (pure, (*>), (<$>), (<*), (<*>), (<|>))
#else
import Control.Applicative ((<$>), (<|>))
#endif
import Data.Char (isSpace)
import Data.Attoparsec.Text (Parser)
import qualified Data.Attoparsec.Text as A
import Data.Text (Text)
-- | Attoparsec Parser for a cron schedule. Complies fully with the standard
-- cron format. Also includes the following shorthand formats which cron also
-- supports: \@yearly, \@monthly, \@weekly, \@daily, \@hourly. Note that this
-- parser will fail if there is extraneous input. This is to prevent things
-- like extra fields. If you want a more lax parser, use 'cronScheduleLoose',
-- which is fine with extra input.
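--
-- A small sketch of the strict behaviour (assumes 'parseOnly' from
-- "Data.Attoparsec.Text"; the schedules are illustrative):
--
-- > parseOnly cronSchedule "*/2 * 3 * 4,5,6"         -- parses
-- > parseOnly cronSchedule "*/2 * 3 * 4,5,6 extra"   -- fails: extraneous input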
cronSchedule :: Parser CronSchedule
cronSchedule = cronScheduleLoose <* A.endOfInput
-- | Same as 'cronSchedule' but does not fail on extraneous input.
cronScheduleLoose :: Parser CronSchedule
cronScheduleLoose = yearlyP <|>
monthlyP <|>
weeklyP <|>
dailyP <|>
hourlyP <|>
classicP
-- | Parses a full crontab file, omitting comments and including environment
-- variable sets (e.g. FOO=BAR).
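--
-- For instance (a sketch; the crontab text is illustrative):
--
-- > parseOnly crontab "FOO=BAR\n*/2 * * * * /bin/backup"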
crontab :: Parser Crontab
crontab = Crontab <$> A.sepBy lineP (A.char '\n')
where lineP = A.skipMany commentP *> crontabEntry
commentP = A.skipSpace *> A.char '#' *> skipToEOL
-- | Parses an individual crontab line, which is either a scheduled command or
-- an environmental variable set.
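--
-- For example (result shapes shown approximately):
--
-- > parseOnly crontabEntry "FOO=BAR"              -- an 'EnvVariable' entry
-- > parseOnly crontabEntry "*/2 * * * * /bin/ls"  -- a 'CommandEntry' entry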
crontabEntry :: Parser CrontabEntry
crontabEntry = A.skipSpace *> parser
where parser = envVariableP <|>
commandEntryP
envVariableP = do var <- A.takeWhile1 (A.notInClass " =")
A.skipSpace
_ <- A.char '='
A.skipSpace
val <- A.takeWhile1 $ not . isSpace
A.skipWhile (\c -> c == ' ' || c == '\t')
return $ EnvVariable var val
commandEntryP = CommandEntry <$> cronScheduleLoose
<*> (A.skipSpace *> takeToEOL)
---- Internals
takeToEOL :: Parser Text
takeToEOL = A.takeTill (== '\n') -- <* A.skip (== '\n')
skipToEOL :: Parser ()
skipToEOL = A.skipWhile (/= '\n')
classicP :: Parser CronSchedule
classicP = CronSchedule <$> (minutesP <* space)
<*> (hoursP <* space)
<*> (dayOfMonthP <* space)
<*> (monthP <* space)
<*> dayOfWeekP
where space = A.char ' '
cronFieldP :: Parser CronField
cronFieldP = steppedP <|>
rangeP <|>
listP <|>
starP <|>
specificP
where starP = A.char '*' *> pure Star
rangeP = do start <- parseInt
_ <- A.char '-'
end <- parseInt
if start <= end
then return $ RangeField start end
else rangeInvalid
rangeInvalid = fail "start of range must be less than or equal to end"
-- Must avoid infinitely recursive parsers
listP = reduceList <$> A.sepBy1 listableP (A.char ',')
listableP = starP <|>
rangeP <|>
steppedP <|>
specificP
stepListP = ListField <$> A.sepBy1 stepListableP (A.char ',')
stepListableP = starP <|>
rangeP
steppedP = StepField <$> steppableP <*> (A.char '/' *> parseInt)
steppableP = starP <|>
rangeP <|>
stepListP <|>
specificP
specificP = SpecificField <$> parseInt
yearlyP :: Parser CronSchedule
yearlyP = A.string "@yearly" *> pure yearly
monthlyP :: Parser CronSchedule
monthlyP = A.string "@monthly" *> pure monthly
weeklyP :: Parser CronSchedule
weeklyP = A.string "@weekly" *> pure weekly
dailyP :: Parser CronSchedule
dailyP = A.string "@daily" *> pure daily
hourlyP :: Parser CronSchedule
hourlyP = A.string "@hourly" *> pure hourly
--TODO: must handle a combination of many of these. EITHER just *, OR a list of
minutesP :: Parser MinuteSpec
minutesP = Minutes <$> cronFieldP
hoursP :: Parser HourSpec
hoursP = Hours <$> cronFieldP
dayOfMonthP :: Parser DayOfMonthSpec
dayOfMonthP = DaysOfMonth <$> cronFieldP
monthP :: Parser MonthSpec
monthP = Months <$> cronFieldP
dayOfWeekP :: Parser DayOfWeekSpec
dayOfWeekP = DaysOfWeek <$> cronFieldP
parseInt :: Parser Int
parseInt = A.decimal
reduceList :: [CronField] -> CronField
reduceList [] = ListField [] -- this should not happen
reduceList [x] = x
reduceList xs = ListField xs
|
proger/cron
|
src/System/Cron/Parser.hs
|
bsd-2-clause
| 5,876 | 0 | 14 | 1,886 | 1,048 | 569 | 479 | 103 | 2 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
module Ef.Type.Set where
import Ef.Type.Bool
import GHC.TypeLits
type family xs ∪ ys where
'[] ∪ ys = ys
'[x] ∪ (x ': xs) = x ': xs
'[x] ∪ (y ': xs) = y ': '[x] ∪ xs
(x ': xs) ∪ ys = '[x] ∪ (xs ∪ ys)
type family (x :: k) ∈ (xs :: [k]) :: Bool where
x ∈ '[] = 'False
x ∈ (x ': xs) = 'True
x ∈ (y ': xs) = x ∈ xs
type family (x :: k) ≠ (y :: k) :: Bool where
x ≠ x = 'False
x ≠ y = 'True
type family (x :: k) ∉ (ys :: [k]) :: Bool where
x ∉ '[] = 'True
x ∉ (x ': ys) = 'False
x ∉ (y ': ys) = x ∉ ys
type family (xs :: [k]) ⊆ (ys :: [k]) :: Bool where
'[] ⊆ ys = 'True
(x ': xs) ⊆ ys = And (x ∈ ys) (xs ⊆ ys)
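-- A small sketch of how these families reduce (the element types 'A' and 'B'
-- are hypothetical):
--
--   '[A] ∪ '[A, B]  reduces to  '[A, B]
--   A ∈ '[A, B]     reduces to  'True
--   '[A] ⊆ '[A, B]  reduces to  'True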
|
grumply/mop
|
src/Ef/Type/Set.hs
|
bsd-3-clause
| 934 | 0 | 9 | 244 | 458 | 262 | 196 | 29 | 0 |
-- |
--
-- Functions in this module return well-formed 'Encoding''.
-- Polymorphic variants, which return @'Encoding' a@, return a textual JSON
-- value, so it can be used as both @'Encoding'' 'Text'@ and @'Encoding' = 'Encoding'' 'Value'@.
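--
-- A brief sketch of building and rendering encodings (rendered output shown
-- approximately):
--
-- > encodingToLazyByteString (list int [1,2,3])   -- "[1,2,3]"
-- > encodingToLazyByteString (text "hi")          -- "\"hi\""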
module Data.Aeson.Encoding
(
-- * Encoding
Encoding
, Encoding'
, encodingToLazyByteString
, fromEncoding
, unsafeToEncoding
, Series
, pairs
, pair
-- * Predicates
, nullEncoding
-- * Encoding constructors
, emptyArray_
, emptyObject_
, text
, lazyText
, string
, list
, dict
, null_
, bool
-- ** Decimal numbers
, int8, int16, int32, int64, int
, word8, word16, word32, word64, word
, integer, float, double, scientific
-- ** Decimal numbers as Text
, int8Text, int16Text, int32Text, int64Text, intText
, word8Text, word16Text, word32Text, word64Text, wordText
, integerText, floatText, doubleText, scientificText
-- ** Time
, day
, localTime
, utcTime
, timeOfDay
, zonedTime
-- ** value
, value
) where
import Prelude ()
import Data.Aeson.Encoding.Internal
|
tolysz/prepare-ghcjs
|
spec-lts8/aeson/Data/Aeson/Encoding.hs
|
bsd-3-clause
| 1,165 | 0 | 4 | 318 | 190 | 131 | 59 | 34 | 0 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
module Distribution.Types.PkgconfigName
( PkgconfigName, unPkgconfigName, mkPkgconfigName
) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Utils.ShortText
import qualified Text.PrettyPrint as Disp
import Distribution.Compat.ReadP
import Distribution.Text
-- | A pkg-config library name
--
-- This is parsed as any valid argument to the pkg-config utility.
--
-- @since 2.0
newtype PkgconfigName = PkgconfigName ShortText
deriving (Generic, Read, Show, Eq, Ord, Typeable, Data)
-- | Convert 'PkgconfigName' to 'String'
--
-- @since 2.0
unPkgconfigName :: PkgconfigName -> String
unPkgconfigName (PkgconfigName s) = fromShortText s
-- | Construct a 'PkgconfigName' from a 'String'
--
-- 'mkPkgconfigName' is the inverse to 'unPkgconfigName'
--
-- Note: No validations are performed to ensure that the resulting
-- 'PkgconfigName' is valid
--
-- @since 2.0
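--
-- For example (a sketch):
--
-- > unPkgconfigName (mkPkgconfigName "gtk+-2.0") == "gtk+-2.0"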
mkPkgconfigName :: String -> PkgconfigName
mkPkgconfigName = PkgconfigName . toShortText
-- | 'mkPkgconfigName'
--
-- @since 2.0
instance IsString PkgconfigName where
fromString = mkPkgconfigName
instance Binary PkgconfigName
-- pkg-config allows versions and other letters in package names, eg
-- "gtk+-2.0" is a valid pkg-config package _name_. It then has a package
-- version number like 2.10.13
instance Text PkgconfigName where
disp = Disp.text . unPkgconfigName
parse = mkPkgconfigName
<$> munch1 (\c -> isAlphaNum c || c `elem` "+-._")
instance NFData PkgconfigName where
rnf (PkgconfigName pkg) = rnf pkg
|
themoritz/cabal
|
Cabal/Distribution/Types/PkgconfigName.hs
|
bsd-3-clause
| 1,651 | 0 | 12 | 257 | 260 | 156 | 104 | 26 | 1 |
module PackageTests.BuildDeps.TargetSpecificDeps3.Check where
import Test.HUnit
import PackageTests.PackageTester
import System.FilePath
import Data.List
suite :: Test
suite = TestCase $ do
let spec = PackageSpec ("PackageTests" </> "BuildDeps" </> "TargetSpecificDeps3") []
result <- cabal_build spec
assertEqual "cabal build should fail - see test-log.txt" False (successful result)
assertBool "error should be in lemon.hs" $
"lemon.hs:" `isInfixOf` outputText result
assertBool "error should be \"Could not find module `System.Time\"" $
"Could not find module `System.Time'" `isInfixOf` (intercalate " " $ lines $ outputText result)
|
IreneKnapp/Faction
|
libfaction/tests/PackageTests/BuildDeps/TargetSpecificDeps3/Check.hs
|
bsd-3-clause
| 675 | 0 | 14 | 118 | 147 | 75 | 72 | 14 | 1 |
-- | Windows 7 Taskbar Progress plugin: <http://nsis.sourceforge.net/TaskbarProgress_plug-in>
module Development.NSIS.Plugins.Taskbar(taskbar) where
import Development.NSIS
-- | Enable Windows 7 taskbar plugin, called anywhere.
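--
-- A minimal usage sketch (the surrounding installer actions are illustrative):
--
-- > installer :: Action ()
-- > installer = do
-- >     taskbar
-- >     -- ... rest of the installer script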
taskbar :: Action ()
taskbar = onPageShow InstFiles $ plugin "w7tbp" "Start" []
|
ndmitchell/nsis
|
src/Development/NSIS/Plugins/Taskbar.hs
|
bsd-3-clause
| 312 | 0 | 7 | 38 | 51 | 29 | 22 | 4 | 1 |
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Network/Wai/Handler/Warp/HTTP2.hs" #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Network.Wai.Handler.Warp.HTTP2 (isHTTP2, http2) where
import Control.Concurrent (forkIO, killThread)
import qualified Control.Exception as E
import Control.Monad (when, unless, replicateM_)
import Data.ByteString (ByteString)
import Network.HTTP2
import Network.Socket (SockAddr)
import Network.Wai
import Network.Wai.Handler.Warp.HTTP2.EncodeFrame
import Network.Wai.Handler.Warp.HTTP2.Manager
import Network.Wai.Handler.Warp.HTTP2.Receiver
import Network.Wai.Handler.Warp.HTTP2.Request
import Network.Wai.Handler.Warp.HTTP2.Sender
import Network.Wai.Handler.Warp.HTTP2.Types
import Network.Wai.Handler.Warp.HTTP2.Worker
import qualified Network.Wai.Handler.Warp.Settings as S (Settings)
import Network.Wai.Handler.Warp.Types
----------------------------------------------------------------
http2 :: Connection -> InternalInfo1 -> SockAddr -> Transport -> S.Settings -> (BufSize -> IO ByteString) -> Application -> IO ()
http2 conn ii1 addr transport settings readN app = do
checkTLS
ok <- checkPreface
when ok $ do
ctx <- newContext
-- Workers, worker manager and timer manager
mgr <- start settings
let responder = response settings ctx mgr
action = worker ctx settings app responder
setAction mgr action
-- The number of workers is 3.
-- This was carefully chosen based on a lot of benchmarks.
-- If it is 1, we cannot avoid head-of-line blocking.
-- If it is large, huge memory is consumed and many
-- context switches happen.
replicateM_ 3 $ spawnAction mgr
-- Receiver
let mkreq = mkRequest ii1 settings addr
tid <- forkIO $ frameReceiver ctx mkreq readN
-- Sender
-- frameSender runs in the main thread because it ensures that
-- a GOAWAY frame is sent.
frameSender ctx conn settings mgr `E.finally` do
clearContext ctx
stop mgr
killThread tid
where
checkTLS = case transport of
TCP -> return () -- direct
tls -> unless (tls12orLater tls) $ goaway conn InadequateSecurity "Weak TLS"
tls12orLater tls = tlsMajorVersion tls == 3 && tlsMinorVersion tls >= 3
checkPreface = do
preface <- readN connectionPrefaceLength
if connectionPreface /= preface then do
goaway conn ProtocolError "Preface mismatch"
return False
else
return True
-- connClose must not be called here since Run:fork calls it
goaway :: Connection -> ErrorCodeId -> ByteString -> IO ()
goaway Connection{..} etype debugmsg = connSendAll bytestream
where
bytestream = goawayFrame 0 etype debugmsg
|
phischu/fragnix
|
tests/packages/scotty/Network.Wai.Handler.Warp.HTTP2.hs
|
bsd-3-clause
| 2,816 | 0 | 13 | 634 | 598 | 326 | 272 | 51 | 3 |
module Feature.QuerySpec where
import Test.Hspec
import Test.Hspec.Wai
import Test.Hspec.Wai.JSON
import Network.Wai.Test (SResponse(simpleHeaders))
import SpecHelper
spec :: Spec
spec =
beforeAll (clearTable "items" >> createItems 15)
. beforeAll (clearTable "nullable_integer" >> createNullInteger)
. beforeAll (
clearTable "no_pk" >>
createNulls 2 >>
createLikableStrings >>
createJsonData)
. afterAll_ (clearTable "items" >> clearTable "no_pk" >> clearTable "simple_pk")
. around withApp $ do
describe "Querying a table with a column called count" $
it "should not confuse count column with pg_catalog.count aggregate" $
get "/has_count_column" `shouldRespondWith` 200
describe "Querying a nonexistent table" $
it "causes a 404" $
get "/faketable" `shouldRespondWith` 404
describe "Filtering response" $ do
it "matches with equality" $
get "/items?id=eq.5"
`shouldRespondWith` ResponseMatcher {
matchBody = Just [json| [{"id":5}] |]
, matchStatus = 200
, matchHeaders = ["Content-Range" <:> "0-0/1"]
}
it "matches items IN" $
get "/items?id=in.1,3,5"
`shouldRespondWith` ResponseMatcher {
matchBody = Just [json| [{"id":1},{"id":3},{"id":5}] |]
, matchStatus = 200
, matchHeaders = ["Content-Range" <:> "0-2/3"]
}
it "matches nulls in varchar and numeric fields alike" $ do
get "/no_pk?a=is.null" `shouldRespondWith`
[json| [{"a": null, "b": null}] |]
get "/nullable_integer?a=is.null" `shouldRespondWith` "[{\"a\":null}]"
it "matches with like" $ do
get "/simple_pk?k=like.*yx" `shouldRespondWith`
"[{\"k\":\"xyyx\",\"extra\":\"u\"}]"
get "/simple_pk?k=like.xy*" `shouldRespondWith`
"[{\"k\":\"xyyx\",\"extra\":\"u\"}]"
get "/simple_pk?k=like.*YY*" `shouldRespondWith`
"[{\"k\":\"xYYx\",\"extra\":\"v\"}]"
it "matches with ilike" $ do
get "/simple_pk?k=ilike.xy*&order=extra.asc" `shouldRespondWith`
"[{\"k\":\"xyyx\",\"extra\":\"u\"},{\"k\":\"xYYx\",\"extra\":\"v\"}]"
get "/simple_pk?k=ilike.*YY*&order=extra.asc" `shouldRespondWith`
"[{\"k\":\"xyyx\",\"extra\":\"u\"},{\"k\":\"xYYx\",\"extra\":\"v\"}]"
it "matches with tsearch @@" $
get "/tsearch?text_search_vector=@@.foo" `shouldRespondWith`
"[{\"text_search_vector\":\"'bar':2 'foo':1\"}]"
it "matches with computed column" $
get "/items?always_true=eq.true" `shouldRespondWith`
"[{\"id\":1},{\"id\":2},{\"id\":3},{\"id\":4},{\"id\":5},{\"id\":6},{\"id\":7},{\"id\":8},{\"id\":9},{\"id\":10},{\"id\":11},{\"id\":12},{\"id\":13},{\"id\":14},{\"id\":15}]"
describe "ordering response" $ do
it "by a column asc" $
get "/items?id=lte.2&order=id.asc"
`shouldRespondWith` ResponseMatcher {
matchBody = Just [json| [{"id":1},{"id":2}] |]
, matchStatus = 200
, matchHeaders = ["Content-Range" <:> "0-1/2"]
}
it "by a column desc" $
get "/items?id=lte.2&order=id.desc"
`shouldRespondWith` ResponseMatcher {
matchBody = Just [json| [{"id":2},{"id":1}] |]
, matchStatus = 200
, matchHeaders = ["Content-Range" <:> "0-1/2"]
}
it "by a column asc with nulls last" $
get "/no_pk?order=a.asc.nullslast"
`shouldRespondWith` ResponseMatcher {
matchBody = Just [json| [{"a":"1","b":"0"},
{"a":"2","b":"0"},
{"a":null,"b":null}] |]
, matchStatus = 200
, matchHeaders = ["Content-Range" <:> "0-2/3"]
}
it "by a column desc with nulls first" $
get "/no_pk?order=a.desc.nullsfirst"
`shouldRespondWith` ResponseMatcher {
matchBody = Just [json| [{"a":null,"b":null},
{"a":"2","b":"0"},
{"a":"1","b":"0"}] |]
, matchStatus = 200
, matchHeaders = ["Content-Range" <:> "0-2/3"]
}
it "by a column desc with nulls last" $
get "/no_pk?order=a.desc.nullslast"
`shouldRespondWith` ResponseMatcher {
matchBody = Just [json| [{"a":"2","b":"0"},
{"a":"1","b":"0"},
{"a":null,"b":null}] |]
, matchStatus = 200
, matchHeaders = ["Content-Range" <:> "0-2/3"]
}
it "without other constraints" $
get "/items?order=asc.id" `shouldRespondWith` 200
describe "Canonical location" $ do
it "Sets Content-Location with alphabetized params" $
get "/no_pk?b=eq.1&a=eq.1"
`shouldRespondWith` ResponseMatcher {
matchBody = Just "[]"
, matchStatus = 200
, matchHeaders = ["Content-Location" <:> "/no_pk?a=eq.1&b=eq.1"]
}
it "Omits question mark when there are no params" $ do
r <- get "/simple_pk"
liftIO $ do
let respHeaders = simpleHeaders r
respHeaders `shouldSatisfy` matchHeader
"Content-Location" "/simple_pk"
describe "jsonb" $
it "can filter by properties inside json column" $ do
get "/json?data->foo->>bar=eq.baz" `shouldRespondWith`
[json| [{"data": {"foo": {"bar": "baz"}}}] |]
get "/json?data->foo->>bar=eq.fake" `shouldRespondWith`
[json| [] |]
|
acrispin/postgrest
|
test/Feature/QuerySpec.hs
|
mit
| 5,403 | 0 | 20 | 1,423 | 930 | 494 | 436 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
import Test.HUnit (Assertion, (@=?), runTestTT, Test(..), Counts(..))
import System.Exit (ExitCode(..), exitWith)
import qualified ListOps as L
import Control.Exception (Exception, throw, evaluate, try)
import Data.Typeable (Typeable)
data FoldlIsStrictException = FoldlIsStrictException deriving (Eq, Show, Typeable)
instance Exception FoldlIsStrictException
exitProperly :: IO Counts -> IO ()
exitProperly m = do
counts <- m
exitWith $ if failures counts /= 0 || errors counts /= 0 then ExitFailure 1 else ExitSuccess
testCase :: String -> Assertion -> Test
testCase label assertion = TestLabel label (TestCase assertion)
main :: IO ()
main = exitProperly $ runTestTT $ TestList
[ TestList listOpsTests ]
big :: Int
big = 100000
listOpsTests :: [Test]
listOpsTests =
[ testCase "length of empty list" $ do
0 @=? L.length ([] :: [Int])
, testCase "length of non-empty list" $ do
4 @=? L.length [1 .. 4 :: Int]
, testCase "length of large list" $ do
big @=? L.length [1 .. big :: Int]
, testCase "reverse of empty list" $ do
[] @=? L.reverse ([] :: [Int])
, testCase "reverse of non-empty list" $ do
[100 , 99 .. 1] @=? L.reverse [1 .. 100 :: Int]
, testCase "map of empty list" $ do
[] @=? L.map (+1) ([] :: [Int])
, testCase "map of non-empty list" $ do
[2, 4 .. 8] @=? L.map (+1) [1, 3 .. 7 :: Int]
, testCase "filter of empty list" $ do
[] @=? L.filter undefined ([] :: [Int])
, testCase "filter of normal list" $ do
[1, 3] @=? L.filter odd [1 .. 4 :: Int]
, testCase "foldl' of empty list" $ do
0 @=? L.foldl' (+) (0 :: Int) []
, testCase "foldl' of non-empty list" $ do
7 @=? L.foldl' (+) (-3) [1 .. 4 :: Int]
, testCase "foldl' of huge list" $ do
big * (big + 1) `div` 2 @=? L.foldl' (+) 0 [1 .. big]
, testCase "foldl' with non-commutative function" $ do
0 @=? L.foldl' (-) 10 [1 .. 4 :: Int]
, testCase "foldl' is not just foldr . flip" $ do
"fdsa" @=? L.foldl' (flip (:)) [] "asdf"
, testCase "foldl' is accumulator-strict (use seq or BangPatterns)" $ do
r <- try . evaluate $
L.foldl' (flip const) () [throw FoldlIsStrictException, ()]
Left FoldlIsStrictException @=? (r :: Either FoldlIsStrictException ())
, testCase "foldr as id" $ do
[1 .. big] @=? L.foldr (:) [] [1 .. big]
, testCase "foldr as append" $ do
[1 .. big] @=? L.foldr (:) [100 .. big] [1 .. 99]
, testCase "++ of empty lists" $ do
[] @=? [] L.++ ([] :: [Int])
, testCase "++ of empty and non-empty lists" $ do
[1 .. 4] @=? [] L.++ [1 .. 4 :: Int]
, testCase "++ of non-empty and empty lists" $ do
[1 .. 4] @=? [1 .. 4 :: Int] L.++ []
, testCase "++ of non-empty lists" $ do
[1 .. 5] @=? [1 .. 3] L.++ [4, 5 :: Int]
, testCase "++ of large lists" $ do
[1 .. big] @=? [1 .. big `div` 2] L.++ [1 + big `div` 2 .. big]
, testCase "concat of no lists" $ do
[] @=? L.concat ([] :: [[Int]])
, testCase "concat of list of lists" $ do
[1 .. 6] @=? L.concat [[1, 2], [3], [], [4, 5, 6 :: Int]]
, testCase "concat of large list of small lists" $ do
[1 .. big] @=? L.concat (map (:[]) [1 .. big])
]
|
pminten/xhaskell
|
list-ops/list-ops_test.hs
|
mit
| 3,183 | 0 | 14 | 763 | 1,342 | 729 | 613 | 73 | 2 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[PrelNames]{Definitions of prelude modules and names}
Nota Bene: all Names defined in here should come from the base package
- ModuleNames for prelude modules,
e.g. pREL_BASE_Name :: ModuleName
- Modules for prelude modules
e.g. pREL_Base :: Module
- Uniques for Ids, DataCons, TyCons and Classes that the compiler
"knows about" in some way
e.g. intTyConKey :: Unique
minusClassOpKey :: Unique
- Names for Ids, DataCons, TyCons and Classes that the compiler
"knows about" in some way
e.g. intTyConName :: Name
minusName :: Name
One of these Names contains
(a) the module and occurrence name of the thing
(b) its Unique
The main way the compiler "knows about" one of these things is
where the type checker or desugarer needs to look it up. For
example, when desugaring list comprehensions the desugarer
needs to conjure up 'foldr'. It does this by looking up
foldrName in the environment.
- RdrNames for Ids, DataCons etc that the compiler may emit into
generated code (e.g. for deriving). It's not necessary to know
the uniques for these guys, only their names
Note [Known-key names]
~~~~~~~~~~~~~~~~~~~~~~
It is *very* important that the compiler gives wired-in things and
things with "known-key" names the correct Uniques wherever they
occur. We have to be careful about this in exactly two places:
1. When we parse some source code, renaming the AST better yield an
AST whose Names have the correct uniques
2. When we read an interface file, the read-in gubbins better have
the right uniques
This is accomplished through a combination of mechanisms:
1. When parsing source code, the RdrName-decorated AST has some
RdrNames which are Exact. These are wired-in RdrNames where
we could directly tell from the parsed syntax what Name to
use. For example, when we parse a [] in a type we can just insert
an Exact RdrName Name with the listTyConKey.
Currently, I believe this is just an optimisation: it would be
equally valid to just output Orig RdrNames that correctly record
the module etc we expect the final Name to come from. However,
were we to eliminate isBuiltInOcc_maybe it would become essential
(see point 3).
2. The knownKeyNames (which consist of the basicKnownKeyNames from
the module, and those names reachable via the wired-in stuff from
TysWiredIn) are used to initialise the "OrigNameCache" in
IfaceEnv. This initialization ensures that when the type checker
or renamer (both of which use IfaceEnv) look up an original name
(i.e. a pair of a Module and an OccName) for a known-key name
they get the correct Unique.
This is the most important mechanism for ensuring that known-key
stuff gets the right Unique, and is why it is so important to
place your known-key names in the appropriate lists.
3. For "infinite families" of known-key names (i.e. tuples and sums), we
have to be extra careful. Because there are an infinite number of
these things, we cannot add them to the list of known-key names
used to initialise the OrigNameCache. Instead, we have to
rely on never having to look them up in that cache. See
Note [Infinite families of known-key names] for details.
Note [Infinite families of known-key names]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Infinite families of known-key things (e.g. tuples and sums) pose a tricky
problem: we can't add them to the knownKeyNames finite map which we use to
ensure that, e.g., a reference to (,) gets assigned the right unique (if this
doesn't sound familiar see Note [Known-key names] above).
We instead handle tuples and sums separately from the "vanilla" known-key
things,
a) The parser recognises them specially and generates an Exact Name (hence not
looked up in the orig-name cache)
b) The known infinite families of names are specially serialised by
BinIface.putName, with that special treatment detected when we read back to
ensure that we get back to the correct uniques. See Note [Symbol table
representation of names] in BinIface and Note [How tuples work] in
TysWiredIn.
Most of the infinite families cannot occur in source code, so mechanisms (a) and (b)
suffice to ensure that they always have the right Unique. In particular,
implicit param TyCon names, constraint tuples and Any TyCons cannot be mentioned
by the user. For those things that *can* appear in source programs,
c) IfaceEnv.lookupOrigNameCache uses isBuiltInOcc_maybe to map built-in syntax
directly onto the corresponding name, rather than trying to find it in the
original-name cache.
See also Note [Built-in syntax and the OrigNameCache]
-}
{-# LANGUAGE CPP #-}
module PrelNames (
Unique, Uniquable(..), hasKey, -- Re-exported for convenience
-----------------------------------------------------------
module PrelNames, -- A huge bunch of (a) Names, e.g. intTyConName
-- (b) Uniques e.g. intTyConKey
-- (c) Groups of classes and types
-- (d) miscellaneous things
-- So many that we export them all
) where
#include "HsVersions.h"
import Module
import OccName
import RdrName
import Unique
import Name
import SrcLoc
import FastString
import Config ( cIntegerLibraryType, IntegerLibrary(..) )
import Panic ( panic )
{-
************************************************************************
* *
allNameStrings
* *
************************************************************************
-}
allNameStrings :: [String]
-- Infinite list of a,b,c...z, aa, ab, ac, ... etc
allNameStrings = [ c:cs | cs <- "" : allNameStrings, c <- ['a'..'z'] ]
{-
************************************************************************
* *
\subsection{Local Names}
* *
************************************************************************
This *local* name is used by the interactive stuff
-}
itName :: Unique -> SrcSpan -> Name
itName uniq loc = mkInternalName uniq (mkOccNameFS varName (fsLit "it")) loc
-- mkUnboundName makes a place-holder Name; it shouldn't be looked at except possibly
-- during compiler debugging.
mkUnboundName :: OccName -> Name
mkUnboundName occ = mkInternalName unboundKey occ noSrcSpan
isUnboundName :: Name -> Bool
isUnboundName name = name `hasKey` unboundKey
{-
************************************************************************
* *
\subsection{Known key Names}
* *
************************************************************************
This section tells what the compiler knows about the association of
names with uniques. These ones are the *non* wired-in ones. The
wired in ones are defined in TysWiredIn etc.
-}
basicKnownKeyNames :: [Name]
basicKnownKeyNames
= genericTyConNames
++ [ -- Classes. *Must* include:
-- classes that are grabbed by key (e.g., eqClassKey)
-- classes in "Class.standardClassKeys" (quite a few)
eqClassName, -- mentioned, derivable
ordClassName, -- derivable
boundedClassName, -- derivable
numClassName, -- mentioned, numeric
enumClassName, -- derivable
monadClassName,
functorClassName,
realClassName, -- numeric
integralClassName, -- numeric
fractionalClassName, -- numeric
floatingClassName, -- numeric
realFracClassName, -- numeric
realFloatClassName, -- numeric
dataClassName,
isStringClassName,
applicativeClassName,
alternativeClassName,
foldableClassName,
traversableClassName,
semigroupClassName, sappendName,
monoidClassName, memptyName, mappendName, mconcatName,
-- The IO type
-- See Note [TyConRepNames for non-wired-in TyCons]
ioTyConName, ioDataConName,
runMainIOName,
-- Type representation types
trModuleTyConName, trModuleDataConName,
trNameTyConName, trNameSDataConName, trNameDDataConName,
trTyConTyConName, trTyConDataConName,
-- Typeable
typeableClassName,
typeRepTyConName,
typeRepIdName,
mkPolyTyConAppName,
mkAppTyName,
typeSymbolTypeRepName, typeNatTypeRepName,
trGhcPrimModuleName,
-- Dynamic
toDynName,
-- Numeric stuff
negateName, minusName, geName, eqName,
-- Conversion functions
rationalTyConName,
ratioTyConName, ratioDataConName,
fromRationalName, fromIntegerName,
toIntegerName, toRationalName,
fromIntegralName, realToFracName,
-- Int# stuff
divIntName, modIntName,
-- String stuff
fromStringName,
-- Enum stuff
enumFromName, enumFromThenName,
enumFromThenToName, enumFromToName,
-- Applicative stuff
pureAName, apAName, thenAName,
-- Monad stuff
thenIOName, bindIOName, returnIOName, failIOName, bindMName, thenMName,
returnMName, fmapName, joinMName,
-- MonadFail
monadFailClassName, failMName, failMName_preMFP,
-- MonadFix
monadFixClassName, mfixName,
-- Arrow stuff
arrAName, composeAName, firstAName,
appAName, choiceAName, loopAName,
-- Ix stuff
ixClassName,
-- Show stuff
showClassName,
-- Read stuff
readClassName,
-- Stable pointers
newStablePtrName,
-- GHC Extensions
groupWithName,
-- Strings and lists
unpackCStringName,
unpackCStringFoldrName, unpackCStringUtf8Name,
-- Overloaded lists
isListClassName,
fromListName,
fromListNName,
toListName,
-- List operations
concatName, filterName, mapName,
zipName, foldrName, buildName, augmentName, appendName,
-- FFI primitive types that are not wired-in.
stablePtrTyConName, ptrTyConName, funPtrTyConName,
int8TyConName, int16TyConName, int32TyConName, int64TyConName,
word16TyConName, word32TyConName, word64TyConName,
-- Others
otherwiseIdName, inlineIdName,
eqStringName, assertName, breakpointName, breakpointCondName,
breakpointAutoName, opaqueTyConName,
assertErrorName,
printName, fstName, sndName,
-- Integer
integerTyConName, mkIntegerName,
integerToWord64Name, integerToInt64Name,
word64ToIntegerName, int64ToIntegerName,
plusIntegerName, timesIntegerName, smallIntegerName,
wordToIntegerName,
integerToWordName, integerToIntName, minusIntegerName,
negateIntegerName, eqIntegerPrimName, neqIntegerPrimName,
absIntegerName, signumIntegerName,
leIntegerPrimName, gtIntegerPrimName, ltIntegerPrimName, geIntegerPrimName,
compareIntegerName, quotRemIntegerName, divModIntegerName,
quotIntegerName, remIntegerName, divIntegerName, modIntegerName,
floatFromIntegerName, doubleFromIntegerName,
encodeFloatIntegerName, encodeDoubleIntegerName,
decodeDoubleIntegerName,
gcdIntegerName, lcmIntegerName,
andIntegerName, orIntegerName, xorIntegerName, complementIntegerName,
shiftLIntegerName, shiftRIntegerName, bitIntegerName,
-- Float/Double
rationalToFloatName,
rationalToDoubleName,
-- Other classes
randomClassName, randomGenClassName, monadPlusClassName,
-- Type-level naturals
knownNatClassName, knownSymbolClassName,
-- Overloaded labels
isLabelClassName,
-- Implicit Parameters
ipClassName,
-- Call Stacks
callStackTyConName,
emptyCallStackName, pushCallStackName,
-- Source Locations
srcLocDataConName,
-- Annotation type checking
toAnnotationWrapperName
-- The Ordering type
, orderingTyConName
, ltDataConName, eqDataConName, gtDataConName
-- The SPEC type for SpecConstr
, specTyConName
-- The Either type
, eitherTyConName, leftDataConName, rightDataConName
-- Plugins
, pluginTyConName
, frontendPluginTyConName
-- Generics
, genClassName, gen1ClassName
, datatypeClassName, constructorClassName, selectorClassName
-- Monad comprehensions
, guardMName
, liftMName
, mzipName
-- GHCi Sandbox
, ghciIoClassName, ghciStepIoMName
-- StaticPtr
, staticPtrTyConName
, staticPtrDataConName, staticPtrInfoDataConName
, fromStaticPtrName
-- Fingerprint
, fingerprintDataConName
-- Custom type errors
, errorMessageTypeErrorFamName
, typeErrorTextDataConName
, typeErrorAppendDataConName
, typeErrorVAppendDataConName
, typeErrorShowTypeDataConName
-- homogeneous equality
, eqTyConName
] ++ case cIntegerLibraryType of
IntegerGMP -> [integerSDataConName]
IntegerSimple -> []
genericTyConNames :: [Name]
genericTyConNames = [
v1TyConName, u1TyConName, par1TyConName, rec1TyConName,
k1TyConName, m1TyConName, sumTyConName, prodTyConName,
compTyConName, rTyConName, dTyConName,
cTyConName, sTyConName, rec0TyConName,
d1TyConName, c1TyConName, s1TyConName, noSelTyConName,
repTyConName, rep1TyConName, uRecTyConName,
uAddrTyConName, uCharTyConName, uDoubleTyConName,
uFloatTyConName, uIntTyConName, uWordTyConName,
prefixIDataConName, infixIDataConName, leftAssociativeDataConName,
rightAssociativeDataConName, notAssociativeDataConName,
sourceUnpackDataConName, sourceNoUnpackDataConName,
noSourceUnpackednessDataConName, sourceLazyDataConName,
sourceStrictDataConName, noSourceStrictnessDataConName,
decidedLazyDataConName, decidedStrictDataConName, decidedUnpackDataConName,
metaDataDataConName, metaConsDataConName, metaSelDataConName
]
{-
************************************************************************
* *
\subsection{Module names}
* *
************************************************************************
--MetaHaskell Extension Add a new module here
-}
pRELUDE :: Module
pRELUDE = mkBaseModule_ pRELUDE_NAME
gHC_PRIM, gHC_TYPES, gHC_GENERICS, gHC_MAGIC,
gHC_CLASSES, gHC_BASE, gHC_ENUM, gHC_GHCI, gHC_CSTRING,
gHC_SHOW, gHC_READ, gHC_NUM, gHC_INTEGER_TYPE, gHC_LIST,
gHC_TUPLE, dATA_TUPLE, dATA_EITHER, dATA_STRING,
dATA_FOLDABLE, dATA_TRAVERSABLE, dATA_MONOID, dATA_SEMIGROUP,
gHC_CONC, gHC_IO, gHC_IO_Exception,
gHC_ST, gHC_ARR, gHC_STABLE, gHC_PTR, gHC_ERR, gHC_REAL,
gHC_FLOAT, gHC_TOP_HANDLER, sYSTEM_IO, dYNAMIC,
tYPEABLE, tYPEABLE_INTERNAL, gENERICS,
rEAD_PREC, lEX, gHC_INT, gHC_WORD, mONAD, mONAD_FIX, mONAD_ZIP, mONAD_FAIL,
aRROW, cONTROL_APPLICATIVE, gHC_DESUGAR, rANDOM, gHC_EXTS,
cONTROL_EXCEPTION_BASE, gHC_TYPELITS, dATA_TYPE_EQUALITY,
dATA_COERCE :: Module
gHC_PRIM = mkPrimModule (fsLit "GHC.Prim") -- Primitive types and values
gHC_TYPES = mkPrimModule (fsLit "GHC.Types")
gHC_MAGIC = mkPrimModule (fsLit "GHC.Magic")
gHC_CSTRING = mkPrimModule (fsLit "GHC.CString")
gHC_CLASSES = mkPrimModule (fsLit "GHC.Classes")
gHC_BASE = mkBaseModule (fsLit "GHC.Base")
gHC_ENUM = mkBaseModule (fsLit "GHC.Enum")
gHC_GHCI = mkBaseModule (fsLit "GHC.GHCi")
gHC_SHOW = mkBaseModule (fsLit "GHC.Show")
gHC_READ = mkBaseModule (fsLit "GHC.Read")
gHC_NUM = mkBaseModule (fsLit "GHC.Num")
gHC_INTEGER_TYPE= mkIntegerModule (fsLit "GHC.Integer.Type")
gHC_LIST = mkBaseModule (fsLit "GHC.List")
gHC_TUPLE = mkPrimModule (fsLit "GHC.Tuple")
dATA_TUPLE = mkBaseModule (fsLit "Data.Tuple")
dATA_EITHER = mkBaseModule (fsLit "Data.Either")
dATA_STRING = mkBaseModule (fsLit "Data.String")
dATA_FOLDABLE = mkBaseModule (fsLit "Data.Foldable")
dATA_TRAVERSABLE= mkBaseModule (fsLit "Data.Traversable")
dATA_SEMIGROUP = mkBaseModule (fsLit "Data.Semigroup")
dATA_MONOID = mkBaseModule (fsLit "Data.Monoid")
gHC_CONC = mkBaseModule (fsLit "GHC.Conc")
gHC_IO = mkBaseModule (fsLit "GHC.IO")
gHC_IO_Exception = mkBaseModule (fsLit "GHC.IO.Exception")
gHC_ST = mkBaseModule (fsLit "GHC.ST")
gHC_ARR = mkBaseModule (fsLit "GHC.Arr")
gHC_STABLE = mkBaseModule (fsLit "GHC.Stable")
gHC_PTR = mkBaseModule (fsLit "GHC.Ptr")
gHC_ERR = mkBaseModule (fsLit "GHC.Err")
gHC_REAL = mkBaseModule (fsLit "GHC.Real")
gHC_FLOAT = mkBaseModule (fsLit "GHC.Float")
gHC_TOP_HANDLER = mkBaseModule (fsLit "GHC.TopHandler")
sYSTEM_IO = mkBaseModule (fsLit "System.IO")
dYNAMIC = mkBaseModule (fsLit "Data.Dynamic")
tYPEABLE = mkBaseModule (fsLit "Data.Typeable")
tYPEABLE_INTERNAL = mkBaseModule (fsLit "Data.Typeable.Internal")
gENERICS = mkBaseModule (fsLit "Data.Data")
rEAD_PREC = mkBaseModule (fsLit "Text.ParserCombinators.ReadPrec")
lEX = mkBaseModule (fsLit "Text.Read.Lex")
gHC_INT = mkBaseModule (fsLit "GHC.Int")
gHC_WORD = mkBaseModule (fsLit "GHC.Word")
mONAD = mkBaseModule (fsLit "Control.Monad")
mONAD_FIX = mkBaseModule (fsLit "Control.Monad.Fix")
mONAD_ZIP = mkBaseModule (fsLit "Control.Monad.Zip")
mONAD_FAIL = mkBaseModule (fsLit "Control.Monad.Fail")
aRROW = mkBaseModule (fsLit "Control.Arrow")
cONTROL_APPLICATIVE = mkBaseModule (fsLit "Control.Applicative")
gHC_DESUGAR = mkBaseModule (fsLit "GHC.Desugar")
rANDOM = mkBaseModule (fsLit "System.Random")
gHC_EXTS = mkBaseModule (fsLit "GHC.Exts")
cONTROL_EXCEPTION_BASE = mkBaseModule (fsLit "Control.Exception.Base")
gHC_GENERICS = mkBaseModule (fsLit "GHC.Generics")
gHC_TYPELITS = mkBaseModule (fsLit "GHC.TypeLits")
dATA_TYPE_EQUALITY = mkBaseModule (fsLit "Data.Type.Equality")
dATA_COERCE = mkBaseModule (fsLit "Data.Coerce")
gHC_PARR' :: Module
gHC_PARR' = mkBaseModule (fsLit "GHC.PArr")
gHC_SRCLOC :: Module
gHC_SRCLOC = mkBaseModule (fsLit "GHC.SrcLoc")
gHC_STACK, gHC_STACK_TYPES :: Module
gHC_STACK = mkBaseModule (fsLit "GHC.Stack")
gHC_STACK_TYPES = mkBaseModule (fsLit "GHC.Stack.Types")
gHC_STATICPTR :: Module
gHC_STATICPTR = mkBaseModule (fsLit "GHC.StaticPtr")
gHC_FINGERPRINT_TYPE :: Module
gHC_FINGERPRINT_TYPE = mkBaseModule (fsLit "GHC.Fingerprint.Type")
gHC_OVER_LABELS :: Module
gHC_OVER_LABELS = mkBaseModule (fsLit "GHC.OverloadedLabels")
mAIN, rOOT_MAIN :: Module
mAIN = mkMainModule_ mAIN_NAME
rOOT_MAIN = mkMainModule (fsLit ":Main") -- Root module for initialisation
mkInteractiveModule :: Int -> Module
-- (mkInteractiveModule 9) makes module 'interactive:Ghci9'
mkInteractiveModule n = mkModule interactiveUnitId (mkModuleName ("Ghci" ++ show n))
pRELUDE_NAME, mAIN_NAME :: ModuleName
pRELUDE_NAME = mkModuleNameFS (fsLit "Prelude")
mAIN_NAME = mkModuleNameFS (fsLit "Main")
dATA_ARRAY_PARALLEL_NAME, dATA_ARRAY_PARALLEL_PRIM_NAME :: ModuleName
dATA_ARRAY_PARALLEL_NAME = mkModuleNameFS (fsLit "Data.Array.Parallel")
dATA_ARRAY_PARALLEL_PRIM_NAME = mkModuleNameFS (fsLit "Data.Array.Parallel.Prim")
mkPrimModule :: FastString -> Module
mkPrimModule m = mkModule primUnitId (mkModuleNameFS m)
mkIntegerModule :: FastString -> Module
mkIntegerModule m = mkModule integerUnitId (mkModuleNameFS m)
mkBaseModule :: FastString -> Module
mkBaseModule m = mkModule baseUnitId (mkModuleNameFS m)
mkBaseModule_ :: ModuleName -> Module
mkBaseModule_ m = mkModule baseUnitId m
mkThisGhcModule :: FastString -> Module
mkThisGhcModule m = mkModule thisGhcUnitId (mkModuleNameFS m)
mkThisGhcModule_ :: ModuleName -> Module
mkThisGhcModule_ m = mkModule thisGhcUnitId m
mkMainModule :: FastString -> Module
mkMainModule m = mkModule mainUnitId (mkModuleNameFS m)
mkMainModule_ :: ModuleName -> Module
mkMainModule_ m = mkModule mainUnitId m
{-
************************************************************************
* *
RdrNames
* *
************************************************************************
-}
main_RDR_Unqual :: RdrName
main_RDR_Unqual = mkUnqual varName (fsLit "main")
-- We definitely don't want an Orig RdrName, because
-- main might, in principle, be imported into module Main
forall_tv_RDR, dot_tv_RDR :: RdrName
forall_tv_RDR = mkUnqual tvName (fsLit "forall")
dot_tv_RDR = mkUnqual tvName (fsLit ".")
eq_RDR, ge_RDR, ne_RDR, le_RDR, lt_RDR, gt_RDR, compare_RDR,
ltTag_RDR, eqTag_RDR, gtTag_RDR :: RdrName
eq_RDR = nameRdrName eqName
ge_RDR = nameRdrName geName
ne_RDR = varQual_RDR gHC_CLASSES (fsLit "/=")
le_RDR = varQual_RDR gHC_CLASSES (fsLit "<=")
lt_RDR = varQual_RDR gHC_CLASSES (fsLit "<")
gt_RDR = varQual_RDR gHC_CLASSES (fsLit ">")
compare_RDR = varQual_RDR gHC_CLASSES (fsLit "compare")
ltTag_RDR = dataQual_RDR gHC_TYPES (fsLit "LT")
eqTag_RDR = dataQual_RDR gHC_TYPES (fsLit "EQ")
gtTag_RDR = dataQual_RDR gHC_TYPES (fsLit "GT")
eqClass_RDR, numClass_RDR, ordClass_RDR, enumClass_RDR, monadClass_RDR
:: RdrName
eqClass_RDR = nameRdrName eqClassName
numClass_RDR = nameRdrName numClassName
ordClass_RDR = nameRdrName ordClassName
enumClass_RDR = nameRdrName enumClassName
monadClass_RDR = nameRdrName monadClassName
map_RDR, append_RDR :: RdrName
map_RDR = varQual_RDR gHC_BASE (fsLit "map")
append_RDR = varQual_RDR gHC_BASE (fsLit "++")
foldr_RDR, build_RDR, returnM_RDR, bindM_RDR, failM_RDR_preMFP, failM_RDR:: RdrName
foldr_RDR = nameRdrName foldrName
build_RDR = nameRdrName buildName
returnM_RDR = nameRdrName returnMName
bindM_RDR = nameRdrName bindMName
failM_RDR_preMFP = nameRdrName failMName_preMFP
failM_RDR = nameRdrName failMName
left_RDR, right_RDR :: RdrName
left_RDR = nameRdrName leftDataConName
right_RDR = nameRdrName rightDataConName
fromEnum_RDR, toEnum_RDR :: RdrName
fromEnum_RDR = varQual_RDR gHC_ENUM (fsLit "fromEnum")
toEnum_RDR = varQual_RDR gHC_ENUM (fsLit "toEnum")
enumFrom_RDR, enumFromTo_RDR, enumFromThen_RDR, enumFromThenTo_RDR :: RdrName
enumFrom_RDR = nameRdrName enumFromName
enumFromTo_RDR = nameRdrName enumFromToName
enumFromThen_RDR = nameRdrName enumFromThenName
enumFromThenTo_RDR = nameRdrName enumFromThenToName
ratioDataCon_RDR, plusInteger_RDR, timesInteger_RDR :: RdrName
ratioDataCon_RDR = nameRdrName ratioDataConName
plusInteger_RDR = nameRdrName plusIntegerName
timesInteger_RDR = nameRdrName timesIntegerName
ioDataCon_RDR :: RdrName
ioDataCon_RDR = nameRdrName ioDataConName
eqString_RDR, unpackCString_RDR, unpackCStringFoldr_RDR,
unpackCStringUtf8_RDR :: RdrName
eqString_RDR = nameRdrName eqStringName
unpackCString_RDR = nameRdrName unpackCStringName
unpackCStringFoldr_RDR = nameRdrName unpackCStringFoldrName
unpackCStringUtf8_RDR = nameRdrName unpackCStringUtf8Name
newStablePtr_RDR :: RdrName
newStablePtr_RDR = nameRdrName newStablePtrName
bindIO_RDR, returnIO_RDR :: RdrName
bindIO_RDR = nameRdrName bindIOName
returnIO_RDR = nameRdrName returnIOName
fromInteger_RDR, fromRational_RDR, minus_RDR, times_RDR, plus_RDR :: RdrName
fromInteger_RDR = nameRdrName fromIntegerName
fromRational_RDR = nameRdrName fromRationalName
minus_RDR = nameRdrName minusName
times_RDR = varQual_RDR gHC_NUM (fsLit "*")
plus_RDR = varQual_RDR gHC_NUM (fsLit "+")
toInteger_RDR, toRational_RDR, fromIntegral_RDR :: RdrName
toInteger_RDR = nameRdrName toIntegerName
toRational_RDR = nameRdrName toRationalName
fromIntegral_RDR = nameRdrName fromIntegralName
stringTy_RDR, fromString_RDR :: RdrName
stringTy_RDR = tcQual_RDR gHC_BASE (fsLit "String")
fromString_RDR = nameRdrName fromStringName
fromList_RDR, fromListN_RDR, toList_RDR :: RdrName
fromList_RDR = nameRdrName fromListName
fromListN_RDR = nameRdrName fromListNName
toList_RDR = nameRdrName toListName
compose_RDR :: RdrName
compose_RDR = varQual_RDR gHC_BASE (fsLit ".")
not_RDR, getTag_RDR, succ_RDR, pred_RDR, minBound_RDR, maxBound_RDR,
and_RDR, range_RDR, inRange_RDR, index_RDR,
unsafeIndex_RDR, unsafeRangeSize_RDR :: RdrName
and_RDR = varQual_RDR gHC_CLASSES (fsLit "&&")
not_RDR = varQual_RDR gHC_CLASSES (fsLit "not")
getTag_RDR = varQual_RDR gHC_BASE (fsLit "getTag")
succ_RDR = varQual_RDR gHC_ENUM (fsLit "succ")
pred_RDR = varQual_RDR gHC_ENUM (fsLit "pred")
minBound_RDR = varQual_RDR gHC_ENUM (fsLit "minBound")
maxBound_RDR = varQual_RDR gHC_ENUM (fsLit "maxBound")
range_RDR = varQual_RDR gHC_ARR (fsLit "range")
inRange_RDR = varQual_RDR gHC_ARR (fsLit "inRange")
index_RDR = varQual_RDR gHC_ARR (fsLit "index")
unsafeIndex_RDR = varQual_RDR gHC_ARR (fsLit "unsafeIndex")
unsafeRangeSize_RDR = varQual_RDR gHC_ARR (fsLit "unsafeRangeSize")
readList_RDR, readListDefault_RDR, readListPrec_RDR, readListPrecDefault_RDR,
readPrec_RDR, parens_RDR, choose_RDR, lexP_RDR, expectP_RDR :: RdrName
readList_RDR = varQual_RDR gHC_READ (fsLit "readList")
readListDefault_RDR = varQual_RDR gHC_READ (fsLit "readListDefault")
readListPrec_RDR = varQual_RDR gHC_READ (fsLit "readListPrec")
readListPrecDefault_RDR = varQual_RDR gHC_READ (fsLit "readListPrecDefault")
readPrec_RDR = varQual_RDR gHC_READ (fsLit "readPrec")
parens_RDR = varQual_RDR gHC_READ (fsLit "parens")
choose_RDR = varQual_RDR gHC_READ (fsLit "choose")
lexP_RDR = varQual_RDR gHC_READ (fsLit "lexP")
expectP_RDR = varQual_RDR gHC_READ (fsLit "expectP")
punc_RDR, ident_RDR, symbol_RDR :: RdrName
punc_RDR = dataQual_RDR lEX (fsLit "Punc")
ident_RDR = dataQual_RDR lEX (fsLit "Ident")
symbol_RDR = dataQual_RDR lEX (fsLit "Symbol")
step_RDR, alt_RDR, reset_RDR, prec_RDR, pfail_RDR :: RdrName
step_RDR = varQual_RDR rEAD_PREC (fsLit "step")
alt_RDR = varQual_RDR rEAD_PREC (fsLit "+++")
reset_RDR = varQual_RDR rEAD_PREC (fsLit "reset")
prec_RDR = varQual_RDR rEAD_PREC (fsLit "prec")
pfail_RDR = varQual_RDR rEAD_PREC (fsLit "pfail")
showList_RDR, showList___RDR, showsPrec_RDR, shows_RDR, showString_RDR,
showSpace_RDR, showParen_RDR :: RdrName
showList_RDR = varQual_RDR gHC_SHOW (fsLit "showList")
showList___RDR = varQual_RDR gHC_SHOW (fsLit "showList__")
showsPrec_RDR = varQual_RDR gHC_SHOW (fsLit "showsPrec")
shows_RDR = varQual_RDR gHC_SHOW (fsLit "shows")
showString_RDR = varQual_RDR gHC_SHOW (fsLit "showString")
showSpace_RDR = varQual_RDR gHC_SHOW (fsLit "showSpace")
showParen_RDR = varQual_RDR gHC_SHOW (fsLit "showParen")
undefined_RDR :: RdrName
undefined_RDR = varQual_RDR gHC_ERR (fsLit "undefined")
error_RDR :: RdrName
error_RDR = varQual_RDR gHC_ERR (fsLit "error")
-- Generics (constructors and functions)
u1DataCon_RDR, par1DataCon_RDR, rec1DataCon_RDR,
k1DataCon_RDR, m1DataCon_RDR, l1DataCon_RDR, r1DataCon_RDR,
prodDataCon_RDR, comp1DataCon_RDR,
unPar1_RDR, unRec1_RDR, unK1_RDR, unComp1_RDR,
from_RDR, from1_RDR, to_RDR, to1_RDR,
datatypeName_RDR, moduleName_RDR, packageName_RDR, isNewtypeName_RDR,
conName_RDR, conFixity_RDR, conIsRecord_RDR, selName_RDR,
prefixDataCon_RDR, infixDataCon_RDR, leftAssocDataCon_RDR,
rightAssocDataCon_RDR, notAssocDataCon_RDR,
uAddrDataCon_RDR, uCharDataCon_RDR, uDoubleDataCon_RDR,
uFloatDataCon_RDR, uIntDataCon_RDR, uWordDataCon_RDR,
uAddrHash_RDR, uCharHash_RDR, uDoubleHash_RDR,
uFloatHash_RDR, uIntHash_RDR, uWordHash_RDR :: RdrName
u1DataCon_RDR = dataQual_RDR gHC_GENERICS (fsLit "U1")
par1DataCon_RDR = dataQual_RDR gHC_GENERICS (fsLit "Par1")
rec1DataCon_RDR = dataQual_RDR gHC_GENERICS (fsLit "Rec1")
k1DataCon_RDR = dataQual_RDR gHC_GENERICS (fsLit "K1")
m1DataCon_RDR = dataQual_RDR gHC_GENERICS (fsLit "M1")
l1DataCon_RDR = dataQual_RDR gHC_GENERICS (fsLit "L1")
r1DataCon_RDR = dataQual_RDR gHC_GENERICS (fsLit "R1")
prodDataCon_RDR = dataQual_RDR gHC_GENERICS (fsLit ":*:")
comp1DataCon_RDR = dataQual_RDR gHC_GENERICS (fsLit "Comp1")
unPar1_RDR = varQual_RDR gHC_GENERICS (fsLit "unPar1")
unRec1_RDR = varQual_RDR gHC_GENERICS (fsLit "unRec1")
unK1_RDR = varQual_RDR gHC_GENERICS (fsLit "unK1")
unComp1_RDR = varQual_RDR gHC_GENERICS (fsLit "unComp1")
from_RDR = varQual_RDR gHC_GENERICS (fsLit "from")
from1_RDR = varQual_RDR gHC_GENERICS (fsLit "from1")
to_RDR = varQual_RDR gHC_GENERICS (fsLit "to")
to1_RDR = varQual_RDR gHC_GENERICS (fsLit "to1")
datatypeName_RDR = varQual_RDR gHC_GENERICS (fsLit "datatypeName")
moduleName_RDR = varQual_RDR gHC_GENERICS (fsLit "moduleName")
packageName_RDR = varQual_RDR gHC_GENERICS (fsLit "packageName")
isNewtypeName_RDR = varQual_RDR gHC_GENERICS (fsLit "isNewtype")
selName_RDR = varQual_RDR gHC_GENERICS (fsLit "selName")
conName_RDR = varQual_RDR gHC_GENERICS (fsLit "conName")
conFixity_RDR = varQual_RDR gHC_GENERICS (fsLit "conFixity")
conIsRecord_RDR = varQual_RDR gHC_GENERICS (fsLit "conIsRecord")
prefixDataCon_RDR = dataQual_RDR gHC_GENERICS (fsLit "Prefix")
infixDataCon_RDR = dataQual_RDR gHC_GENERICS (fsLit "Infix")
leftAssocDataCon_RDR = dataQual_RDR gHC_GENERICS (fsLit "LeftAssociative")
rightAssocDataCon_RDR = dataQual_RDR gHC_GENERICS (fsLit "RightAssociative")
notAssocDataCon_RDR = dataQual_RDR gHC_GENERICS (fsLit "NotAssociative")
uAddrDataCon_RDR = dataQual_RDR gHC_GENERICS (fsLit "UAddr")
uCharDataCon_RDR = dataQual_RDR gHC_GENERICS (fsLit "UChar")
uDoubleDataCon_RDR = dataQual_RDR gHC_GENERICS (fsLit "UDouble")
uFloatDataCon_RDR = dataQual_RDR gHC_GENERICS (fsLit "UFloat")
uIntDataCon_RDR = dataQual_RDR gHC_GENERICS (fsLit "UInt")
uWordDataCon_RDR = dataQual_RDR gHC_GENERICS (fsLit "UWord")
uAddrHash_RDR = varQual_RDR gHC_GENERICS (fsLit "uAddr#")
uCharHash_RDR = varQual_RDR gHC_GENERICS (fsLit "uChar#")
uDoubleHash_RDR = varQual_RDR gHC_GENERICS (fsLit "uDouble#")
uFloatHash_RDR = varQual_RDR gHC_GENERICS (fsLit "uFloat#")
uIntHash_RDR = varQual_RDR gHC_GENERICS (fsLit "uInt#")
uWordHash_RDR = varQual_RDR gHC_GENERICS (fsLit "uWord#")
fmap_RDR, pure_RDR, ap_RDR, foldable_foldr_RDR, foldMap_RDR,
traverse_RDR, mempty_RDR, mappend_RDR :: RdrName
fmap_RDR = varQual_RDR gHC_BASE (fsLit "fmap")
pure_RDR = nameRdrName pureAName
ap_RDR = nameRdrName apAName
foldable_foldr_RDR = varQual_RDR dATA_FOLDABLE (fsLit "foldr")
foldMap_RDR = varQual_RDR dATA_FOLDABLE (fsLit "foldMap")
traverse_RDR = varQual_RDR dATA_TRAVERSABLE (fsLit "traverse")
mempty_RDR = varQual_RDR gHC_BASE (fsLit "mempty")
mappend_RDR = varQual_RDR gHC_BASE (fsLit "mappend")
eqTyCon_RDR :: RdrName
eqTyCon_RDR = tcQual_RDR dATA_TYPE_EQUALITY (fsLit "~")
----------------------
varQual_RDR, tcQual_RDR, clsQual_RDR, dataQual_RDR
:: Module -> FastString -> RdrName
varQual_RDR mod str = mkOrig mod (mkOccNameFS varName str)
tcQual_RDR mod str = mkOrig mod (mkOccNameFS tcName str)
clsQual_RDR mod str = mkOrig mod (mkOccNameFS clsName str)
dataQual_RDR mod str = mkOrig mod (mkOccNameFS dataName str)
{-
************************************************************************
* *
\subsection{Known-key names}
* *
************************************************************************
Many of these Names are not really "built in", but some parts of the
compiler (notably the deriving mechanism) need to mention their names,
and it's convenient to write them all down in one place.
--MetaHaskell Extension add the constrs and the lower case
-- guys as well (perhaps) e.g. see trueDataConName below
-}
wildCardName :: Name
wildCardName = mkSystemVarName wildCardKey (fsLit "wild")
runMainIOName :: Name
runMainIOName = varQual gHC_TOP_HANDLER (fsLit "runMainIO") runMainKey
orderingTyConName, ltDataConName, eqDataConName, gtDataConName :: Name
orderingTyConName = tcQual gHC_TYPES (fsLit "Ordering") orderingTyConKey
ltDataConName = dcQual gHC_TYPES (fsLit "LT") ltDataConKey
eqDataConName = dcQual gHC_TYPES (fsLit "EQ") eqDataConKey
gtDataConName = dcQual gHC_TYPES (fsLit "GT") gtDataConKey
specTyConName :: Name
specTyConName = tcQual gHC_TYPES (fsLit "SPEC") specTyConKey
eitherTyConName, leftDataConName, rightDataConName :: Name
eitherTyConName = tcQual dATA_EITHER (fsLit "Either") eitherTyConKey
leftDataConName = dcQual dATA_EITHER (fsLit "Left") leftDataConKey
rightDataConName = dcQual dATA_EITHER (fsLit "Right") rightDataConKey
-- Generics (types)
v1TyConName, u1TyConName, par1TyConName, rec1TyConName,
k1TyConName, m1TyConName, sumTyConName, prodTyConName,
compTyConName, rTyConName, dTyConName,
cTyConName, sTyConName, rec0TyConName,
d1TyConName, c1TyConName, s1TyConName, noSelTyConName,
repTyConName, rep1TyConName, uRecTyConName,
uAddrTyConName, uCharTyConName, uDoubleTyConName,
uFloatTyConName, uIntTyConName, uWordTyConName,
prefixIDataConName, infixIDataConName, leftAssociativeDataConName,
rightAssociativeDataConName, notAssociativeDataConName,
sourceUnpackDataConName, sourceNoUnpackDataConName,
noSourceUnpackednessDataConName, sourceLazyDataConName,
sourceStrictDataConName, noSourceStrictnessDataConName,
decidedLazyDataConName, decidedStrictDataConName, decidedUnpackDataConName,
metaDataDataConName, metaConsDataConName, metaSelDataConName :: Name
v1TyConName = tcQual gHC_GENERICS (fsLit "V1") v1TyConKey
u1TyConName = tcQual gHC_GENERICS (fsLit "U1") u1TyConKey
par1TyConName = tcQual gHC_GENERICS (fsLit "Par1") par1TyConKey
rec1TyConName = tcQual gHC_GENERICS (fsLit "Rec1") rec1TyConKey
k1TyConName = tcQual gHC_GENERICS (fsLit "K1") k1TyConKey
m1TyConName = tcQual gHC_GENERICS (fsLit "M1") m1TyConKey
sumTyConName = tcQual gHC_GENERICS (fsLit ":+:") sumTyConKey
prodTyConName = tcQual gHC_GENERICS (fsLit ":*:") prodTyConKey
compTyConName = tcQual gHC_GENERICS (fsLit ":.:") compTyConKey
rTyConName = tcQual gHC_GENERICS (fsLit "R") rTyConKey
dTyConName = tcQual gHC_GENERICS (fsLit "D") dTyConKey
cTyConName = tcQual gHC_GENERICS (fsLit "C") cTyConKey
sTyConName = tcQual gHC_GENERICS (fsLit "S") sTyConKey
rec0TyConName = tcQual gHC_GENERICS (fsLit "Rec0") rec0TyConKey
d1TyConName = tcQual gHC_GENERICS (fsLit "D1") d1TyConKey
c1TyConName = tcQual gHC_GENERICS (fsLit "C1") c1TyConKey
s1TyConName = tcQual gHC_GENERICS (fsLit "S1") s1TyConKey
noSelTyConName = tcQual gHC_GENERICS (fsLit "NoSelector") noSelTyConKey
repTyConName = tcQual gHC_GENERICS (fsLit "Rep") repTyConKey
rep1TyConName = tcQual gHC_GENERICS (fsLit "Rep1") rep1TyConKey
uRecTyConName = tcQual gHC_GENERICS (fsLit "URec") uRecTyConKey
uAddrTyConName = tcQual gHC_GENERICS (fsLit "UAddr") uAddrTyConKey
uCharTyConName = tcQual gHC_GENERICS (fsLit "UChar") uCharTyConKey
uDoubleTyConName = tcQual gHC_GENERICS (fsLit "UDouble") uDoubleTyConKey
uFloatTyConName = tcQual gHC_GENERICS (fsLit "UFloat") uFloatTyConKey
uIntTyConName = tcQual gHC_GENERICS (fsLit "UInt") uIntTyConKey
uWordTyConName = tcQual gHC_GENERICS (fsLit "UWord") uWordTyConKey
prefixIDataConName = dcQual gHC_GENERICS (fsLit "PrefixI") prefixIDataConKey
infixIDataConName = dcQual gHC_GENERICS (fsLit "InfixI") infixIDataConKey
leftAssociativeDataConName = dcQual gHC_GENERICS (fsLit "LeftAssociative") leftAssociativeDataConKey
rightAssociativeDataConName = dcQual gHC_GENERICS (fsLit "RightAssociative") rightAssociativeDataConKey
notAssociativeDataConName = dcQual gHC_GENERICS (fsLit "NotAssociative") notAssociativeDataConKey
sourceUnpackDataConName = dcQual gHC_GENERICS (fsLit "SourceUnpack") sourceUnpackDataConKey
sourceNoUnpackDataConName = dcQual gHC_GENERICS (fsLit "SourceNoUnpack") sourceNoUnpackDataConKey
noSourceUnpackednessDataConName = dcQual gHC_GENERICS (fsLit "NoSourceUnpackedness") noSourceUnpackednessDataConKey
sourceLazyDataConName = dcQual gHC_GENERICS (fsLit "SourceLazy") sourceLazyDataConKey
sourceStrictDataConName = dcQual gHC_GENERICS (fsLit "SourceStrict") sourceStrictDataConKey
noSourceStrictnessDataConName = dcQual gHC_GENERICS (fsLit "NoSourceStrictness") noSourceStrictnessDataConKey
decidedLazyDataConName = dcQual gHC_GENERICS (fsLit "DecidedLazy") decidedLazyDataConKey
decidedStrictDataConName = dcQual gHC_GENERICS (fsLit "DecidedStrict") decidedStrictDataConKey
decidedUnpackDataConName = dcQual gHC_GENERICS (fsLit "DecidedUnpack") decidedUnpackDataConKey
metaDataDataConName = dcQual gHC_GENERICS (fsLit "MetaData") metaDataDataConKey
metaConsDataConName = dcQual gHC_GENERICS (fsLit "MetaCons") metaConsDataConKey
metaSelDataConName = dcQual gHC_GENERICS (fsLit "MetaSel") metaSelDataConKey
-- Primitive Int
divIntName, modIntName :: Name
divIntName = varQual gHC_CLASSES (fsLit "divInt#") divIntIdKey
modIntName = varQual gHC_CLASSES (fsLit "modInt#") modIntIdKey
-- Base strings
unpackCStringName, unpackCStringFoldrName,
unpackCStringUtf8Name, eqStringName :: Name
unpackCStringName = varQual gHC_CSTRING (fsLit "unpackCString#") unpackCStringIdKey
unpackCStringFoldrName = varQual gHC_CSTRING (fsLit "unpackFoldrCString#") unpackCStringFoldrIdKey
unpackCStringUtf8Name = varQual gHC_CSTRING (fsLit "unpackCStringUtf8#") unpackCStringUtf8IdKey
eqStringName = varQual gHC_BASE (fsLit "eqString") eqStringIdKey
-- The 'inline' function
inlineIdName :: Name
inlineIdName = varQual gHC_MAGIC (fsLit "inline") inlineIdKey
-- Base classes (Eq, Ord, Functor)
fmapName, eqClassName, eqName, ordClassName, geName, functorClassName :: Name
eqClassName = clsQual gHC_CLASSES (fsLit "Eq") eqClassKey
eqName = varQual gHC_CLASSES (fsLit "==") eqClassOpKey
ordClassName = clsQual gHC_CLASSES (fsLit "Ord") ordClassKey
geName = varQual gHC_CLASSES (fsLit ">=") geClassOpKey
functorClassName = clsQual gHC_BASE (fsLit "Functor") functorClassKey
fmapName = varQual gHC_BASE (fsLit "fmap") fmapClassOpKey
-- Class Monad
monadClassName, thenMName, bindMName, returnMName, failMName_preMFP :: Name
monadClassName = clsQual gHC_BASE (fsLit "Monad") monadClassKey
thenMName = varQual gHC_BASE (fsLit ">>") thenMClassOpKey
bindMName = varQual gHC_BASE (fsLit ">>=") bindMClassOpKey
returnMName = varQual gHC_BASE (fsLit "return") returnMClassOpKey
failMName_preMFP = varQual gHC_BASE (fsLit "fail") failMClassOpKey_preMFP
-- Class MonadFail
monadFailClassName, failMName :: Name
monadFailClassName = clsQual mONAD_FAIL (fsLit "MonadFail") monadFailClassKey
failMName = varQual mONAD_FAIL (fsLit "fail") failMClassOpKey
-- Class Applicative
applicativeClassName, pureAName, apAName, thenAName :: Name
applicativeClassName = clsQual gHC_BASE (fsLit "Applicative") applicativeClassKey
apAName = varQual gHC_BASE (fsLit "<*>") apAClassOpKey
pureAName = varQual gHC_BASE (fsLit "pure") pureAClassOpKey
thenAName = varQual gHC_BASE (fsLit "*>") thenAClassOpKey
-- Classes (Foldable, Traversable)
foldableClassName, traversableClassName :: Name
foldableClassName = clsQual dATA_FOLDABLE (fsLit "Foldable") foldableClassKey
traversableClassName = clsQual dATA_TRAVERSABLE (fsLit "Traversable") traversableClassKey
-- Classes (Semigroup, Monoid)
semigroupClassName, sappendName :: Name
semigroupClassName = clsQual dATA_SEMIGROUP (fsLit "Semigroup") semigroupClassKey
sappendName = varQual dATA_SEMIGROUP (fsLit "<>") sappendClassOpKey
monoidClassName, memptyName, mappendName, mconcatName :: Name
monoidClassName = clsQual gHC_BASE (fsLit "Monoid") monoidClassKey
memptyName = varQual gHC_BASE (fsLit "mempty") memptyClassOpKey
mappendName = varQual gHC_BASE (fsLit "mappend") mappendClassOpKey
mconcatName = varQual gHC_BASE (fsLit "mconcat") mconcatClassOpKey
-- AMP additions
joinMName, alternativeClassName :: Name
joinMName = varQual gHC_BASE (fsLit "join") joinMIdKey
alternativeClassName = clsQual mONAD (fsLit "Alternative") alternativeClassKey
--
joinMIdKey, apAClassOpKey, pureAClassOpKey, thenAClassOpKey,
alternativeClassKey :: Unique
joinMIdKey = mkPreludeMiscIdUnique 750
apAClassOpKey = mkPreludeMiscIdUnique 751 -- <*>
pureAClassOpKey = mkPreludeMiscIdUnique 752
thenAClassOpKey = mkPreludeMiscIdUnique 753
alternativeClassKey = mkPreludeMiscIdUnique 754
-- Functions for GHC extensions
groupWithName :: Name
groupWithName = varQual gHC_EXTS (fsLit "groupWith") groupWithIdKey
-- Random PrelBase functions
fromStringName, otherwiseIdName, foldrName, buildName, augmentName,
mapName, appendName, assertName,
breakpointName, breakpointCondName, breakpointAutoName,
opaqueTyConName :: Name
fromStringName = varQual dATA_STRING (fsLit "fromString") fromStringClassOpKey
otherwiseIdName = varQual gHC_BASE (fsLit "otherwise") otherwiseIdKey
foldrName = varQual gHC_BASE (fsLit "foldr") foldrIdKey
buildName = varQual gHC_BASE (fsLit "build") buildIdKey
augmentName = varQual gHC_BASE (fsLit "augment") augmentIdKey
mapName = varQual gHC_BASE (fsLit "map") mapIdKey
appendName = varQual gHC_BASE (fsLit "++") appendIdKey
assertName = varQual gHC_BASE (fsLit "assert") assertIdKey
breakpointName = varQual gHC_BASE (fsLit "breakpoint") breakpointIdKey
breakpointCondName = varQual gHC_BASE (fsLit "breakpointCond") breakpointCondIdKey
breakpointAutoName = varQual gHC_BASE (fsLit "breakpointAuto") breakpointAutoIdKey
opaqueTyConName = tcQual gHC_BASE (fsLit "Opaque") opaqueTyConKey
breakpointJumpName :: Name
breakpointJumpName
= mkInternalName
breakpointJumpIdKey
(mkOccNameFS varName (fsLit "breakpointJump"))
noSrcSpan
breakpointCondJumpName :: Name
breakpointCondJumpName
= mkInternalName
breakpointCondJumpIdKey
(mkOccNameFS varName (fsLit "breakpointCondJump"))
noSrcSpan
breakpointAutoJumpName :: Name
breakpointAutoJumpName
= mkInternalName
breakpointAutoJumpIdKey
(mkOccNameFS varName (fsLit "breakpointAutoJump"))
noSrcSpan
-- PrelTup
fstName, sndName :: Name
fstName = varQual dATA_TUPLE (fsLit "fst") fstIdKey
sndName = varQual dATA_TUPLE (fsLit "snd") sndIdKey
-- Module GHC.Num
numClassName, fromIntegerName, minusName, negateName :: Name
numClassName = clsQual gHC_NUM (fsLit "Num") numClassKey
fromIntegerName = varQual gHC_NUM (fsLit "fromInteger") fromIntegerClassOpKey
minusName = varQual gHC_NUM (fsLit "-") minusClassOpKey
negateName = varQual gHC_NUM (fsLit "negate") negateClassOpKey
integerTyConName, mkIntegerName, integerSDataConName,
integerToWord64Name, integerToInt64Name,
word64ToIntegerName, int64ToIntegerName,
plusIntegerName, timesIntegerName, smallIntegerName,
wordToIntegerName,
integerToWordName, integerToIntName, minusIntegerName,
negateIntegerName, eqIntegerPrimName, neqIntegerPrimName,
absIntegerName, signumIntegerName,
leIntegerPrimName, gtIntegerPrimName, ltIntegerPrimName, geIntegerPrimName,
compareIntegerName, quotRemIntegerName, divModIntegerName,
quotIntegerName, remIntegerName, divIntegerName, modIntegerName,
floatFromIntegerName, doubleFromIntegerName,
encodeFloatIntegerName, encodeDoubleIntegerName,
decodeDoubleIntegerName,
gcdIntegerName, lcmIntegerName,
andIntegerName, orIntegerName, xorIntegerName, complementIntegerName,
shiftLIntegerName, shiftRIntegerName, bitIntegerName :: Name
integerTyConName = tcQual gHC_INTEGER_TYPE (fsLit "Integer") integerTyConKey
integerSDataConName = dcQual gHC_INTEGER_TYPE (fsLit n) integerSDataConKey
where n = case cIntegerLibraryType of
IntegerGMP -> "S#"
IntegerSimple -> panic "integerSDataConName evaluated for integer-simple"
mkIntegerName = varQual gHC_INTEGER_TYPE (fsLit "mkInteger") mkIntegerIdKey
integerToWord64Name = varQual gHC_INTEGER_TYPE (fsLit "integerToWord64") integerToWord64IdKey
integerToInt64Name = varQual gHC_INTEGER_TYPE (fsLit "integerToInt64") integerToInt64IdKey
word64ToIntegerName = varQual gHC_INTEGER_TYPE (fsLit "word64ToInteger") word64ToIntegerIdKey
int64ToIntegerName = varQual gHC_INTEGER_TYPE (fsLit "int64ToInteger") int64ToIntegerIdKey
plusIntegerName = varQual gHC_INTEGER_TYPE (fsLit "plusInteger") plusIntegerIdKey
timesIntegerName = varQual gHC_INTEGER_TYPE (fsLit "timesInteger") timesIntegerIdKey
smallIntegerName = varQual gHC_INTEGER_TYPE (fsLit "smallInteger") smallIntegerIdKey
wordToIntegerName = varQual gHC_INTEGER_TYPE (fsLit "wordToInteger") wordToIntegerIdKey
integerToWordName = varQual gHC_INTEGER_TYPE (fsLit "integerToWord") integerToWordIdKey
integerToIntName = varQual gHC_INTEGER_TYPE (fsLit "integerToInt") integerToIntIdKey
minusIntegerName = varQual gHC_INTEGER_TYPE (fsLit "minusInteger") minusIntegerIdKey
negateIntegerName = varQual gHC_INTEGER_TYPE (fsLit "negateInteger") negateIntegerIdKey
eqIntegerPrimName = varQual gHC_INTEGER_TYPE (fsLit "eqInteger#") eqIntegerPrimIdKey
neqIntegerPrimName = varQual gHC_INTEGER_TYPE (fsLit "neqInteger#") neqIntegerPrimIdKey
absIntegerName = varQual gHC_INTEGER_TYPE (fsLit "absInteger") absIntegerIdKey
signumIntegerName = varQual gHC_INTEGER_TYPE (fsLit "signumInteger") signumIntegerIdKey
leIntegerPrimName = varQual gHC_INTEGER_TYPE (fsLit "leInteger#") leIntegerPrimIdKey
gtIntegerPrimName = varQual gHC_INTEGER_TYPE (fsLit "gtInteger#") gtIntegerPrimIdKey
ltIntegerPrimName = varQual gHC_INTEGER_TYPE (fsLit "ltInteger#") ltIntegerPrimIdKey
geIntegerPrimName = varQual gHC_INTEGER_TYPE (fsLit "geInteger#") geIntegerPrimIdKey
compareIntegerName = varQual gHC_INTEGER_TYPE (fsLit "compareInteger") compareIntegerIdKey
quotRemIntegerName = varQual gHC_INTEGER_TYPE (fsLit "quotRemInteger") quotRemIntegerIdKey
divModIntegerName = varQual gHC_INTEGER_TYPE (fsLit "divModInteger") divModIntegerIdKey
quotIntegerName = varQual gHC_INTEGER_TYPE (fsLit "quotInteger") quotIntegerIdKey
remIntegerName = varQual gHC_INTEGER_TYPE (fsLit "remInteger") remIntegerIdKey
divIntegerName = varQual gHC_INTEGER_TYPE (fsLit "divInteger") divIntegerIdKey
modIntegerName = varQual gHC_INTEGER_TYPE (fsLit "modInteger") modIntegerIdKey
floatFromIntegerName = varQual gHC_INTEGER_TYPE (fsLit "floatFromInteger") floatFromIntegerIdKey
doubleFromIntegerName = varQual gHC_INTEGER_TYPE (fsLit "doubleFromInteger") doubleFromIntegerIdKey
encodeFloatIntegerName = varQual gHC_INTEGER_TYPE (fsLit "encodeFloatInteger") encodeFloatIntegerIdKey
encodeDoubleIntegerName = varQual gHC_INTEGER_TYPE (fsLit "encodeDoubleInteger") encodeDoubleIntegerIdKey
decodeDoubleIntegerName = varQual gHC_INTEGER_TYPE (fsLit "decodeDoubleInteger") decodeDoubleIntegerIdKey
gcdIntegerName = varQual gHC_INTEGER_TYPE (fsLit "gcdInteger") gcdIntegerIdKey
lcmIntegerName = varQual gHC_INTEGER_TYPE (fsLit "lcmInteger") lcmIntegerIdKey
andIntegerName = varQual gHC_INTEGER_TYPE (fsLit "andInteger") andIntegerIdKey
orIntegerName = varQual gHC_INTEGER_TYPE (fsLit "orInteger") orIntegerIdKey
xorIntegerName = varQual gHC_INTEGER_TYPE (fsLit "xorInteger") xorIntegerIdKey
complementIntegerName = varQual gHC_INTEGER_TYPE (fsLit "complementInteger") complementIntegerIdKey
shiftLIntegerName = varQual gHC_INTEGER_TYPE (fsLit "shiftLInteger") shiftLIntegerIdKey
shiftRIntegerName = varQual gHC_INTEGER_TYPE (fsLit "shiftRInteger") shiftRIntegerIdKey
bitIntegerName = varQual gHC_INTEGER_TYPE (fsLit "bitInteger") bitIntegerIdKey
-- GHC.Real types and classes
rationalTyConName, ratioTyConName, ratioDataConName, realClassName,
integralClassName, realFracClassName, fractionalClassName,
fromRationalName, toIntegerName, toRationalName, fromIntegralName,
realToFracName :: Name
rationalTyConName = tcQual gHC_REAL (fsLit "Rational") rationalTyConKey
ratioTyConName = tcQual gHC_REAL (fsLit "Ratio") ratioTyConKey
ratioDataConName = dcQual gHC_REAL (fsLit ":%") ratioDataConKey
realClassName = clsQual gHC_REAL (fsLit "Real") realClassKey
integralClassName = clsQual gHC_REAL (fsLit "Integral") integralClassKey
realFracClassName = clsQual gHC_REAL (fsLit "RealFrac") realFracClassKey
fractionalClassName = clsQual gHC_REAL (fsLit "Fractional") fractionalClassKey
fromRationalName = varQual gHC_REAL (fsLit "fromRational") fromRationalClassOpKey
toIntegerName = varQual gHC_REAL (fsLit "toInteger") toIntegerClassOpKey
toRationalName = varQual gHC_REAL (fsLit "toRational") toRationalClassOpKey
fromIntegralName = varQual gHC_REAL (fsLit "fromIntegral") fromIntegralIdKey
realToFracName = varQual gHC_REAL (fsLit "realToFrac") realToFracIdKey
-- PrelFloat classes
floatingClassName, realFloatClassName :: Name
floatingClassName = clsQual gHC_FLOAT (fsLit "Floating") floatingClassKey
realFloatClassName = clsQual gHC_FLOAT (fsLit "RealFloat") realFloatClassKey
-- other GHC.Float functions
rationalToFloatName, rationalToDoubleName :: Name
rationalToFloatName = varQual gHC_FLOAT (fsLit "rationalToFloat") rationalToFloatIdKey
rationalToDoubleName = varQual gHC_FLOAT (fsLit "rationalToDouble") rationalToDoubleIdKey
-- Class Ix
ixClassName :: Name
ixClassName = clsQual gHC_ARR (fsLit "Ix") ixClassKey
-- Typeable representation types
trModuleTyConName
, trModuleDataConName
, trNameTyConName
, trNameSDataConName
, trNameDDataConName
, trTyConTyConName
, trTyConDataConName
:: Name
trModuleTyConName = tcQual gHC_TYPES (fsLit "Module") trModuleTyConKey
trModuleDataConName = dcQual gHC_TYPES (fsLit "Module") trModuleDataConKey
trNameTyConName = tcQual gHC_TYPES (fsLit "TrName") trNameTyConKey
trNameSDataConName = dcQual gHC_TYPES (fsLit "TrNameS") trNameSDataConKey
trNameDDataConName = dcQual gHC_TYPES (fsLit "TrNameD") trNameDDataConKey
trTyConTyConName = tcQual gHC_TYPES (fsLit "TyCon") trTyConTyConKey
trTyConDataConName = dcQual gHC_TYPES (fsLit "TyCon") trTyConDataConKey
-- Class Typeable, and functions for constructing `Typeable` dictionaries
typeableClassName
, typeRepTyConName
, mkPolyTyConAppName
, mkAppTyName
, typeRepIdName
, typeNatTypeRepName
, typeSymbolTypeRepName
, trGhcPrimModuleName
:: Name
typeableClassName = clsQual tYPEABLE_INTERNAL (fsLit "Typeable") typeableClassKey
typeRepTyConName = tcQual tYPEABLE_INTERNAL (fsLit "TypeRep") typeRepTyConKey
typeRepIdName = varQual tYPEABLE_INTERNAL (fsLit "typeRep#") typeRepIdKey
mkPolyTyConAppName = varQual tYPEABLE_INTERNAL (fsLit "mkPolyTyConApp") mkPolyTyConAppKey
mkAppTyName = varQual tYPEABLE_INTERNAL (fsLit "mkAppTy") mkAppTyKey
typeNatTypeRepName = varQual tYPEABLE_INTERNAL (fsLit "typeNatTypeRep") typeNatTypeRepKey
typeSymbolTypeRepName = varQual tYPEABLE_INTERNAL (fsLit "typeSymbolTypeRep") typeSymbolTypeRepKey
-- This is the Typeable 'Module' for GHC.Prim (which has no code, so we place it in GHC.Types).
-- See Note [Grand plan for Typeable] in TcTypeable.
trGhcPrimModuleName = varQual gHC_TYPES (fsLit "tr$ModuleGHCPrim") trGhcPrimModuleKey
-- Custom type errors
errorMessageTypeErrorFamName
, typeErrorTextDataConName
, typeErrorAppendDataConName
, typeErrorVAppendDataConName
, typeErrorShowTypeDataConName
:: Name
errorMessageTypeErrorFamName =
tcQual gHC_TYPELITS (fsLit "TypeError") errorMessageTypeErrorFamKey
typeErrorTextDataConName =
dcQual gHC_TYPELITS (fsLit "Text") typeErrorTextDataConKey
typeErrorAppendDataConName =
dcQual gHC_TYPELITS (fsLit ":<>:") typeErrorAppendDataConKey
typeErrorVAppendDataConName =
dcQual gHC_TYPELITS (fsLit ":$$:") typeErrorVAppendDataConKey
typeErrorShowTypeDataConName =
dcQual gHC_TYPELITS (fsLit "ShowType") typeErrorShowTypeDataConKey
-- Dynamic
toDynName :: Name
toDynName = varQual dYNAMIC (fsLit "toDyn") toDynIdKey
-- Class Data
dataClassName :: Name
dataClassName = clsQual gENERICS (fsLit "Data") dataClassKey
-- Error module
assertErrorName :: Name
assertErrorName = varQual gHC_IO_Exception (fsLit "assertError") assertErrorIdKey
-- Enum module (Enum, Bounded)
enumClassName, enumFromName, enumFromToName, enumFromThenName,
enumFromThenToName, boundedClassName :: Name
enumClassName = clsQual gHC_ENUM (fsLit "Enum") enumClassKey
enumFromName = varQual gHC_ENUM (fsLit "enumFrom") enumFromClassOpKey
enumFromToName = varQual gHC_ENUM (fsLit "enumFromTo") enumFromToClassOpKey
enumFromThenName = varQual gHC_ENUM (fsLit "enumFromThen") enumFromThenClassOpKey
enumFromThenToName = varQual gHC_ENUM (fsLit "enumFromThenTo") enumFromThenToClassOpKey
boundedClassName = clsQual gHC_ENUM (fsLit "Bounded") boundedClassKey
-- List functions
concatName, filterName, zipName :: Name
concatName = varQual gHC_LIST (fsLit "concat") concatIdKey
filterName = varQual gHC_LIST (fsLit "filter") filterIdKey
zipName = varQual gHC_LIST (fsLit "zip") zipIdKey
-- Overloaded lists
isListClassName, fromListName, fromListNName, toListName :: Name
isListClassName = clsQual gHC_EXTS (fsLit "IsList") isListClassKey
fromListName = varQual gHC_EXTS (fsLit "fromList") fromListClassOpKey
fromListNName = varQual gHC_EXTS (fsLit "fromListN") fromListNClassOpKey
toListName = varQual gHC_EXTS (fsLit "toList") toListClassOpKey
-- Class Show
showClassName :: Name
showClassName = clsQual gHC_SHOW (fsLit "Show") showClassKey
-- Class Read
readClassName :: Name
readClassName = clsQual gHC_READ (fsLit "Read") readClassKey
-- Classes Generic and Generic1, Datatype, Constructor and Selector
genClassName, gen1ClassName, datatypeClassName, constructorClassName,
selectorClassName :: Name
genClassName = clsQual gHC_GENERICS (fsLit "Generic") genClassKey
gen1ClassName = clsQual gHC_GENERICS (fsLit "Generic1") gen1ClassKey
datatypeClassName = clsQual gHC_GENERICS (fsLit "Datatype") datatypeClassKey
constructorClassName = clsQual gHC_GENERICS (fsLit "Constructor") constructorClassKey
selectorClassName = clsQual gHC_GENERICS (fsLit "Selector") selectorClassKey
genericClassNames :: [Name]
genericClassNames = [genClassName, gen1ClassName]
-- GHCi things
ghciIoClassName, ghciStepIoMName :: Name
ghciIoClassName = clsQual gHC_GHCI (fsLit "GHCiSandboxIO") ghciIoClassKey
ghciStepIoMName = varQual gHC_GHCI (fsLit "ghciStepIO") ghciStepIoMClassOpKey
-- IO things
ioTyConName, ioDataConName,
thenIOName, bindIOName, returnIOName, failIOName :: Name
ioTyConName = tcQual gHC_TYPES (fsLit "IO") ioTyConKey
ioDataConName = dcQual gHC_TYPES (fsLit "IO") ioDataConKey
thenIOName = varQual gHC_BASE (fsLit "thenIO") thenIOIdKey
bindIOName = varQual gHC_BASE (fsLit "bindIO") bindIOIdKey
returnIOName = varQual gHC_BASE (fsLit "returnIO") returnIOIdKey
failIOName = varQual gHC_IO (fsLit "failIO") failIOIdKey
-- IO things
printName :: Name
printName = varQual sYSTEM_IO (fsLit "print") printIdKey
-- Int, Word, and Addr things
int8TyConName, int16TyConName, int32TyConName, int64TyConName :: Name
int8TyConName = tcQual gHC_INT (fsLit "Int8") int8TyConKey
int16TyConName = tcQual gHC_INT (fsLit "Int16") int16TyConKey
int32TyConName = tcQual gHC_INT (fsLit "Int32") int32TyConKey
int64TyConName = tcQual gHC_INT (fsLit "Int64") int64TyConKey
-- Word module
word16TyConName, word32TyConName, word64TyConName :: Name
word16TyConName = tcQual gHC_WORD (fsLit "Word16") word16TyConKey
word32TyConName = tcQual gHC_WORD (fsLit "Word32") word32TyConKey
word64TyConName = tcQual gHC_WORD (fsLit "Word64") word64TyConKey
-- PrelPtr module
ptrTyConName, funPtrTyConName :: Name
ptrTyConName = tcQual gHC_PTR (fsLit "Ptr") ptrTyConKey
funPtrTyConName = tcQual gHC_PTR (fsLit "FunPtr") funPtrTyConKey
-- Foreign objects and weak pointers
stablePtrTyConName, newStablePtrName :: Name
stablePtrTyConName = tcQual gHC_STABLE (fsLit "StablePtr") stablePtrTyConKey
newStablePtrName = varQual gHC_STABLE (fsLit "newStablePtr") newStablePtrIdKey
-- Recursive-do notation
monadFixClassName, mfixName :: Name
monadFixClassName = clsQual mONAD_FIX (fsLit "MonadFix") monadFixClassKey
mfixName = varQual mONAD_FIX (fsLit "mfix") mfixIdKey
-- Arrow notation
arrAName, composeAName, firstAName, appAName, choiceAName, loopAName :: Name
arrAName = varQual aRROW (fsLit "arr") arrAIdKey
composeAName = varQual gHC_DESUGAR (fsLit ">>>") composeAIdKey
firstAName = varQual aRROW (fsLit "first") firstAIdKey
appAName = varQual aRROW (fsLit "app") appAIdKey
choiceAName = varQual aRROW (fsLit "|||") choiceAIdKey
loopAName = varQual aRROW (fsLit "loop") loopAIdKey
-- Monad comprehensions
guardMName, liftMName, mzipName :: Name
guardMName = varQual mONAD (fsLit "guard") guardMIdKey
liftMName = varQual mONAD (fsLit "liftM") liftMIdKey
mzipName = varQual mONAD_ZIP (fsLit "mzip") mzipIdKey
-- Annotation type checking
toAnnotationWrapperName :: Name
toAnnotationWrapperName = varQual gHC_DESUGAR (fsLit "toAnnotationWrapper") toAnnotationWrapperIdKey
-- Other classes, needed for type defaulting
monadPlusClassName, randomClassName, randomGenClassName,
isStringClassName :: Name
monadPlusClassName = clsQual mONAD (fsLit "MonadPlus") monadPlusClassKey
randomClassName = clsQual rANDOM (fsLit "Random") randomClassKey
randomGenClassName = clsQual rANDOM (fsLit "RandomGen") randomGenClassKey
isStringClassName = clsQual dATA_STRING (fsLit "IsString") isStringClassKey
-- Type-level naturals
knownNatClassName :: Name
knownNatClassName = clsQual gHC_TYPELITS (fsLit "KnownNat") knownNatClassNameKey
knownSymbolClassName :: Name
knownSymbolClassName = clsQual gHC_TYPELITS (fsLit "KnownSymbol") knownSymbolClassNameKey
-- Overloaded labels
isLabelClassName :: Name
isLabelClassName
= clsQual gHC_OVER_LABELS (fsLit "IsLabel") isLabelClassNameKey
-- Implicit Parameters
ipClassName :: Name
ipClassName
= clsQual gHC_CLASSES (fsLit "IP") ipClassKey
-- Source Locations
callStackTyConName, emptyCallStackName, pushCallStackName,
srcLocDataConName :: Name
callStackTyConName
= tcQual gHC_STACK_TYPES (fsLit "CallStack") callStackTyConKey
emptyCallStackName
= varQual gHC_STACK_TYPES (fsLit "emptyCallStack") emptyCallStackKey
pushCallStackName
= varQual gHC_STACK_TYPES (fsLit "pushCallStack") pushCallStackKey
srcLocDataConName
= dcQual gHC_STACK_TYPES (fsLit "SrcLoc") srcLocDataConKey
-- plugins
pLUGINS :: Module
pLUGINS = mkThisGhcModule (fsLit "Plugins")
pluginTyConName :: Name
pluginTyConName = tcQual pLUGINS (fsLit "Plugin") pluginTyConKey
frontendPluginTyConName :: Name
frontendPluginTyConName = tcQual pLUGINS (fsLit "FrontendPlugin") frontendPluginTyConKey
-- Static pointers
staticPtrInfoTyConName :: Name
staticPtrInfoTyConName =
tcQual gHC_STATICPTR (fsLit "StaticPtrInfo") staticPtrInfoTyConKey
staticPtrInfoDataConName :: Name
staticPtrInfoDataConName =
dcQual gHC_STATICPTR (fsLit "StaticPtrInfo") staticPtrInfoDataConKey
staticPtrTyConName :: Name
staticPtrTyConName =
tcQual gHC_STATICPTR (fsLit "StaticPtr") staticPtrTyConKey
staticPtrDataConName :: Name
staticPtrDataConName =
dcQual gHC_STATICPTR (fsLit "StaticPtr") staticPtrDataConKey
fromStaticPtrName :: Name
fromStaticPtrName =
varQual gHC_STATICPTR (fsLit "fromStaticPtr") fromStaticPtrClassOpKey
fingerprintDataConName :: Name
fingerprintDataConName =
dcQual gHC_FINGERPRINT_TYPE (fsLit "Fingerprint") fingerprintDataConKey
-- homogeneous equality. See Note [The equality types story] in TysPrim
eqTyConName :: Name
eqTyConName = tcQual dATA_TYPE_EQUALITY (fsLit "~") eqTyConKey
{-
************************************************************************
* *
\subsection{Local helpers}
* *
************************************************************************
All these are original names; hence mkOrig
-}
varQual, tcQual, clsQual, dcQual :: Module -> FastString -> Unique -> Name
varQual = mk_known_key_name varName
tcQual = mk_known_key_name tcName
clsQual = mk_known_key_name clsName
dcQual = mk_known_key_name dataName
mk_known_key_name :: NameSpace -> Module -> FastString -> Unique -> Name
mk_known_key_name space modu str unique
= mkExternalName unique modu (mkOccNameFS space str) noSrcSpan
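-- For instance (an illustrative expansion, not part of the original file), a
-- definition earlier in this file such as
--
--   fstName = varQual dATA_TUPLE (fsLit "fst") fstIdKey
--
-- goes through mk_known_key_name and amounts to
--
--   mkExternalName fstIdKey dATA_TUPLE (mkOccNameFS varName (fsLit "fst")) noSrcSpan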
{-
************************************************************************
* *
\subsubsection[Uniques-prelude-Classes]{@Uniques@ for wired-in @Classes@}
* *
************************************************************************
-- MetaHaskell extension: hand-allocate keys here
-}
boundedClassKey, enumClassKey, eqClassKey, floatingClassKey,
fractionalClassKey, integralClassKey, monadClassKey, dataClassKey,
functorClassKey, numClassKey, ordClassKey, readClassKey, realClassKey,
realFloatClassKey, realFracClassKey, showClassKey, ixClassKey :: Unique
boundedClassKey = mkPreludeClassUnique 1
enumClassKey = mkPreludeClassUnique 2
eqClassKey = mkPreludeClassUnique 3
floatingClassKey = mkPreludeClassUnique 5
fractionalClassKey = mkPreludeClassUnique 6
integralClassKey = mkPreludeClassUnique 7
monadClassKey = mkPreludeClassUnique 8
dataClassKey = mkPreludeClassUnique 9
functorClassKey = mkPreludeClassUnique 10
numClassKey = mkPreludeClassUnique 11
ordClassKey = mkPreludeClassUnique 12
readClassKey = mkPreludeClassUnique 13
realClassKey = mkPreludeClassUnique 14
realFloatClassKey = mkPreludeClassUnique 15
realFracClassKey = mkPreludeClassUnique 16
showClassKey = mkPreludeClassUnique 17
ixClassKey = mkPreludeClassUnique 18
typeableClassKey, typeable1ClassKey, typeable2ClassKey, typeable3ClassKey,
typeable4ClassKey, typeable5ClassKey, typeable6ClassKey, typeable7ClassKey
:: Unique
typeableClassKey = mkPreludeClassUnique 20
typeable1ClassKey = mkPreludeClassUnique 21
typeable2ClassKey = mkPreludeClassUnique 22
typeable3ClassKey = mkPreludeClassUnique 23
typeable4ClassKey = mkPreludeClassUnique 24
typeable5ClassKey = mkPreludeClassUnique 25
typeable6ClassKey = mkPreludeClassUnique 26
typeable7ClassKey = mkPreludeClassUnique 27
monadFixClassKey :: Unique
monadFixClassKey = mkPreludeClassUnique 28
monadFailClassKey :: Unique
monadFailClassKey = mkPreludeClassUnique 29
monadPlusClassKey, randomClassKey, randomGenClassKey :: Unique
monadPlusClassKey = mkPreludeClassUnique 30
randomClassKey = mkPreludeClassUnique 31
randomGenClassKey = mkPreludeClassUnique 32
isStringClassKey :: Unique
isStringClassKey = mkPreludeClassUnique 33
applicativeClassKey, foldableClassKey, traversableClassKey :: Unique
applicativeClassKey = mkPreludeClassUnique 34
foldableClassKey = mkPreludeClassUnique 35
traversableClassKey = mkPreludeClassUnique 36
genClassKey, gen1ClassKey, datatypeClassKey, constructorClassKey,
selectorClassKey :: Unique
genClassKey = mkPreludeClassUnique 37
gen1ClassKey = mkPreludeClassUnique 38
datatypeClassKey = mkPreludeClassUnique 39
constructorClassKey = mkPreludeClassUnique 40
selectorClassKey = mkPreludeClassUnique 41
-- KnownNat: see Note [KnownNat & KnownSymbol and EvLit] in TcEvidence
knownNatClassNameKey :: Unique
knownNatClassNameKey = mkPreludeClassUnique 42
-- KnownSymbol: see Note [KnownNat & KnownSymbol and EvLit] in TcEvidence
knownSymbolClassNameKey :: Unique
knownSymbolClassNameKey = mkPreludeClassUnique 43
ghciIoClassKey :: Unique
ghciIoClassKey = mkPreludeClassUnique 44
isLabelClassNameKey :: Unique
isLabelClassNameKey = mkPreludeClassUnique 45
semigroupClassKey, monoidClassKey :: Unique
semigroupClassKey = mkPreludeClassUnique 46
monoidClassKey = mkPreludeClassUnique 47
-- Implicit Parameters
ipClassKey :: Unique
ipClassKey = mkPreludeClassUnique 48
---------------- Template Haskell -------------------
-- THNames.hs: USES ClassUniques 200-299
-----------------------------------------------------
{-
************************************************************************
* *
\subsubsection[Uniques-prelude-TyCons]{@Uniques@ for wired-in @TyCons@}
* *
************************************************************************
-}
addrPrimTyConKey, arrayPrimTyConKey, arrayArrayPrimTyConKey, boolTyConKey,
byteArrayPrimTyConKey, charPrimTyConKey, charTyConKey, doublePrimTyConKey,
doubleTyConKey, floatPrimTyConKey, floatTyConKey, funTyConKey,
intPrimTyConKey, intTyConKey, int8TyConKey, int16TyConKey,
int32PrimTyConKey, int32TyConKey, int64PrimTyConKey, int64TyConKey,
integerTyConKey, listTyConKey, foreignObjPrimTyConKey, maybeTyConKey,
weakPrimTyConKey, mutableArrayPrimTyConKey, mutableArrayArrayPrimTyConKey,
mutableByteArrayPrimTyConKey, orderingTyConKey, mVarPrimTyConKey,
ratioTyConKey, rationalTyConKey, realWorldTyConKey, stablePtrPrimTyConKey,
stablePtrTyConKey, eqTyConKey, heqTyConKey,
smallArrayPrimTyConKey, smallMutableArrayPrimTyConKey :: Unique
addrPrimTyConKey = mkPreludeTyConUnique 1
arrayPrimTyConKey = mkPreludeTyConUnique 3
boolTyConKey = mkPreludeTyConUnique 4
byteArrayPrimTyConKey = mkPreludeTyConUnique 5
charPrimTyConKey = mkPreludeTyConUnique 7
charTyConKey = mkPreludeTyConUnique 8
doublePrimTyConKey = mkPreludeTyConUnique 9
doubleTyConKey = mkPreludeTyConUnique 10
floatPrimTyConKey = mkPreludeTyConUnique 11
floatTyConKey = mkPreludeTyConUnique 12
funTyConKey = mkPreludeTyConUnique 13
intPrimTyConKey = mkPreludeTyConUnique 14
intTyConKey = mkPreludeTyConUnique 15
int8TyConKey = mkPreludeTyConUnique 16
int16TyConKey = mkPreludeTyConUnique 17
int32PrimTyConKey = mkPreludeTyConUnique 18
int32TyConKey = mkPreludeTyConUnique 19
int64PrimTyConKey = mkPreludeTyConUnique 20
int64TyConKey = mkPreludeTyConUnique 21
integerTyConKey = mkPreludeTyConUnique 22
listTyConKey = mkPreludeTyConUnique 24
foreignObjPrimTyConKey = mkPreludeTyConUnique 25
maybeTyConKey = mkPreludeTyConUnique 26
weakPrimTyConKey = mkPreludeTyConUnique 27
mutableArrayPrimTyConKey = mkPreludeTyConUnique 28
mutableByteArrayPrimTyConKey = mkPreludeTyConUnique 29
orderingTyConKey = mkPreludeTyConUnique 30
mVarPrimTyConKey = mkPreludeTyConUnique 31
ratioTyConKey = mkPreludeTyConUnique 32
rationalTyConKey = mkPreludeTyConUnique 33
realWorldTyConKey = mkPreludeTyConUnique 34
stablePtrPrimTyConKey = mkPreludeTyConUnique 35
stablePtrTyConKey = mkPreludeTyConUnique 36
eqTyConKey = mkPreludeTyConUnique 38
heqTyConKey = mkPreludeTyConUnique 39
arrayArrayPrimTyConKey = mkPreludeTyConUnique 40
mutableArrayArrayPrimTyConKey = mkPreludeTyConUnique 41
statePrimTyConKey, stableNamePrimTyConKey, stableNameTyConKey,
mutVarPrimTyConKey, ioTyConKey,
wordPrimTyConKey, wordTyConKey, word8TyConKey, word16TyConKey,
word32PrimTyConKey, word32TyConKey, word64PrimTyConKey, word64TyConKey,
liftedConKey, unliftedConKey, anyBoxConKey, kindConKey, boxityConKey,
typeConKey, threadIdPrimTyConKey, bcoPrimTyConKey, ptrTyConKey,
funPtrTyConKey, tVarPrimTyConKey, eqPrimTyConKey,
eqReprPrimTyConKey, eqPhantPrimTyConKey, voidPrimTyConKey,
compactPrimTyConKey :: Unique
statePrimTyConKey = mkPreludeTyConUnique 50
stableNamePrimTyConKey = mkPreludeTyConUnique 51
stableNameTyConKey = mkPreludeTyConUnique 52
eqPrimTyConKey = mkPreludeTyConUnique 53
eqReprPrimTyConKey = mkPreludeTyConUnique 54
eqPhantPrimTyConKey = mkPreludeTyConUnique 55
mutVarPrimTyConKey = mkPreludeTyConUnique 56
ioTyConKey = mkPreludeTyConUnique 57
voidPrimTyConKey = mkPreludeTyConUnique 58
wordPrimTyConKey = mkPreludeTyConUnique 59
wordTyConKey = mkPreludeTyConUnique 60
word8TyConKey = mkPreludeTyConUnique 61
word16TyConKey = mkPreludeTyConUnique 62
word32PrimTyConKey = mkPreludeTyConUnique 63
word32TyConKey = mkPreludeTyConUnique 64
word64PrimTyConKey = mkPreludeTyConUnique 65
word64TyConKey = mkPreludeTyConUnique 66
liftedConKey = mkPreludeTyConUnique 67
unliftedConKey = mkPreludeTyConUnique 68
anyBoxConKey = mkPreludeTyConUnique 69
kindConKey = mkPreludeTyConUnique 70
boxityConKey = mkPreludeTyConUnique 71
typeConKey = mkPreludeTyConUnique 72
threadIdPrimTyConKey = mkPreludeTyConUnique 73
bcoPrimTyConKey = mkPreludeTyConUnique 74
ptrTyConKey = mkPreludeTyConUnique 75
funPtrTyConKey = mkPreludeTyConUnique 76
tVarPrimTyConKey = mkPreludeTyConUnique 77
compactPrimTyConKey = mkPreludeTyConUnique 78
-- Parallel array type constructor
parrTyConKey :: Unique
parrTyConKey = mkPreludeTyConUnique 82
-- dotnet interop
objectTyConKey :: Unique
objectTyConKey = mkPreludeTyConUnique 83
eitherTyConKey :: Unique
eitherTyConKey = mkPreludeTyConUnique 84
-- Kind constructors
liftedTypeKindTyConKey, tYPETyConKey,
unliftedTypeKindTyConKey, constraintKindTyConKey,
starKindTyConKey, unicodeStarKindTyConKey, runtimeRepTyConKey,
vecCountTyConKey, vecElemTyConKey :: Unique
liftedTypeKindTyConKey = mkPreludeTyConUnique 87
tYPETyConKey = mkPreludeTyConUnique 88
unliftedTypeKindTyConKey = mkPreludeTyConUnique 89
constraintKindTyConKey = mkPreludeTyConUnique 92
starKindTyConKey = mkPreludeTyConUnique 93
unicodeStarKindTyConKey = mkPreludeTyConUnique 94
runtimeRepTyConKey = mkPreludeTyConUnique 95
vecCountTyConKey = mkPreludeTyConUnique 96
vecElemTyConKey = mkPreludeTyConUnique 97
pluginTyConKey, frontendPluginTyConKey :: Unique
pluginTyConKey = mkPreludeTyConUnique 102
frontendPluginTyConKey = mkPreludeTyConUnique 103
unknownTyConKey, unknown1TyConKey, unknown2TyConKey, unknown3TyConKey,
opaqueTyConKey :: Unique
unknownTyConKey = mkPreludeTyConUnique 129
unknown1TyConKey = mkPreludeTyConUnique 130
unknown2TyConKey = mkPreludeTyConUnique 131
unknown3TyConKey = mkPreludeTyConUnique 132
opaqueTyConKey = mkPreludeTyConUnique 133
-- Generics (Unique keys)
v1TyConKey, u1TyConKey, par1TyConKey, rec1TyConKey,
k1TyConKey, m1TyConKey, sumTyConKey, prodTyConKey,
compTyConKey, rTyConKey, dTyConKey,
cTyConKey, sTyConKey, rec0TyConKey,
d1TyConKey, c1TyConKey, s1TyConKey, noSelTyConKey,
repTyConKey, rep1TyConKey, uRecTyConKey,
uAddrTyConKey, uCharTyConKey, uDoubleTyConKey,
uFloatTyConKey, uIntTyConKey, uWordTyConKey :: Unique
v1TyConKey = mkPreludeTyConUnique 135
u1TyConKey = mkPreludeTyConUnique 136
par1TyConKey = mkPreludeTyConUnique 137
rec1TyConKey = mkPreludeTyConUnique 138
k1TyConKey = mkPreludeTyConUnique 139
m1TyConKey = mkPreludeTyConUnique 140
sumTyConKey = mkPreludeTyConUnique 141
prodTyConKey = mkPreludeTyConUnique 142
compTyConKey = mkPreludeTyConUnique 143
rTyConKey = mkPreludeTyConUnique 144
dTyConKey = mkPreludeTyConUnique 146
cTyConKey = mkPreludeTyConUnique 147
sTyConKey = mkPreludeTyConUnique 148
rec0TyConKey = mkPreludeTyConUnique 149
d1TyConKey = mkPreludeTyConUnique 151
c1TyConKey = mkPreludeTyConUnique 152
s1TyConKey = mkPreludeTyConUnique 153
noSelTyConKey = mkPreludeTyConUnique 154
repTyConKey = mkPreludeTyConUnique 155
rep1TyConKey = mkPreludeTyConUnique 156
uRecTyConKey = mkPreludeTyConUnique 157
uAddrTyConKey = mkPreludeTyConUnique 158
uCharTyConKey = mkPreludeTyConUnique 159
uDoubleTyConKey = mkPreludeTyConUnique 160
uFloatTyConKey = mkPreludeTyConUnique 161
uIntTyConKey = mkPreludeTyConUnique 162
uWordTyConKey = mkPreludeTyConUnique 163
-- Type-level naturals
typeNatKindConNameKey, typeSymbolKindConNameKey,
typeNatAddTyFamNameKey, typeNatMulTyFamNameKey, typeNatExpTyFamNameKey,
typeNatLeqTyFamNameKey, typeNatSubTyFamNameKey
, typeSymbolCmpTyFamNameKey, typeNatCmpTyFamNameKey
:: Unique
typeNatKindConNameKey = mkPreludeTyConUnique 164
typeSymbolKindConNameKey = mkPreludeTyConUnique 165
typeNatAddTyFamNameKey = mkPreludeTyConUnique 166
typeNatMulTyFamNameKey = mkPreludeTyConUnique 167
typeNatExpTyFamNameKey = mkPreludeTyConUnique 168
typeNatLeqTyFamNameKey = mkPreludeTyConUnique 169
typeNatSubTyFamNameKey = mkPreludeTyConUnique 170
typeSymbolCmpTyFamNameKey = mkPreludeTyConUnique 171
typeNatCmpTyFamNameKey = mkPreludeTyConUnique 172
-- Custom user type-errors
errorMessageTypeErrorFamKey :: Unique
errorMessageTypeErrorFamKey = mkPreludeTyConUnique 173
ntTyConKey :: Unique
ntTyConKey = mkPreludeTyConUnique 174
coercibleTyConKey :: Unique
coercibleTyConKey = mkPreludeTyConUnique 175
proxyPrimTyConKey :: Unique
proxyPrimTyConKey = mkPreludeTyConUnique 176
specTyConKey :: Unique
specTyConKey = mkPreludeTyConUnique 177
anyTyConKey :: Unique
anyTyConKey = mkPreludeTyConUnique 178
smallArrayPrimTyConKey = mkPreludeTyConUnique 179
smallMutableArrayPrimTyConKey = mkPreludeTyConUnique 180
staticPtrTyConKey :: Unique
staticPtrTyConKey = mkPreludeTyConUnique 181
staticPtrInfoTyConKey :: Unique
staticPtrInfoTyConKey = mkPreludeTyConUnique 182
callStackTyConKey :: Unique
callStackTyConKey = mkPreludeTyConUnique 183
-- Typeables
typeRepTyConKey :: Unique
typeRepTyConKey = mkPreludeTyConUnique 184
---------------- Template Haskell -------------------
-- THNames.hs: USES TyConUniques 200-299
-----------------------------------------------------
----------------------- SIMD ------------------------
-- USES TyConUniques 300-399
-----------------------------------------------------
#include "primop-vector-uniques.hs-incl"
{-
************************************************************************
* *
\subsubsection[Uniques-prelude-DataCons]{@Uniques@ for wired-in @DataCons@}
* *
************************************************************************
-}
charDataConKey, consDataConKey, doubleDataConKey, falseDataConKey,
floatDataConKey, intDataConKey, integerSDataConKey, nilDataConKey,
ratioDataConKey, stableNameDataConKey, trueDataConKey, wordDataConKey,
word8DataConKey, ioDataConKey, integerDataConKey, heqDataConKey,
coercibleDataConKey, nothingDataConKey, justDataConKey :: Unique
charDataConKey = mkPreludeDataConUnique 1
consDataConKey = mkPreludeDataConUnique 2
doubleDataConKey = mkPreludeDataConUnique 3
falseDataConKey = mkPreludeDataConUnique 4
floatDataConKey = mkPreludeDataConUnique 5
intDataConKey = mkPreludeDataConUnique 6
integerSDataConKey = mkPreludeDataConUnique 7
nothingDataConKey = mkPreludeDataConUnique 8
justDataConKey = mkPreludeDataConUnique 9
nilDataConKey = mkPreludeDataConUnique 11
ratioDataConKey = mkPreludeDataConUnique 12
word8DataConKey = mkPreludeDataConUnique 13
stableNameDataConKey = mkPreludeDataConUnique 14
trueDataConKey = mkPreludeDataConUnique 15
wordDataConKey = mkPreludeDataConUnique 16
ioDataConKey = mkPreludeDataConUnique 17
integerDataConKey = mkPreludeDataConUnique 18
heqDataConKey = mkPreludeDataConUnique 19
-- Generic data constructors
crossDataConKey, inlDataConKey, inrDataConKey, genUnitDataConKey :: Unique
crossDataConKey = mkPreludeDataConUnique 20
inlDataConKey = mkPreludeDataConUnique 21
inrDataConKey = mkPreludeDataConUnique 22
genUnitDataConKey = mkPreludeDataConUnique 23
-- Data constructor for parallel arrays
parrDataConKey :: Unique
parrDataConKey = mkPreludeDataConUnique 24
leftDataConKey, rightDataConKey :: Unique
leftDataConKey = mkPreludeDataConUnique 25
rightDataConKey = mkPreludeDataConUnique 26
ltDataConKey, eqDataConKey, gtDataConKey :: Unique
ltDataConKey = mkPreludeDataConUnique 27
eqDataConKey = mkPreludeDataConUnique 28
gtDataConKey = mkPreludeDataConUnique 29
coercibleDataConKey = mkPreludeDataConUnique 32
staticPtrDataConKey :: Unique
staticPtrDataConKey = mkPreludeDataConUnique 33
staticPtrInfoDataConKey :: Unique
staticPtrInfoDataConKey = mkPreludeDataConUnique 34
fingerprintDataConKey :: Unique
fingerprintDataConKey = mkPreludeDataConUnique 35
srcLocDataConKey :: Unique
srcLocDataConKey = mkPreludeDataConUnique 37
trTyConTyConKey, trTyConDataConKey,
trModuleTyConKey, trModuleDataConKey,
trNameTyConKey, trNameSDataConKey, trNameDDataConKey,
trGhcPrimModuleKey :: Unique
trTyConTyConKey = mkPreludeDataConUnique 41
trTyConDataConKey = mkPreludeDataConUnique 42
trModuleTyConKey = mkPreludeDataConUnique 43
trModuleDataConKey = mkPreludeDataConUnique 44
trNameTyConKey = mkPreludeDataConUnique 45
trNameSDataConKey = mkPreludeDataConUnique 46
trNameDDataConKey = mkPreludeDataConUnique 47
trGhcPrimModuleKey = mkPreludeDataConUnique 48
typeErrorTextDataConKey,
typeErrorAppendDataConKey,
typeErrorVAppendDataConKey,
typeErrorShowTypeDataConKey
:: Unique
typeErrorTextDataConKey = mkPreludeDataConUnique 50
typeErrorAppendDataConKey = mkPreludeDataConUnique 51
typeErrorVAppendDataConKey = mkPreludeDataConUnique 52
typeErrorShowTypeDataConKey = mkPreludeDataConUnique 53
prefixIDataConKey, infixIDataConKey, leftAssociativeDataConKey,
rightAssociativeDataConKey, notAssociativeDataConKey,
sourceUnpackDataConKey, sourceNoUnpackDataConKey,
noSourceUnpackednessDataConKey, sourceLazyDataConKey,
sourceStrictDataConKey, noSourceStrictnessDataConKey,
decidedLazyDataConKey, decidedStrictDataConKey, decidedUnpackDataConKey,
metaDataDataConKey, metaConsDataConKey, metaSelDataConKey :: Unique
prefixIDataConKey = mkPreludeDataConUnique 54
infixIDataConKey = mkPreludeDataConUnique 55
leftAssociativeDataConKey = mkPreludeDataConUnique 56
rightAssociativeDataConKey = mkPreludeDataConUnique 57
notAssociativeDataConKey = mkPreludeDataConUnique 58
sourceUnpackDataConKey = mkPreludeDataConUnique 59
sourceNoUnpackDataConKey = mkPreludeDataConUnique 60
noSourceUnpackednessDataConKey = mkPreludeDataConUnique 61
sourceLazyDataConKey = mkPreludeDataConUnique 62
sourceStrictDataConKey = mkPreludeDataConUnique 63
noSourceStrictnessDataConKey = mkPreludeDataConUnique 64
decidedLazyDataConKey = mkPreludeDataConUnique 65
decidedStrictDataConKey = mkPreludeDataConUnique 66
decidedUnpackDataConKey = mkPreludeDataConUnique 67
metaDataDataConKey = mkPreludeDataConUnique 68
metaConsDataConKey = mkPreludeDataConUnique 69
metaSelDataConKey = mkPreludeDataConUnique 70
vecRepDataConKey :: Unique
vecRepDataConKey = mkPreludeDataConUnique 71
-- See Note [Wiring in RuntimeRep] in TysWiredIn
runtimeRepSimpleDataConKeys :: [Unique]
ptrRepLiftedDataConKey, ptrRepUnliftedDataConKey :: Unique
runtimeRepSimpleDataConKeys@(
ptrRepLiftedDataConKey : ptrRepUnliftedDataConKey : _)
= map mkPreludeDataConUnique [72..83]
-- See Note [Wiring in RuntimeRep] in TysWiredIn
-- VecCount
vecCountDataConKeys :: [Unique]
vecCountDataConKeys = map mkPreludeDataConUnique [84..89]
-- See Note [Wiring in RuntimeRep] in TysWiredIn
-- VecElem
vecElemDataConKeys :: [Unique]
vecElemDataConKeys = map mkPreludeDataConUnique [90..99]
---------------- Template Haskell -------------------
-- THNames.hs: USES DataUniques 100-150
-----------------------------------------------------
{-
************************************************************************
* *
\subsubsection[Uniques-prelude-Ids]{@Uniques@ for wired-in @Ids@ (except @DataCons@)}
* *
************************************************************************
-}
wildCardKey, absentErrorIdKey, augmentIdKey, appendIdKey,
buildIdKey, errorIdKey, foldrIdKey, recSelErrorIdKey,
seqIdKey, irrefutPatErrorIdKey, eqStringIdKey,
noMethodBindingErrorIdKey, nonExhaustiveGuardsErrorIdKey,
runtimeErrorIdKey, patErrorIdKey, voidPrimIdKey,
realWorldPrimIdKey, recConErrorIdKey,
unpackCStringUtf8IdKey, unpackCStringAppendIdKey,
unpackCStringFoldrIdKey, unpackCStringIdKey,
typeErrorIdKey, divIntIdKey, modIntIdKey :: Unique
wildCardKey = mkPreludeMiscIdUnique 0 -- See Note [WildCard binders]
absentErrorIdKey = mkPreludeMiscIdUnique 1
augmentIdKey = mkPreludeMiscIdUnique 2
appendIdKey = mkPreludeMiscIdUnique 3
buildIdKey = mkPreludeMiscIdUnique 4
errorIdKey = mkPreludeMiscIdUnique 5
foldrIdKey = mkPreludeMiscIdUnique 6
recSelErrorIdKey = mkPreludeMiscIdUnique 7
seqIdKey = mkPreludeMiscIdUnique 8
irrefutPatErrorIdKey = mkPreludeMiscIdUnique 9
eqStringIdKey = mkPreludeMiscIdUnique 10
noMethodBindingErrorIdKey = mkPreludeMiscIdUnique 11
nonExhaustiveGuardsErrorIdKey = mkPreludeMiscIdUnique 12
runtimeErrorIdKey = mkPreludeMiscIdUnique 13
patErrorIdKey = mkPreludeMiscIdUnique 14
realWorldPrimIdKey = mkPreludeMiscIdUnique 15
recConErrorIdKey = mkPreludeMiscIdUnique 16
unpackCStringUtf8IdKey = mkPreludeMiscIdUnique 17
unpackCStringAppendIdKey = mkPreludeMiscIdUnique 18
unpackCStringFoldrIdKey = mkPreludeMiscIdUnique 19
unpackCStringIdKey = mkPreludeMiscIdUnique 20
voidPrimIdKey = mkPreludeMiscIdUnique 21
typeErrorIdKey = mkPreludeMiscIdUnique 22
divIntIdKey = mkPreludeMiscIdUnique 23
modIntIdKey = mkPreludeMiscIdUnique 24
unsafeCoerceIdKey, concatIdKey, filterIdKey, zipIdKey, bindIOIdKey,
returnIOIdKey, newStablePtrIdKey,
printIdKey, failIOIdKey, nullAddrIdKey, voidArgIdKey,
fstIdKey, sndIdKey, otherwiseIdKey, assertIdKey :: Unique
unsafeCoerceIdKey = mkPreludeMiscIdUnique 30
concatIdKey = mkPreludeMiscIdUnique 31
filterIdKey = mkPreludeMiscIdUnique 32
zipIdKey = mkPreludeMiscIdUnique 33
bindIOIdKey = mkPreludeMiscIdUnique 34
returnIOIdKey = mkPreludeMiscIdUnique 35
newStablePtrIdKey = mkPreludeMiscIdUnique 36
printIdKey = mkPreludeMiscIdUnique 37
failIOIdKey = mkPreludeMiscIdUnique 38
nullAddrIdKey = mkPreludeMiscIdUnique 39
voidArgIdKey = mkPreludeMiscIdUnique 40
fstIdKey = mkPreludeMiscIdUnique 41
sndIdKey = mkPreludeMiscIdUnique 42
otherwiseIdKey = mkPreludeMiscIdUnique 43
assertIdKey = mkPreludeMiscIdUnique 44
mkIntegerIdKey, smallIntegerIdKey, wordToIntegerIdKey,
integerToWordIdKey, integerToIntIdKey,
integerToWord64IdKey, integerToInt64IdKey,
word64ToIntegerIdKey, int64ToIntegerIdKey,
plusIntegerIdKey, timesIntegerIdKey, minusIntegerIdKey,
negateIntegerIdKey,
eqIntegerPrimIdKey, neqIntegerPrimIdKey, absIntegerIdKey, signumIntegerIdKey,
leIntegerPrimIdKey, gtIntegerPrimIdKey, ltIntegerPrimIdKey, geIntegerPrimIdKey,
compareIntegerIdKey, quotRemIntegerIdKey, divModIntegerIdKey,
quotIntegerIdKey, remIntegerIdKey, divIntegerIdKey, modIntegerIdKey,
floatFromIntegerIdKey, doubleFromIntegerIdKey,
encodeFloatIntegerIdKey, encodeDoubleIntegerIdKey,
decodeDoubleIntegerIdKey,
gcdIntegerIdKey, lcmIntegerIdKey,
andIntegerIdKey, orIntegerIdKey, xorIntegerIdKey, complementIntegerIdKey,
shiftLIntegerIdKey, shiftRIntegerIdKey :: Unique
mkIntegerIdKey = mkPreludeMiscIdUnique 60
smallIntegerIdKey = mkPreludeMiscIdUnique 61
integerToWordIdKey = mkPreludeMiscIdUnique 62
integerToIntIdKey = mkPreludeMiscIdUnique 63
integerToWord64IdKey = mkPreludeMiscIdUnique 64
integerToInt64IdKey = mkPreludeMiscIdUnique 65
plusIntegerIdKey = mkPreludeMiscIdUnique 66
timesIntegerIdKey = mkPreludeMiscIdUnique 67
minusIntegerIdKey = mkPreludeMiscIdUnique 68
negateIntegerIdKey = mkPreludeMiscIdUnique 69
eqIntegerPrimIdKey = mkPreludeMiscIdUnique 70
neqIntegerPrimIdKey = mkPreludeMiscIdUnique 71
absIntegerIdKey = mkPreludeMiscIdUnique 72
signumIntegerIdKey = mkPreludeMiscIdUnique 73
leIntegerPrimIdKey = mkPreludeMiscIdUnique 74
gtIntegerPrimIdKey = mkPreludeMiscIdUnique 75
ltIntegerPrimIdKey = mkPreludeMiscIdUnique 76
geIntegerPrimIdKey = mkPreludeMiscIdUnique 77
compareIntegerIdKey = mkPreludeMiscIdUnique 78
quotIntegerIdKey = mkPreludeMiscIdUnique 79
remIntegerIdKey = mkPreludeMiscIdUnique 80
divIntegerIdKey = mkPreludeMiscIdUnique 81
modIntegerIdKey = mkPreludeMiscIdUnique 82
divModIntegerIdKey = mkPreludeMiscIdUnique 83
quotRemIntegerIdKey = mkPreludeMiscIdUnique 84
floatFromIntegerIdKey = mkPreludeMiscIdUnique 85
doubleFromIntegerIdKey = mkPreludeMiscIdUnique 86
encodeFloatIntegerIdKey = mkPreludeMiscIdUnique 87
encodeDoubleIntegerIdKey = mkPreludeMiscIdUnique 88
gcdIntegerIdKey = mkPreludeMiscIdUnique 89
lcmIntegerIdKey = mkPreludeMiscIdUnique 90
andIntegerIdKey = mkPreludeMiscIdUnique 91
orIntegerIdKey = mkPreludeMiscIdUnique 92
xorIntegerIdKey = mkPreludeMiscIdUnique 93
complementIntegerIdKey = mkPreludeMiscIdUnique 94
shiftLIntegerIdKey = mkPreludeMiscIdUnique 95
shiftRIntegerIdKey = mkPreludeMiscIdUnique 96
wordToIntegerIdKey = mkPreludeMiscIdUnique 97
word64ToIntegerIdKey = mkPreludeMiscIdUnique 98
int64ToIntegerIdKey = mkPreludeMiscIdUnique 99
decodeDoubleIntegerIdKey = mkPreludeMiscIdUnique 100
rootMainKey, runMainKey :: Unique
rootMainKey = mkPreludeMiscIdUnique 101
runMainKey = mkPreludeMiscIdUnique 102
thenIOIdKey, lazyIdKey, assertErrorIdKey, oneShotKey, runRWKey :: Unique
thenIOIdKey = mkPreludeMiscIdUnique 103
lazyIdKey = mkPreludeMiscIdUnique 104
assertErrorIdKey = mkPreludeMiscIdUnique 105
oneShotKey = mkPreludeMiscIdUnique 106
runRWKey = mkPreludeMiscIdUnique 107
breakpointIdKey, breakpointCondIdKey, breakpointAutoIdKey,
breakpointJumpIdKey, breakpointCondJumpIdKey,
breakpointAutoJumpIdKey :: Unique
breakpointIdKey = mkPreludeMiscIdUnique 110
breakpointCondIdKey = mkPreludeMiscIdUnique 111
breakpointAutoIdKey = mkPreludeMiscIdUnique 112
breakpointJumpIdKey = mkPreludeMiscIdUnique 113
breakpointCondJumpIdKey = mkPreludeMiscIdUnique 114
breakpointAutoJumpIdKey = mkPreludeMiscIdUnique 115
inlineIdKey, noinlineIdKey :: Unique
inlineIdKey = mkPreludeMiscIdUnique 120
-- noinlineIdKey is defined below, after coercionTokenIdKey
mapIdKey, groupWithIdKey, dollarIdKey :: Unique
mapIdKey = mkPreludeMiscIdUnique 121
groupWithIdKey = mkPreludeMiscIdUnique 122
dollarIdKey = mkPreludeMiscIdUnique 123
coercionTokenIdKey :: Unique
coercionTokenIdKey = mkPreludeMiscIdUnique 124
noinlineIdKey = mkPreludeMiscIdUnique 125
rationalToFloatIdKey, rationalToDoubleIdKey :: Unique
rationalToFloatIdKey = mkPreludeMiscIdUnique 130
rationalToDoubleIdKey = mkPreludeMiscIdUnique 131
-- dotnet interop
unmarshalObjectIdKey, marshalObjectIdKey, marshalStringIdKey,
unmarshalStringIdKey, checkDotnetResNameIdKey :: Unique
unmarshalObjectIdKey = mkPreludeMiscIdUnique 150
marshalObjectIdKey = mkPreludeMiscIdUnique 151
marshalStringIdKey = mkPreludeMiscIdUnique 152
unmarshalStringIdKey = mkPreludeMiscIdUnique 153
checkDotnetResNameIdKey = mkPreludeMiscIdUnique 154
undefinedKey :: Unique
undefinedKey = mkPreludeMiscIdUnique 155
magicDictKey :: Unique
magicDictKey = mkPreludeMiscIdUnique 156
coerceKey :: Unique
coerceKey = mkPreludeMiscIdUnique 157
{-
Certain class operations from Prelude classes. They get their own
uniques so we can look them up easily when we want to conjure them up
during type checking.
-}
-- Just a placeholder for unbound variables produced by the renamer:
unboundKey :: Unique
unboundKey = mkPreludeMiscIdUnique 158
fromIntegerClassOpKey, minusClassOpKey, fromRationalClassOpKey,
enumFromClassOpKey, enumFromThenClassOpKey, enumFromToClassOpKey,
enumFromThenToClassOpKey, eqClassOpKey, geClassOpKey, negateClassOpKey,
failMClassOpKey_preMFP, bindMClassOpKey, thenMClassOpKey, returnMClassOpKey,
fmapClassOpKey
:: Unique
fromIntegerClassOpKey = mkPreludeMiscIdUnique 160
minusClassOpKey = mkPreludeMiscIdUnique 161
fromRationalClassOpKey = mkPreludeMiscIdUnique 162
enumFromClassOpKey = mkPreludeMiscIdUnique 163
enumFromThenClassOpKey = mkPreludeMiscIdUnique 164
enumFromToClassOpKey = mkPreludeMiscIdUnique 165
enumFromThenToClassOpKey = mkPreludeMiscIdUnique 166
eqClassOpKey = mkPreludeMiscIdUnique 167
geClassOpKey = mkPreludeMiscIdUnique 168
negateClassOpKey = mkPreludeMiscIdUnique 169
failMClassOpKey_preMFP = mkPreludeMiscIdUnique 170
bindMClassOpKey = mkPreludeMiscIdUnique 171 -- (>>=)
thenMClassOpKey = mkPreludeMiscIdUnique 172 -- (>>)
fmapClassOpKey = mkPreludeMiscIdUnique 173
returnMClassOpKey = mkPreludeMiscIdUnique 174
-- Recursive do notation
mfixIdKey :: Unique
mfixIdKey = mkPreludeMiscIdUnique 175
-- MonadFail operations
failMClassOpKey :: Unique
failMClassOpKey = mkPreludeMiscIdUnique 176
-- Arrow notation
arrAIdKey, composeAIdKey, firstAIdKey, appAIdKey, choiceAIdKey,
loopAIdKey :: Unique
arrAIdKey = mkPreludeMiscIdUnique 180
composeAIdKey = mkPreludeMiscIdUnique 181 -- >>>
firstAIdKey = mkPreludeMiscIdUnique 182
appAIdKey = mkPreludeMiscIdUnique 183
choiceAIdKey = mkPreludeMiscIdUnique 184 -- |||
loopAIdKey = mkPreludeMiscIdUnique 185
fromStringClassOpKey :: Unique
fromStringClassOpKey = mkPreludeMiscIdUnique 186
-- Annotation type checking
toAnnotationWrapperIdKey :: Unique
toAnnotationWrapperIdKey = mkPreludeMiscIdUnique 187
-- Conversion functions
fromIntegralIdKey, realToFracIdKey, toIntegerClassOpKey, toRationalClassOpKey :: Unique
fromIntegralIdKey = mkPreludeMiscIdUnique 190
realToFracIdKey = mkPreludeMiscIdUnique 191
toIntegerClassOpKey = mkPreludeMiscIdUnique 192
toRationalClassOpKey = mkPreludeMiscIdUnique 193
-- Monad comprehensions
guardMIdKey, liftMIdKey, mzipIdKey :: Unique
guardMIdKey = mkPreludeMiscIdUnique 194
liftMIdKey = mkPreludeMiscIdUnique 195
mzipIdKey = mkPreludeMiscIdUnique 196
-- GHCi
ghciStepIoMClassOpKey :: Unique
ghciStepIoMClassOpKey = mkPreludeMiscIdUnique 197
-- Overloaded lists
isListClassKey, fromListClassOpKey, fromListNClassOpKey, toListClassOpKey :: Unique
isListClassKey = mkPreludeMiscIdUnique 198
fromListClassOpKey = mkPreludeMiscIdUnique 199
fromListNClassOpKey = mkPreludeMiscIdUnique 500
toListClassOpKey = mkPreludeMiscIdUnique 501
proxyHashKey :: Unique
proxyHashKey = mkPreludeMiscIdUnique 502
---------------- Template Haskell -------------------
-- THNames.hs: USES IdUniques 200-499
-----------------------------------------------------
-- Used to make `Typeable` dictionaries
mkTyConKey
, mkPolyTyConAppKey
, mkAppTyKey
, typeNatTypeRepKey
, typeSymbolTypeRepKey
, typeRepIdKey
:: Unique
mkTyConKey = mkPreludeMiscIdUnique 503
mkPolyTyConAppKey = mkPreludeMiscIdUnique 504
mkAppTyKey = mkPreludeMiscIdUnique 505
typeNatTypeRepKey = mkPreludeMiscIdUnique 506
typeSymbolTypeRepKey = mkPreludeMiscIdUnique 507
typeRepIdKey = mkPreludeMiscIdUnique 508
-- Dynamic
toDynIdKey :: Unique
toDynIdKey = mkPreludeMiscIdUnique 509
bitIntegerIdKey :: Unique
bitIntegerIdKey = mkPreludeMiscIdUnique 510
heqSCSelIdKey, coercibleSCSelIdKey :: Unique
heqSCSelIdKey = mkPreludeMiscIdUnique 511
coercibleSCSelIdKey = mkPreludeMiscIdUnique 512
sappendClassOpKey :: Unique
sappendClassOpKey = mkPreludeMiscIdUnique 513
memptyClassOpKey, mappendClassOpKey, mconcatClassOpKey :: Unique
memptyClassOpKey = mkPreludeMiscIdUnique 514
mappendClassOpKey = mkPreludeMiscIdUnique 515
mconcatClassOpKey = mkPreludeMiscIdUnique 516
emptyCallStackKey, pushCallStackKey :: Unique
emptyCallStackKey = mkPreludeMiscIdUnique 517
pushCallStackKey = mkPreludeMiscIdUnique 518
fromStaticPtrClassOpKey :: Unique
fromStaticPtrClassOpKey = mkPreludeMiscIdUnique 519
{-
************************************************************************
* *
\subsection[Class-std-groups]{Standard groups of Prelude classes}
* *
************************************************************************
NOTE: @Eq@ and @Text@ do need to appear in @standardClassKeys@
even though every numeric class has these two as a superclass,
because the list of ambiguous dictionaries hasn't been simplified.
-}
numericClassKeys :: [Unique]
numericClassKeys =
[ numClassKey
, realClassKey
, integralClassKey
]
++ fractionalClassKeys
fractionalClassKeys :: [Unique]
fractionalClassKeys =
[ fractionalClassKey
, floatingClassKey
, realFracClassKey
, realFloatClassKey
]
-- The "standard classes" are used in defaulting (Haskell 98 report 4.3.4),
-- and are: "classes defined in the Prelude or a standard library"
standardClassKeys :: [Unique]
standardClassKeys = derivableClassKeys ++ numericClassKeys
++ [randomClassKey, randomGenClassKey,
functorClassKey,
monadClassKey, monadPlusClassKey, monadFailClassKey,
semigroupClassKey, monoidClassKey,
isStringClassKey,
applicativeClassKey, foldableClassKey,
traversableClassKey, alternativeClassKey
]
{-
@derivableClassKeys@ is also used in checking \tr{deriving} constructs
(@TcDeriv@).
-}
derivableClassKeys :: [Unique]
derivableClassKeys
= [ eqClassKey, ordClassKey, enumClassKey, ixClassKey,
boundedClassKey, showClassKey, readClassKey ]
-- These are the "interactive classes" that are consulted when doing
-- defaulting. Does not include Num or IsString, which have special
-- handling.
interactiveClassNames :: [Name]
interactiveClassNames
= [ showClassName, eqClassName, ordClassName, foldableClassName
, traversableClassName ]
interactiveClassKeys :: [Unique]
interactiveClassKeys = map getUnique interactiveClassNames
{-
************************************************************************
* *
Semi-builtin names
* *
************************************************************************
The following names should be considered by GHCi to be in scope always.
-}
pretendNameIsInScope :: Name -> Bool
pretendNameIsInScope n
= any (n `hasKey`)
[ starKindTyConKey, liftedTypeKindTyConKey, tYPETyConKey
, unliftedTypeKindTyConKey
, runtimeRepTyConKey, ptrRepLiftedDataConKey, ptrRepUnliftedDataConKey ]
|
olsner/ghc
|
compiler/prelude/PrelNames.hs
|
bsd-3-clause
| 102,324 | 0 | 10 | 22,929 | 15,582 | 8,890 | 6,692 | 1,558 | 2 |
-- | List of Lights Off levels. Each level is a list of position pairs. Some
-- levels are defined by listing all their positions explicitly; others are
-- defined by filtering the list of all board positions.
module Levels where
import LightsOff
levels :: [LightsOffLevel]
levels =
[ [(x,y) | x <- [c1-1..c1+1], y <- [c2-1..c2+1], x == c1 || y == c2]
, filter (\(x,y) -> (x /= 1 || y /= 1)) lightsOffBoardPositions
, filter (\(x,y) -> (x == y || x == (lightsOffBoardSize - 1 - y))) lightsOffBoardPositions
, filter (\(x,y) -> (x == y)) lightsOffBoardPositions
, (centre : [(x,y) | x <- [c1-1, c1+1], y <- [c2-1, c2+1]])
, [(1,1)]
, [(0,1), (1,1), (2,1)]
, [centre]
, [(2,1), (2,3)]
, [(0,0)]
, [ head lightsOffBoardPositions, last lightsOffBoardPositions ]
]
where centre@(c1, c2) = let p = lightsOffBoardSize `div` 2 in (p,p)
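-- Illustrative note: if lightsOffBoardSize were 5, centre would be (2,2) and
-- the first level above would evaluate to [(1,2),(2,1),(2,2),(2,3),(3,2)],
-- i.e. the five cells forming a plus shape centred on the board.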
|
keera-studios/gtk-helpers
|
gtk3/examples/lights-off/Levels.hs
|
bsd-3-clause
| 841 | 0 | 13 | 182 | 406 | 240 | 166 | 16 | 1 |
{-# LANGUAGE Safe, FlexibleContexts, CPP #-}
module Cryptol.Utils.Misc where
import MonadLib
import Data.Maybe(fromMaybe)
#if __GLASGOW_HASKELL__ < 710
import Data.Traversable (Traversable, traverse)
#endif
-- | Apply a function to all elements of a container.
-- Returns `Nothing` if nothing changed, and @Just container@ otherwise.
anyJust :: Traversable t => (a -> Maybe a) -> t a -> Maybe (t a)
anyJust f m = mk $ runId $ runStateT False $ traverse upd m
where
mk (a,changes) = if changes then Just a else Nothing
upd x = case f x of
Just y -> set True >> return y
Nothing -> return x
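-- A usage sketch with a hypothetical helper 'clamp' (illustrative only):
--
-- >>> let clamp x = if x < (0 :: Int) then Just 0 else Nothing
-- >>> anyJust clamp [1, -2, 3]
-- Just [1,0,3]
-- >>> anyJust clamp [1, 2, 3]
-- Nothing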
-- | Apply functions to both elements of a pair.
-- Returns `Nothing` if neither changed, and @Just pair@ otherwise.
anyJust2 :: (a -> Maybe a) -> (b -> Maybe b) -> (a,b) -> Maybe (a,b)
anyJust2 f g (a,b) =
case (f a, g b) of
(Nothing, Nothing) -> Nothing
(x,y) -> Just (fromMaybe a x, fromMaybe b y)
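-- Continuing the sketch above with the same hypothetical 'clamp':
--
-- >>> anyJust2 clamp clamp (1, -2)
-- Just (1,0)
-- >>> anyJust2 clamp clamp (1, 2)
-- Nothing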
|
ntc2/cryptol
|
src/Cryptol/Utils/Misc.hs
|
bsd-3-clause
| 953 | 0 | 11 | 226 | 311 | 165 | 146 | 16 | 3 |
module A4 where
import B4
import C4
import D4
main :: Tree Int -> Bool
main t = isSame (sumSquares (fringe t))
(sumSquares (B4.myFringe t)+sumSquares (C4.myFringe t))
|
kmate/HaRe
|
old/testing/renaming/A4.hs
|
bsd-3-clause
| 186 | 0 | 11 | 46 | 79 | 41 | 38 | 7 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude, BangPatterns, MagicHash #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Bits
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- This module defines bitwise operations for signed and unsigned
-- integers. Instances of the class 'Bits' for the 'Int' and
-- 'Integer' types are available from this module, and instances for
-- explicitly sized integral types are available from the
-- "Data.Int" and "Data.Word" modules.
--
-----------------------------------------------------------------------------
module Data.Bits (
Bits(
(.&.), (.|.), xor,
complement,
shift,
rotate,
zeroBits,
bit,
setBit,
clearBit,
complementBit,
testBit,
bitSizeMaybe,
bitSize,
isSigned,
shiftL, shiftR,
unsafeShiftL, unsafeShiftR,
rotateL, rotateR,
popCount
),
FiniteBits(finiteBitSize),
bitDefault,
testBitDefault,
popCountDefault
) where
-- Defines the @Bits@ class containing bit-based operations.
-- See library document for details on the semantics of the
-- individual operations.
#include "MachDeps.h"
import Data.Maybe
import GHC.Enum
import GHC.Num
import GHC.Base
infixl 8 `shift`, `rotate`, `shiftL`, `shiftR`, `rotateL`, `rotateR`
infixl 7 .&.
infixl 6 `xor`
infixl 5 .|.
{-# DEPRECATED bitSize "Use 'bitSizeMaybe' or 'finiteBitSize' instead" #-} -- deprecated in 7.8
{-|
The 'Bits' class defines bitwise operations over integral types.
* Bits are numbered from 0 with bit 0 being the least
significant bit.
Minimal complete definition: '.&.', '.|.', 'xor', 'complement',
('shift' or ('shiftL' and 'shiftR')), ('rotate' or ('rotateL' and 'rotateR')),
'bitSize', 'isSigned', 'testBit', 'bit', and 'popCount'. The latter three can
be implemented using 'testBitDefault', 'bitDefault', and 'popCountDefault', if
@a@ is also an instance of 'Num'.
-}
class Eq a => Bits a where
-- | Bitwise \"and\"
(.&.) :: a -> a -> a
-- | Bitwise \"or\"
(.|.) :: a -> a -> a
-- | Bitwise \"xor\"
xor :: a -> a -> a
{-| Reverse all the bits in the argument -}
complement :: a -> a
{-| @'shift' x i@ shifts @x@ left by @i@ bits if @i@ is positive,
or right by @-i@ bits otherwise.
Right shifts perform sign extension on signed number types;
i.e. they fill the top bits with 1 if the @x@ is negative
and with 0 otherwise.
An instance can define either this unified 'shift' or 'shiftL' and
'shiftR', depending on which is more convenient for the type in
question. -}
shift :: a -> Int -> a
x `shift` i | i<0 = x `shiftR` (-i)
| i>0 = x `shiftL` i
| otherwise = x
{-| @'rotate' x i@ rotates @x@ left by @i@ bits if @i@ is positive,
or right by @-i@ bits otherwise.
For unbounded types like 'Integer', 'rotate' is equivalent to 'shift'.
An instance can define either this unified 'rotate' or 'rotateL' and
'rotateR', depending on which is more convenient for the type in
question. -}
rotate :: a -> Int -> a
x `rotate` i | i<0 = x `rotateR` (-i)
| i>0 = x `rotateL` i
| otherwise = x
{-
-- Rotation can be implemented in terms of two shifts, but care is
-- needed for negative values. This suggested implementation assumes
-- 2's-complement arithmetic. It is commented out because it would
-- require an extra context (Ord a) on the signature of 'rotate'.
x `rotate` i | i<0 && isSigned x && x<0
= let left = i+bitSize x in
((x `shift` i) .&. complement ((-1) `shift` left))
.|. (x `shift` left)
| i<0 = (x `shift` i) .|. (x `shift` (i+bitSize x))
| i==0 = x
| i>0 = (x `shift` i) .|. (x `shift` (i-bitSize x))
-}
-- | 'zeroBits' is the value with all bits unset.
--
-- The following laws ought to hold (for all valid bit indices @/n/@):
--
-- * @'clearBit' 'zeroBits' /n/ == 'zeroBits'@
-- * @'setBit' 'zeroBits' /n/ == 'bit' /n/@
-- * @'testBit' 'zeroBits' /n/ == False@
-- * @'popCount' 'zeroBits' == 0@
--
-- This method uses @'clearBit' ('bit' 0) 0@ as its default
-- implementation (which ought to be equivalent to 'zeroBits' for
-- types which possess a 0th bit).
--
-- /Since: 4.7.0.0/
zeroBits :: a
zeroBits = clearBit (bit 0) 0
-- | @bit /i/@ is a value with the @/i/@th bit set and all other bits clear.
--
-- See also 'zeroBits'.
bit :: Int -> a
-- | @x \`setBit\` i@ is the same as @x .|. bit i@
setBit :: a -> Int -> a
-- | @x \`clearBit\` i@ is the same as @x .&. complement (bit i)@
clearBit :: a -> Int -> a
-- | @x \`complementBit\` i@ is the same as @x \`xor\` bit i@
complementBit :: a -> Int -> a
-- | Return 'True' if the @n@th bit of the argument is 1
testBit :: a -> Int -> Bool
{-| Return the number of bits in the type of the argument. The actual
value of the argument is ignored. Returns Nothing
for types that do not have a fixed bitsize, like 'Integer'.
/Since: 4.7.0.0/
-}
bitSizeMaybe :: a -> Maybe Int
{-| Return the number of bits in the type of the argument. The actual
value of the argument is ignored. The function 'bitSize' is
undefined for types that do not have a fixed bitsize, like 'Integer'.
-}
bitSize :: a -> Int
{-| Return 'True' if the argument is a signed type. The actual
value of the argument is ignored -}
isSigned :: a -> Bool
{-# INLINE setBit #-}
{-# INLINE clearBit #-}
{-# INLINE complementBit #-}
x `setBit` i = x .|. bit i
x `clearBit` i = x .&. complement (bit i)
x `complementBit` i = x `xor` bit i
{-| Shift the argument left by the specified number of bits
(which must be non-negative).
An instance can define either this and 'shiftR' or the unified
'shift', depending on which is more convenient for the type in
question. -}
shiftL :: a -> Int -> a
{-# INLINE shiftL #-}
x `shiftL` i = x `shift` i
{-| Shift the argument left by the specified number of bits. The
result is undefined for negative shift amounts and shift amounts
greater or equal to the 'bitSize'.
Defaults to 'shiftL' unless defined explicitly by an instance.
/Since: 4.5.0.0/ -}
unsafeShiftL :: a -> Int -> a
{-# INLINE unsafeShiftL #-}
x `unsafeShiftL` i = x `shiftL` i
{-| Shift the first argument right by the specified number of bits. The
result is undefined for negative shift amounts and shift amounts
greater or equal to the 'bitSize'.
Right shifts perform sign extension on signed number types;
i.e. they fill the top bits with 1 if the @x@ is negative
and with 0 otherwise.
An instance can define either this and 'shiftL' or the unified
'shift', depending on which is more convenient for the type in
question. -}
shiftR :: a -> Int -> a
{-# INLINE shiftR #-}
x `shiftR` i = x `shift` (-i)
{-| Shift the first argument right by the specified number of bits, which
must be non-negative and smaller than the number of bits in the type.
Right shifts perform sign extension on signed number types;
i.e. they fill the top bits with 1 if the @x@ is negative
and with 0 otherwise.
Defaults to 'shiftR' unless defined explicitly by an instance.
/Since: 4.5.0.0/ -}
unsafeShiftR :: a -> Int -> a
{-# INLINE unsafeShiftR #-}
x `unsafeShiftR` i = x `shiftR` i
{-| Rotate the argument left by the specified number of bits
(which must be non-negative).
An instance can define either this and 'rotateR' or the unified
'rotate', depending on which is more convenient for the type in
question. -}
rotateL :: a -> Int -> a
{-# INLINE rotateL #-}
x `rotateL` i = x `rotate` i
{-| Rotate the argument right by the specified number of bits
(which must be non-negative).
An instance can define either this and 'rotateL' or the unified
'rotate', depending on which is more convenient for the type in
question. -}
rotateR :: a -> Int -> a
{-# INLINE rotateR #-}
x `rotateR` i = x `rotate` (-i)
{-| Return the number of set bits in the argument. This number is
known as the population count or the Hamming weight.
/Since: 4.5.0.0/ -}
popCount :: a -> Int
{-# MINIMAL (.&.), (.|.), xor, complement,
(shift | (shiftL, shiftR)),
(rotate | (rotateL, rotateR)),
bitSize, bitSizeMaybe, isSigned, testBit, bit, popCount #-}
-- |The 'FiniteBits' class denotes types with a finite, fixed number of bits.
--
-- /Since: 4.7.0.0/
class Bits b => FiniteBits b where
-- | Return the number of bits in the type of the argument.
-- The actual value of the argument is ignored. Moreover, 'finiteBitSize'
-- is total, in contrast to the deprecated 'bitSize' function it replaces.
--
-- @
-- 'finiteBitSize' = 'bitSize'
-- 'bitSizeMaybe' = 'Just' . 'finiteBitSize'
-- @
--
-- /Since: 4.7.0.0/
finiteBitSize :: b -> Int
-- The defaults below are written with lambdas so that e.g.
-- bit = bitDefault
-- is fully applied, so inlining will happen
-- | Default implementation for 'bit'.
--
-- Note that: @bitDefault i = 1 `shiftL` i@
--
-- /Since: 4.6.0.0/
bitDefault :: (Bits a, Num a) => Int -> a
bitDefault = \i -> 1 `shiftL` i
{-# INLINE bitDefault #-}
-- | Default implementation for 'testBit'.
--
-- Note that: @testBitDefault x i = (x .&. bit i) /= 0@
--
-- /Since: 4.6.0.0/
testBitDefault :: (Bits a, Num a) => a -> Int -> Bool
testBitDefault = \x i -> (x .&. bit i) /= 0
{-# INLINE testBitDefault #-}
-- | Default implementation for 'popCount'.
--
-- This implementation is intentionally naive. Instances are expected to provide
-- an optimized implementation for their size.
--
-- /Since: 4.6.0.0/
popCountDefault :: (Bits a, Num a) => a -> Int
popCountDefault = go 0
where
go !c 0 = c
go c w = go (c+1) (w .&. (w - 1)) -- clear the least significant
{-# INLINABLE popCountDefault #-}
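-- A hedged illustration (not part of the original module): a tiny check that
-- the default implementations agree with the specialised 'Int' methods defined
-- below. The literal 42 (binary 101010) is only an example value.
_defaultsAgreeOn42 :: Bool
_defaultsAgreeOn42 =
  popCountDefault (42 :: Int) == popCount (42 :: Int)           -- both are 3
    && testBitDefault (42 :: Int) 1 == testBit (42 :: Int) 1    -- both are True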
-- Interpret 'Bool' as 1-bit bit-field; /Since: 4.7.0.0/
instance Bits Bool where
(.&.) = (&&)
(.|.) = (||)
xor = (/=)
complement = not
shift x 0 = x
shift _ _ = False
rotate x _ = x
bit 0 = True
bit _ = False
testBit x 0 = x
testBit _ _ = False
bitSizeMaybe _ = Just 1
bitSize _ = 1
isSigned _ = False
popCount False = 0
popCount True = 1
instance FiniteBits Bool where
finiteBitSize _ = 1
instance Bits Int where
{-# INLINE shift #-}
{-# INLINE bit #-}
{-# INLINE testBit #-}
zeroBits = 0
bit = bitDefault
testBit = testBitDefault
(I# x#) .&. (I# y#) = I# (x# `andI#` y#)
(I# x#) .|. (I# y#) = I# (x# `orI#` y#)
(I# x#) `xor` (I# y#) = I# (x# `xorI#` y#)
complement (I# x#) = I# (notI# x#)
(I# x#) `shift` (I# i#)
| isTrue# (i# >=# 0#) = I# (x# `iShiftL#` i#)
| otherwise = I# (x# `iShiftRA#` negateInt# i#)
(I# x#) `shiftL` (I# i#) = I# (x# `iShiftL#` i#)
(I# x#) `unsafeShiftL` (I# i#) = I# (x# `uncheckedIShiftL#` i#)
(I# x#) `shiftR` (I# i#) = I# (x# `iShiftRA#` i#)
(I# x#) `unsafeShiftR` (I# i#) = I# (x# `uncheckedIShiftRA#` i#)
{-# INLINE rotate #-} -- See Note [Constant folding for rotate]
(I# x#) `rotate` (I# i#) =
I# ((x# `uncheckedIShiftL#` i'#) `orI#` (x# `uncheckedIShiftRL#` (wsib -# i'#)))
where
!i'# = i# `andI#` (wsib -# 1#)
!wsib = WORD_SIZE_IN_BITS# {- work around preprocessor problem (??) -}
bitSizeMaybe i = Just (finiteBitSize i)
bitSize i = finiteBitSize i
popCount (I# x#) = I# (word2Int# (popCnt# (int2Word# x#)))
isSigned _ = True
instance FiniteBits Int where
finiteBitSize _ = WORD_SIZE_IN_BITS
instance Bits Word where
{-# INLINE shift #-}
{-# INLINE bit #-}
{-# INLINE testBit #-}
(W# x#) .&. (W# y#) = W# (x# `and#` y#)
(W# x#) .|. (W# y#) = W# (x# `or#` y#)
(W# x#) `xor` (W# y#) = W# (x# `xor#` y#)
complement (W# x#) = W# (x# `xor#` mb#)
where !(W# mb#) = maxBound
(W# x#) `shift` (I# i#)
| isTrue# (i# >=# 0#) = W# (x# `shiftL#` i#)
| otherwise = W# (x# `shiftRL#` negateInt# i#)
(W# x#) `shiftL` (I# i#) = W# (x# `shiftL#` i#)
(W# x#) `unsafeShiftL` (I# i#) = W# (x# `uncheckedShiftL#` i#)
(W# x#) `shiftR` (I# i#) = W# (x# `shiftRL#` i#)
(W# x#) `unsafeShiftR` (I# i#) = W# (x# `uncheckedShiftRL#` i#)
(W# x#) `rotate` (I# i#)
| isTrue# (i'# ==# 0#) = W# x#
| otherwise = W# ((x# `uncheckedShiftL#` i'#) `or#` (x# `uncheckedShiftRL#` (wsib -# i'#)))
where
!i'# = i# `andI#` (wsib -# 1#)
!wsib = WORD_SIZE_IN_BITS# {- work around preprocessor problem (??) -}
bitSizeMaybe i = Just (finiteBitSize i)
bitSize i = finiteBitSize i
isSigned _ = False
popCount (W# x#) = I# (word2Int# (popCnt# x#))
bit = bitDefault
testBit = testBitDefault
instance FiniteBits Word where
finiteBitSize _ = WORD_SIZE_IN_BITS
instance Bits Integer where
(.&.) = andInteger
(.|.) = orInteger
xor = xorInteger
complement = complementInteger
shift x i@(I# i#) | i >= 0 = shiftLInteger x i#
| otherwise = shiftRInteger x (negateInt# i#)
shiftL x (I# i#) = shiftLInteger x i#
shiftR x (I# i#) = shiftRInteger x i#
testBit x (I# i) = testBitInteger x i
zeroBits = 0
bit = bitDefault
popCount = popCountDefault
rotate x i = shift x i -- since an Integer never wraps around
bitSizeMaybe _ = Nothing
bitSize _ = error "Data.Bits.bitSize(Integer)"
isSigned _ = True
{- Note [Constant folding for rotate]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The INLINE on the Int instance of rotate enables it to be constant
folded. For example:
sumU . mapU (`rotate` 3) . replicateU 10000000 $ (7 :: Int)
goes to:
Main.$wfold =
\ (ww_sO7 :: Int#) (ww1_sOb :: Int#) ->
case ww1_sOb of wild_XM {
__DEFAULT -> Main.$wfold (+# ww_sO7 56) (+# wild_XM 1);
10000000 -> ww_sO7
whereas before it was left as a call to $wrotate.
All other Bits instances seem to inline well enough on their
own to enable constant folding; for example 'shift':
sumU . mapU (`shift` 3) . replicateU 10000000 $ (7 :: Int)
goes to:
Main.$wfold =
\ (ww_sOb :: Int#) (ww1_sOf :: Int#) ->
case ww1_sOf of wild_XM {
__DEFAULT -> Main.$wfold (+# ww_sOb 56) (+# wild_XM 1);
10000000 -> ww_sOb
}
-}
|
lukexi/ghc
|
libraries/base/Data/Bits.hs
|
bsd-3-clause
| 15,678 | 0 | 13 | 4,688 | 2,594 | 1,465 | 1,129 | 197 | 2 |
{-# LANGUAGE Unsafe #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Typeable.Internal
-- Copyright : (c) The University of Glasgow, CWI 2001--2011
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- The representations of the types TyCon and TypeRep, and the
-- function mkTyCon which is used by derived instances of Typeable to
-- construct a TyCon.
--
-----------------------------------------------------------------------------
{-# LANGUAGE CPP
, NoImplicitPrelude
, OverlappingInstances
, ScopedTypeVariables
, FlexibleInstances
, MagicHash #-}
#ifdef __GLASGOW_HASKELL__
{-# LANGUAGE DeriveDataTypeable, StandaloneDeriving #-}
#endif
module Data.Typeable.Internal (
TypeRep(..),
TyCon(..),
mkTyCon,
mkTyCon3,
mkTyConApp,
mkAppTy,
typeRepTyCon,
typeOfDefault,
typeOf1Default,
typeOf2Default,
typeOf3Default,
typeOf4Default,
typeOf5Default,
typeOf6Default,
Typeable(..),
Typeable1(..),
Typeable2(..),
Typeable3(..),
Typeable4(..),
Typeable5(..),
Typeable6(..),
Typeable7(..),
mkFunTy,
splitTyConApp,
funResultTy,
typeRepArgs,
showsTypeRep,
tyConString,
#if defined(__GLASGOW_HASKELL__)
listTc, funTc
#endif
) where
import GHC.Base
import GHC.Word
import GHC.Show
import GHC.Err (undefined)
import Data.Maybe
import Data.List
import GHC.Num
import GHC.Real
import GHC.IORef
import GHC.IOArray
import GHC.MVar
import GHC.ST ( ST )
import GHC.STRef ( STRef )
import GHC.Ptr ( Ptr, FunPtr )
import GHC.Stable
import GHC.Arr ( Array, STArray )
import Data.Int
import GHC.Fingerprint.Type
import {-# SOURCE #-} GHC.Fingerprint
-- loop: GHC.Fingerprint -> Foreign.Ptr -> Data.Typeable
-- Better to break the loop here, because we want non-SOURCE imports
-- of Data.Typeable as much as possible so we can optimise the derived
-- instances.
-- | A concrete representation of a (monomorphic) type. 'TypeRep'
-- supports reasonably efficient equality.
data TypeRep = TypeRep {-# UNPACK #-} !Fingerprint TyCon [TypeRep]
-- Compare keys for equality
instance Eq TypeRep where
(TypeRep k1 _ _) == (TypeRep k2 _ _) = k1 == k2
instance Ord TypeRep where
(TypeRep k1 _ _) <= (TypeRep k2 _ _) = k1 <= k2
-- | An abstract representation of a type constructor. 'TyCon' objects can
-- be built using 'mkTyCon'.
data TyCon = TyCon {
tyConHash :: {-# UNPACK #-} !Fingerprint,
tyConPackage :: String,
tyConModule :: String,
tyConName :: String
}
instance Eq TyCon where
(TyCon t1 _ _ _) == (TyCon t2 _ _ _) = t1 == t2
instance Ord TyCon where
(TyCon k1 _ _ _) <= (TyCon k2 _ _ _) = k1 <= k2
----------------- Construction --------------------
#include "MachDeps.h"
-- mkTyCon is an internal function to make it easier for GHC to
-- generate derived instances. GHC precomputes the MD5 hash for the
-- TyCon and passes it as two separate 64-bit values to mkTyCon. The
-- TyCon for a derived Typeable instance will end up being statically
-- allocated.
#if WORD_SIZE_IN_BITS < 64
mkTyCon :: Word64# -> Word64# -> String -> String -> String -> TyCon
#else
mkTyCon :: Word# -> Word# -> String -> String -> String -> TyCon
#endif
mkTyCon high# low# pkg modl name
= TyCon (Fingerprint (W64# high#) (W64# low#)) pkg modl name
-- | Applies a type constructor to a sequence of types
mkTyConApp :: TyCon -> [TypeRep] -> TypeRep
mkTyConApp tc@(TyCon tc_k _ _ _) []
= TypeRep tc_k tc [] -- optimisation: all derived Typeable instances
-- end up here, and it helps generate smaller
-- code for derived Typeable.
mkTyConApp tc@(TyCon tc_k _ _ _) args
= TypeRep (fingerprintFingerprints (tc_k : arg_ks)) tc args
where
arg_ks = [k | TypeRep k _ _ <- args]
-- | A special case of 'mkTyConApp', which applies the function
-- type constructor to a pair of types.
mkFunTy :: TypeRep -> TypeRep -> TypeRep
mkFunTy f a = mkTyConApp funTc [f,a]
-- | Splits a type constructor application
splitTyConApp :: TypeRep -> (TyCon,[TypeRep])
splitTyConApp (TypeRep _ tc trs) = (tc,trs)
-- | Applies a type to a function type. Returns: @'Just' u@ if the
-- first argument represents a function of type @t -> u@ and the
-- second argument represents a function of type @t@. Otherwise,
-- returns 'Nothing'.
funResultTy :: TypeRep -> TypeRep -> Maybe TypeRep
funResultTy trFun trArg
= case splitTyConApp trFun of
(tc, [t1,t2]) | tc == funTc && t1 == trArg -> Just t2
_ -> Nothing
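-- A hedged usage sketch (not part of the original module; the argument types
-- are example choices): applying the representation of @Int -> Bool@ to the
-- representation of @Int@ yields the representation of @Bool@.
_exampleFunResult :: Maybe TypeRep
_exampleFunResult =
  funResultTy (typeOf (undefined :: Int -> Bool)) (typeOf (undefined :: Int))
  -- == Just (typeOf (undefined :: Bool))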
-- | Adds a TypeRep argument to a TypeRep.
mkAppTy :: TypeRep -> TypeRep -> TypeRep
mkAppTy (TypeRep tr_k tc trs) arg_tr
= let (TypeRep arg_k _ _) = arg_tr
in TypeRep (fingerprintFingerprints [tr_k,arg_k]) tc (trs++[arg_tr])
-- | Builds a 'TyCon' object representing a type constructor. An
-- implementation of "Data.Typeable" should ensure that the following holds:
--
-- > A==A' ^ B==B' ^ C==C' ==> mkTyCon A B C == mkTyCon A' B' C'
--
--
mkTyCon3 :: String -- ^ package name
-> String -- ^ module name
-> String -- ^ the name of the type constructor
-> TyCon -- ^ A unique 'TyCon' object
mkTyCon3 pkg modl name =
TyCon (fingerprintString (unwords [pkg, modl, name])) pkg modl name
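-- A hedged usage sketch (not part of the original module; the package, module
-- and type-constructor names are example strings):
_exampleTyCon :: TyCon
_exampleTyCon = mkTyCon3 "my-package" "My.Module" "MyType"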
----------------- Observation ---------------------
-- | Observe the type constructor of a type representation
typeRepTyCon :: TypeRep -> TyCon
typeRepTyCon (TypeRep _ tc _) = tc
-- | Observe the argument types of a type representation
typeRepArgs :: TypeRep -> [TypeRep]
typeRepArgs (TypeRep _ _ args) = args
-- | Observe string encoding of a type representation
{-# DEPRECATED tyConString "renamed to tyConName; tyConModule and tyConPackage are also available." #-}
tyConString :: TyCon -> String
tyConString = tyConName
-------------------------------------------------------------
--
-- The Typeable class and friends
--
-------------------------------------------------------------
{- Note [Memoising typeOf]
~~~~~~~~~~~~~~~~~~~~~~~~~~
IMPORTANT: we don't want to recalculate the type-rep once per
call to the dummy argument. This is what went wrong in Trac #3245
So we help GHC by manually keeping the 'rep' *outside* the value
lambda, thus
typeOfDefault :: forall t a. (Typeable1 t, Typeable a) => t a -> TypeRep
typeOfDefault = \_ -> rep
where
rep = typeOf1 (undefined :: t a) `mkAppTy`
typeOf (undefined :: a)
Notice the crucial use of scoped type variables here!
-}
-- | The class 'Typeable' allows a concrete representation of a type to
-- be calculated.
class Typeable a where
typeOf :: a -> TypeRep
-- ^ Takes a value of type @a@ and returns a concrete representation
-- of that type. The /value/ of the argument should be ignored by
-- any instance of 'Typeable', so that it is safe to pass 'undefined' as
-- the argument.
-- | Variant for unary type constructors
class Typeable1 t where
typeOf1 :: t a -> TypeRep
#ifdef __GLASGOW_HASKELL__
-- | For defining a 'Typeable' instance from any 'Typeable1' instance.
typeOfDefault :: forall t a. (Typeable1 t, Typeable a) => t a -> TypeRep
typeOfDefault = \_ -> rep
where
rep = typeOf1 (undefined :: t a) `mkAppTy`
typeOf (undefined :: a)
-- Note [Memoising typeOf]
#else
-- | For defining a 'Typeable' instance from any 'Typeable1' instance.
typeOfDefault :: (Typeable1 t, Typeable a) => t a -> TypeRep
typeOfDefault x = typeOf1 x `mkAppTy` typeOf (argType x)
where
argType :: t a -> a
argType = undefined
#endif
-- | Variant for binary type constructors
class Typeable2 t where
typeOf2 :: t a b -> TypeRep
#ifdef __GLASGOW_HASKELL__
-- | For defining a 'Typeable1' instance from any 'Typeable2' instance.
typeOf1Default :: forall t a b. (Typeable2 t, Typeable a) => t a b -> TypeRep
typeOf1Default = \_ -> rep
where
rep = typeOf2 (undefined :: t a b) `mkAppTy`
typeOf (undefined :: a)
-- Note [Memoising typeOf]
#else
-- | For defining a 'Typeable1' instance from any 'Typeable2' instance.
typeOf1Default :: (Typeable2 t, Typeable a) => t a b -> TypeRep
typeOf1Default x = typeOf2 x `mkAppTy` typeOf (argType x)
where
argType :: t a b -> a
argType = undefined
#endif
-- | Variant for 3-ary type constructors
class Typeable3 t where
typeOf3 :: t a b c -> TypeRep
#ifdef __GLASGOW_HASKELL__
-- | For defining a 'Typeable2' instance from any 'Typeable3' instance.
typeOf2Default :: forall t a b c. (Typeable3 t, Typeable a) => t a b c -> TypeRep
typeOf2Default = \_ -> rep
where
rep = typeOf3 (undefined :: t a b c) `mkAppTy`
typeOf (undefined :: a)
-- Note [Memoising typeOf]
#else
-- | For defining a 'Typeable2' instance from any 'Typeable3' instance.
typeOf2Default :: (Typeable3 t, Typeable a) => t a b c -> TypeRep
typeOf2Default x = typeOf3 x `mkAppTy` typeOf (argType x)
where
argType :: t a b c -> a
argType = undefined
#endif
-- | Variant for 4-ary type constructors
class Typeable4 t where
typeOf4 :: t a b c d -> TypeRep
#ifdef __GLASGOW_HASKELL__
-- | For defining a 'Typeable3' instance from any 'Typeable4' instance.
typeOf3Default :: forall t a b c d. (Typeable4 t, Typeable a) => t a b c d -> TypeRep
typeOf3Default = \_ -> rep
where
rep = typeOf4 (undefined :: t a b c d) `mkAppTy`
typeOf (undefined :: a)
-- Note [Memoising typeOf]
#else
-- | For defining a 'Typeable3' instance from any 'Typeable4' instance.
typeOf3Default :: (Typeable4 t, Typeable a) => t a b c d -> TypeRep
typeOf3Default x = typeOf4 x `mkAppTy` typeOf (argType x)
where
argType :: t a b c d -> a
argType = undefined
#endif
-- | Variant for 5-ary type constructors
class Typeable5 t where
typeOf5 :: t a b c d e -> TypeRep
#ifdef __GLASGOW_HASKELL__
-- | For defining a 'Typeable4' instance from any 'Typeable5' instance.
typeOf4Default :: forall t a b c d e. (Typeable5 t, Typeable a) => t a b c d e -> TypeRep
typeOf4Default = \_ -> rep
where
rep = typeOf5 (undefined :: t a b c d e) `mkAppTy`
typeOf (undefined :: a)
-- Note [Memoising typeOf]
#else
-- | For defining a 'Typeable4' instance from any 'Typeable5' instance.
typeOf4Default :: (Typeable5 t, Typeable a) => t a b c d e -> TypeRep
typeOf4Default x = typeOf5 x `mkAppTy` typeOf (argType x)
where
argType :: t a b c d e -> a
argType = undefined
#endif
-- | Variant for 6-ary type constructors
class Typeable6 t where
typeOf6 :: t a b c d e f -> TypeRep
#ifdef __GLASGOW_HASKELL__
-- | For defining a 'Typeable5' instance from any 'Typeable6' instance.
typeOf5Default :: forall t a b c d e f. (Typeable6 t, Typeable a) => t a b c d e f -> TypeRep
typeOf5Default = \_ -> rep
where
rep = typeOf6 (undefined :: t a b c d e f) `mkAppTy`
typeOf (undefined :: a)
-- Note [Memoising typeOf]
#else
-- | For defining a 'Typeable5' instance from any 'Typeable6' instance.
typeOf5Default :: (Typeable6 t, Typeable a) => t a b c d e f -> TypeRep
typeOf5Default x = typeOf6 x `mkAppTy` typeOf (argType x)
where
argType :: t a b c d e f -> a
argType = undefined
#endif
-- | Variant for 7-ary type constructors
class Typeable7 t where
typeOf7 :: t a b c d e f g -> TypeRep
#ifdef __GLASGOW_HASKELL__
-- | For defining a 'Typeable6' instance from any 'Typeable7' instance.
typeOf6Default :: forall t a b c d e f g. (Typeable7 t, Typeable a) => t a b c d e f g -> TypeRep
typeOf6Default = \_ -> rep
where
rep = typeOf7 (undefined :: t a b c d e f g) `mkAppTy`
typeOf (undefined :: a)
-- Note [Memoising typeOf]
#else
-- | For defining a 'Typeable6' instance from any 'Typeable7' instance.
typeOf6Default :: (Typeable7 t, Typeable a) => t a b c d e f g -> TypeRep
typeOf6Default x = typeOf7 x `mkAppTy` typeOf (argType x)
where
argType :: t a b c d e f g -> a
argType = undefined
#endif
#ifdef __GLASGOW_HASKELL__
-- Given a @Typeable@/n/ instance for an /n/-ary type constructor,
-- define the instances for partial applications.
-- Programmers using non-GHC implementations must do this manually
-- for each type constructor.
-- (The INSTANCE_TYPEABLE/n/ macros in Typeable.h include this.)
-- | One Typeable instance for all Typeable1 instances
instance (Typeable1 s, Typeable a)
=> Typeable (s a) where
typeOf = typeOfDefault
-- | One Typeable1 instance for all Typeable2 instances
instance (Typeable2 s, Typeable a)
=> Typeable1 (s a) where
typeOf1 = typeOf1Default
-- | One Typeable2 instance for all Typeable3 instances
instance (Typeable3 s, Typeable a)
=> Typeable2 (s a) where
typeOf2 = typeOf2Default
-- | One Typeable3 instance for all Typeable4 instances
instance (Typeable4 s, Typeable a)
=> Typeable3 (s a) where
typeOf3 = typeOf3Default
-- | One Typeable4 instance for all Typeable5 instances
instance (Typeable5 s, Typeable a)
=> Typeable4 (s a) where
typeOf4 = typeOf4Default
-- | One Typeable5 instance for all Typeable6 instances
instance (Typeable6 s, Typeable a)
=> Typeable5 (s a) where
typeOf5 = typeOf5Default
-- | One Typeable6 instance for all Typeable7 instances
instance (Typeable7 s, Typeable a)
=> Typeable6 (s a) where
typeOf6 = typeOf6Default
#endif /* __GLASGOW_HASKELL__ */
----------------- Showing TypeReps --------------------
instance Show TypeRep where
showsPrec p (TypeRep _ tycon tys) =
case tys of
[] -> showsPrec p tycon
[x] | tycon == listTc -> showChar '[' . shows x . showChar ']'
[a,r] | tycon == funTc -> showParen (p > 8) $
showsPrec 9 a .
showString " -> " .
showsPrec 8 r
xs | isTupleTyCon tycon -> showTuple xs
| otherwise ->
showParen (p > 9) $
showsPrec p tycon .
showChar ' ' .
showArgs tys
showsTypeRep :: TypeRep -> ShowS
showsTypeRep = shows
instance Show TyCon where
showsPrec _ t = showString (tyConName t)
isTupleTyCon :: TyCon -> Bool
isTupleTyCon (TyCon _ _ _ ('(':',':_)) = True
isTupleTyCon _ = False
-- Some (Show.TypeRep) helpers:
showArgs :: Show a => [a] -> ShowS
showArgs [] = id
showArgs [a] = showsPrec 10 a
showArgs (a:as) = showsPrec 10 a . showString " " . showArgs as
showTuple :: [TypeRep] -> ShowS
showTuple args = showChar '('
. (foldr (.) id $ intersperse (showChar ',')
$ map (showsPrec 10) args)
. showChar ')'
#if defined(__GLASGOW_HASKELL__)
listTc :: TyCon
listTc = typeRepTyCon (typeOf [()])
funTc :: TyCon
funTc = mkTyCon3 "ghc-prim" "GHC.Types" "->"
#endif
-------------------------------------------------------------
--
-- Instances of the Typeable classes for Prelude types
--
-------------------------------------------------------------
#include "Typeable.h"
INSTANCE_TYPEABLE0((),unitTc,"()")
INSTANCE_TYPEABLE1([],listTc,"[]")
INSTANCE_TYPEABLE1(Maybe,maybeTc,"Maybe")
INSTANCE_TYPEABLE1(Ratio,ratioTc,"Ratio")
#if defined(__GLASGOW_HASKELL__)
{-
TODO: Deriving this instance fails with:
libraries/base/Data/Typeable.hs:589:1:
Can't make a derived instance of `Typeable2 (->)':
The last argument of the instance must be a data or newtype application
In the stand-alone deriving instance for `Typeable2 (->)'
-}
instance Typeable2 (->) where { typeOf2 _ = mkTyConApp funTc [] }
#else
INSTANCE_TYPEABLE2((->),funTc,"->")
#endif
INSTANCE_TYPEABLE1(IO,ioTc,"IO")
#if defined(__GLASGOW_HASKELL__) || defined(__HUGS__)
-- Types defined in GHC.MVar
INSTANCE_TYPEABLE1(MVar,mvarTc,"MVar" )
#endif
INSTANCE_TYPEABLE2(Array,arrayTc,"Array")
INSTANCE_TYPEABLE2(IOArray,iOArrayTc,"IOArray")
#ifdef __GLASGOW_HASKELL__
-- Hugs has these too, but their Typeable<n> instances are defined
-- elsewhere to keep this module within Haskell 98.
-- This is important because every invocation of runhugs or ffihugs
-- uses this module via Data.Dynamic.
INSTANCE_TYPEABLE2(ST,stTc,"ST")
INSTANCE_TYPEABLE2(STRef,stRefTc,"STRef")
INSTANCE_TYPEABLE3(STArray,sTArrayTc,"STArray")
#endif
#ifndef __NHC__
INSTANCE_TYPEABLE2((,),pairTc,"(,)")
INSTANCE_TYPEABLE3((,,),tup3Tc,"(,,)")
INSTANCE_TYPEABLE4((,,,),tup4Tc,"(,,,)")
INSTANCE_TYPEABLE5((,,,,),tup5Tc,"(,,,,)")
INSTANCE_TYPEABLE6((,,,,,),tup6Tc,"(,,,,,)")
INSTANCE_TYPEABLE7((,,,,,,),tup7Tc,"(,,,,,,)")
#endif /* __NHC__ */
INSTANCE_TYPEABLE1(Ptr,ptrTc,"Ptr")
INSTANCE_TYPEABLE1(FunPtr,funPtrTc,"FunPtr")
#ifndef __GLASGOW_HASKELL__
INSTANCE_TYPEABLE1(ForeignPtr,foreignPtrTc,"ForeignPtr")
#endif
INSTANCE_TYPEABLE1(StablePtr,stablePtrTc,"StablePtr")
INSTANCE_TYPEABLE1(IORef,iORefTc,"IORef")
-------------------------------------------------------
--
-- Generate Typeable instances for standard datatypes
--
-------------------------------------------------------
INSTANCE_TYPEABLE0(Bool,boolTc,"Bool")
INSTANCE_TYPEABLE0(Char,charTc,"Char")
INSTANCE_TYPEABLE0(Float,floatTc,"Float")
INSTANCE_TYPEABLE0(Double,doubleTc,"Double")
INSTANCE_TYPEABLE0(Int,intTc,"Int")
#ifndef __NHC__
INSTANCE_TYPEABLE0(Word,wordTc,"Word" )
#endif
INSTANCE_TYPEABLE0(Integer,integerTc,"Integer")
INSTANCE_TYPEABLE0(Ordering,orderingTc,"Ordering")
#ifndef __GLASGOW_HASKELL__
INSTANCE_TYPEABLE0(Handle,handleTc,"Handle")
#endif
INSTANCE_TYPEABLE0(Int8,int8Tc,"Int8")
INSTANCE_TYPEABLE0(Int16,int16Tc,"Int16")
INSTANCE_TYPEABLE0(Int32,int32Tc,"Int32")
INSTANCE_TYPEABLE0(Int64,int64Tc,"Int64")
INSTANCE_TYPEABLE0(Word8,word8Tc,"Word8" )
INSTANCE_TYPEABLE0(Word16,word16Tc,"Word16")
INSTANCE_TYPEABLE0(Word32,word32Tc,"Word32")
INSTANCE_TYPEABLE0(Word64,word64Tc,"Word64")
INSTANCE_TYPEABLE0(TyCon,tyconTc,"TyCon")
INSTANCE_TYPEABLE0(TypeRep,typeRepTc,"TypeRep")
#ifdef __GLASGOW_HASKELL__
{-
TODO: This can't be derived currently:
libraries/base/Data/Typeable.hs:674:1:
Can't make a derived instance of `Typeable RealWorld':
The last argument of the instance must be a data or newtype application
In the stand-alone deriving instance for `Typeable RealWorld'
-}
realWorldTc :: TyCon; \
realWorldTc = mkTyCon3 "ghc-prim" "GHC.Types" "RealWorld"; \
instance Typeable RealWorld where { typeOf _ = mkTyConApp realWorldTc [] }
#endif
|
mightymoose/liquidhaskell
|
benchmarks/base-4.5.1.0/Data/Typeable/Internal.hs
|
bsd-3-clause
| 18,347 | 5 | 15 | 3,704 | 3,526 | 1,978 | 1,548 | -1 | -1 |
{- |
Module : Data.KMeans
Copyright : (c) Keegan Carruthers-Smith, 2009
License : BSD 3 Clause
Maintainer : [email protected]
Stability : experimental
A simple implementation of the standard k-means clustering algorithm: <http://en.wikipedia.org/wiki/K-means_clustering>. K-means clustering partitions points into clusters, with each point belonging to the cluster with the nearest mean. As the general problem is NP-hard, the standard algorithm, which is relatively fast, is a heuristic and is not guaranteed to converge to a global optimum. Varying the input order, from which the initial clusters are generated, can yield different results. For degenerate and malicious cases, the algorithm may take exponential time.
-}
{-# LANGUAGE ScopedTypeVariables #-}
module Data.KMeans (kmeans)
where
import Data.List (sort, groupBy, minimumBy)
import Data.Function (on)
import Data.Ord (comparing)
import Language.Haskell.Liquid.List (transpose)
dist :: [Double] -> [Double] -> Double
dist a b = sqrt . sum $ zipWith (\x y-> (x-y) ^ 2) a b
centroid :: Int -> [[Double]] -> [Double]
centroid n points = map (flip (/) l . sum) $ transpose n points
where l = fromIntegral $ length points
closest :: Int -> [[Double]] -> [Double] -> [Double]
closest n points point = minimumBy (comparing $ dist point) points
recluster' :: Int -> [[Double]] -> [[Double]] -> [[[Double]]]
recluster' n centroids points = map (map snd) $ groupBy ((==) `on` fst) reclustered
where reclustered = sort [(closest n centroids a, a) | a <- points]
recluster :: Int -> [[[Double]]] -> [[[Double]]]
recluster n clusters = recluster' n centroids $ concat clusters
where centroids = map (centroid n) clusters
part :: (Eq a) => Int -> [a] -> [[a]]
part x ys
| zs' == [] = [zs]
| otherwise = zs : part x zs'
where (zs, zs') = splitAt x ys
-- | Recluster points
kmeans'' n clusters
| clusters == clusters' = clusters
| otherwise = kmeans'' n clusters'
where clusters' = recluster n clusters
--blocker :: Int -> Int -> [[Double]] -> [[[Double]]]
--blocker n l points = part l points
kmeans' n k points = kmeans'' n $ part l points
where l = (length points + k - 1) `div` k
-- | Cluster points in a Euclidean space, represented as lists of Doubles, into at most k clusters.
-- The initial clusters are chosen arbitrarily.
{-# ANN kmeans "n: Int -> k:Int -> points:[{v:[Double] | len(v) = n}] -> [[{ v: [Double] | len(v) = n}]]" #-}
kmeans :: Int -> Int -> [[Double]] -> [[[Double]]]
--kmeans n k points = kmeans' n k points
kmeans n = kmeansGen n id
-- | A generalized kmeans function. This function operates not on points, but on an arbitrary type which may be projected into a Euclidean space. Since the projection may be chosen freely, this allows for weighting dimensions to different degrees, etc.
--{-# ANN kmeansGen "n: Int -> f:(a -> {v:[Double] | len(v)=n}) -> k:Int -> points:[a] -> [[a]]" #-}
--kmeansGen :: Int -> (a -> [Double]) -> Int -> [a] -> [[a]]
--kmeansGen n f k points = kmeans' n k points
kmeansGen n f k points = map (map id) . kmeans' n k . map id $ points
--kmeansGen n f k points = map (map getVal) . kmeans' n k . map (\x -> WrapType (f x) x) $ points
--kmeansGen n f k points = map (map getVal) clusters
-- where wpoints = map (\x -> WrapType (f x) x) points
-- clusters = kmeans' n k wpoints
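-- A hedged usage sketch (not part of the original module; the coordinates are
-- made-up sample data): clustering six 2-dimensional points into two groups.
_exampleClusters :: [[[Double]]]
_exampleClusters = kmeans 2 2 [[0,0],[0,1],[1,0],[9,9],[9,8],[8,9]]
-- The three points near the origin are expected to land in one cluster and the
-- three points near (9,9) in the other, though the result depends on input order.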
|
mightymoose/liquidhaskell
|
benchmarks/kmeans-0.1.2/Data/KMeans0.hs
|
bsd-3-clause
| 3,397 | 0 | 11 | 700 | 731 | 398 | 333 | 34 | 1 |
module T9230 where
test :: Monad m => m ()
test = do
return ()
|
sdiehl/ghc
|
testsuite/tests/warnings/should_compile/T9230.hs
|
bsd-3-clause
| 65 | 0 | 8 | 17 | 34 | 17 | 17 | 4 | 1 |
{-# LANGUAGE Trustworthy #-}
module CheckB_Aux (
trace
) where
import qualified Debug.Trace as D
import qualified Data.ByteString.Lazy.Char8 as BS
-- | Allowed declassification
trace :: String -> a -> a
trace s = D.trace $ s ++ show a3
a3 :: BS.ByteString
a3 = BS.take 3 $ BS.repeat 'a'
|
urbanslug/ghc
|
testsuite/tests/safeHaskell/check/CheckB_Aux.hs
|
bsd-3-clause
| 302 | 0 | 7 | 66 | 89 | 52 | 37 | 9 | 1 |
import Anthill (World, step, initial_state)
import Draw (draw_world)
import Graphics.Gloss.Interface.Pure.Simulate
-- kinda settings
speed = 10 -- steps/second
display = InWindow "anthill" (1200, 800) (50, 50)
-- end kinda settings
main = simulate display black speed initial_state draw_world (\_ _ m -> step m)
|
AnotherKamila/anthill
|
main.hs
|
isc
| 316 | 0 | 8 | 48 | 96 | 57 | 39 | 6 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Tests.Math.Hclaws.ConservationLaws (
tests
) where
import Test.Tasty (TestTree, testGroup)
import qualified Test.Tasty.QuickCheck as QC
import Test.Tasty.TH (testGroupGenerator)
import qualified Math.Hclaws.ConservationLaws as CL
import qualified Math.Hclaws.Systems.Burgers as B
tests :: TestTree
tests = $(testGroupGenerator)
|
mikebenfield/hclaws
|
test/Tests/Math/Hclaws/ConservationLaws.hs
|
isc
| 376 | 0 | 6 | 46 | 80 | 55 | 25 | 10 | 1 |
{-|
This module contains types that describe the SQL statements to be executed by
the DBMS.
-}
module Database.Toy.Internal.Language.Command where
import Database.Toy.Internal.Prelude
type TableName = String
type ColumnName = String
type IndexName = String
type Alias = String
data Command = CreateTable TableName [(ColumnName, ColumnType)]
| CreateIndex IndexName TableName [ColumnName]
| DropTable TableName
| DropIndex IndexName
| Select Projection [TableName] (Maybe WhereClause) (Maybe Int)
| Insert TableName [Value]
| Update TableName [(ColumnName, Value)] (Maybe WhereClause)
| Delete TableName (Maybe WhereClause)
| Vacuum
deriving (Show, Read, Eq)
data ColumnType = ColumnTypeInt
| ColumnTypeDouble
| ColumnTypeVarchar Int
deriving (Show, Read, Eq)
data Projection = All
| Some [Selector]
deriving (Show, Read, Eq)
data Selector = OneColumn ColumnSelector Alias
| WholeTable TableName
deriving (Show, Read, Eq)
data ColumnSelector = Column ColumnName
| QualifiedColumn TableName ColumnName
deriving (Show, Read, Eq)
data WhereClause = And WhereClause WhereClause
| Or WhereClause WhereClause
| Gt WhereSelector WhereSelector
| Ge WhereSelector WhereSelector
| Eq WhereSelector WhereSelector
| Ne WhereSelector WhereSelector
| Le WhereSelector WhereSelector
| Lt WhereSelector WhereSelector
deriving (Show, Read, Eq)
data WhereSelector = WhereSelectorColumn ColumnSelector
| WhereSelectorValue Value
deriving (Show, Read, Eq)
data Value = ValueInt !Int
| ValueDouble !Double
| ValueString !String
deriving (Show, Read, Eq)
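-- Hedged example values (not part of the original module; the table name,
-- column names and literals are made up, and the project prelude is assumed
-- to re-export 'Nothing'):
--   INSERT INTO users VALUES (1, 'alice')
--   SELECT name AS n FROM users
_exampleInsert :: Command
_exampleInsert = Insert "users" [ValueInt 1, ValueString "alice"]
_exampleSelect :: Command
_exampleSelect = Select (Some [OneColumn (Column "name") "n"]) ["users"] Nothing Nothing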
|
dancingrobot84/toydb
|
src/Database/Toy/Internal/Language/Command.hs
|
mit
| 1,962 | 0 | 8 | 638 | 437 | 250 | 187 | 51 | 0 |
main = do
print [1..3]
print $[1..3] ++ [4, 5]
|
shigemk2/haskell_abc
|
renketsu.hs
|
mit
| 51 | 0 | 9 | 14 | 39 | 20 | 19 | 3 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Base.Object
( ObjectType(..)
, Object
, object
, obj
, objMatrl
) where
import Math.Sphere
import Base.Material
import Base.Intersection
import Control.Lens
import Control.Applicative ((<$>))
data Object' a = Object { _obj :: a
, _objMatrl :: Material
}
makeLenses ''Object'
instance Intersectable a => Intersectable (Object' a) where
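    -- Delegate to the wrapped shape's 'intersect' and, on a hit, overwrite the
    -- resulting intersection's material with this object's own material.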
intersect o m r = (& matrl .~ (o ^. objMatrl)) <$> intersect (o ^. obj) m r
data ObjectType =
Sphr Sphere
instance Intersectable ObjectType where
intersect (Sphr s) = intersect s
type Object = Object' ObjectType
object :: ObjectType -> Material -> Object
object o m = Object { _obj = o
, _objMatrl = m
}
|
burz/Rayzer
|
Base/Object.hs
|
mit
| 801 | 0 | 10 | 234 | 236 | 133 | 103 | 25 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module System.Directory.Watchman.Clockspec
( Clockspec
, ClockId(..)
, renderClockspec
, mkNamedCursor
) where
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BC8
import System.Directory.Watchman.BSER
newtype NamedCursor = NamedCursor ByteString
deriving (Show, Eq, Ord)
mkNamedCursor :: ByteString -> NamedCursor
mkNamedCursor name
| "n:" `BC8.isPrefixOf` name = NamedCursor name
| otherwise = error "Named Cursor must begin with \"n:\""
newtype ClockId = ClockId ByteString
deriving (Show, Eq, Ord)
data Clockspec
= Clockspec_Epoch Int -- TODO Should this not be Int64 or Double?
| Clockspec_Cursor NamedCursor
| Clockspec_ClockId ClockId
deriving (Show, Eq, Ord)
renderClockspec :: Clockspec -> BSERValue
renderClockspec (Clockspec_Epoch n) = compactBSERInt n
renderClockspec (Clockspec_Cursor (NamedCursor s)) = BSERString s
renderClockspec (Clockspec_ClockId (ClockId s)) = BSERString s
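-- A hedged usage sketch (not part of the original module; the cursor name is
-- an example): named cursors must carry the "n:" prefix, and any clockspec can
-- be rendered to a BSER value for the wire protocol.
_exampleClock :: BSERValue
_exampleClock = renderClockspec (Clockspec_Cursor (mkNamedCursor "n:my-watch"))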
|
bitc/hs-watchman
|
src/System/Directory/Watchman/Clockspec.hs
|
mit
| 1,017 | 0 | 9 | 177 | 252 | 140 | 112 | 26 | 1 |
{-# LANGUAGE Arrows #-}
module Main where
import Control.Monad.Loops
import FRP.Yampa as Yampa
import FRP.Yampa.Geometry
import Data.IORef
import Data.Maybe
import Data.Serialize
import Data.Time
import Control.Concurrent
import Control.Concurrent.MVar
import Control.Exception
import Network.Socket hiding (send, sendTo, recv, recvFrom)
import Network.Socket.ByteString
import Game.Shared.Types
import Game.Shared.Networking
import Game.Shared.Concurrency
import Game.Shared.Object
import Game.Shared.Physics
import Game.Shared.Arrows
import Game.Server.Object
import Game.Server.Networking
import Game.Server.Objects.GameSetup
import IdentityList
-- |Gets the current time of day in seconds, used as the server's tick counter
getTicks :: IO Double
getTicks = do
now <- getCurrentTime
let dayTime = utctDayTime now
rationalDayTime = toRational dayTime
integralVal = fromRational rationalDayTime
integerVal = integralVal
return integralVal
--
-- Based on code at:
-- http://lambdor.net/?p=59
--
-- |Entrypoint function
main :: IO ()
main = do
tickRef <- newIORef =<< getTicks
sock <- createServerSocket
msgs <- initMVar :: IO (MVar ServerNetInput)
conns <- initMVar :: IO (MVar [Socket])
reactimate (initialise tickRef sock msgs conns) (sense tickRef sock msgs) (actuate sock conns) (process objs)
where
objs = listToIL [setupBasicGame]
-- |Initialises the server environment and starts listening for client connections
initialise :: IORef Double -- ^The IOReference to the tick counter
-> Socket -- ^The server socket
-> MVar ServerNetInput -- ^The MVar referencing inbound network events
-> MVar [Socket] -- ^The MVar referencing all client connections
-> IO ServerNetInput -- ^Initial network input
initialise tickRef sock msgs conns = do
-- Start listening
forkIO (serverAcceptLoop sock msgs conns)
-- Store the current time
t <- getTicks
writeIORef tickRef t
return []
-- |Performs input handling for the reactimate loop, returning both
-- the number of seconds since the last call and all network input events.
sense :: IORef Double -- ^The IOReference to the tick counter
-> Socket -- ^The server socket
-> MVar ServerNetInput -- ^The MVar referencing inbound network events
-> Bool -- ^Unused
-> IO (DTime, Maybe ServerNetInput) -- ^Number of seconds since last call and the network input
sense tickRef sock msgs _ = do
-- Compute the time difference between now and the last frame
t <- getTicks
t' <- readIORef tickRef
-- Grab all of the network events
netEvents <- popAllMVar msgs
-- Write the current time and return the time delta
writeIORef tickRef t
let dt = t - t'
return (dt, Just netEvents)
-- |Performs output handling for the reactimate loop, sending all output
-- messages produced by the objects to the clients and printing console output
actuate :: Socket -- ^The server socket
-> MVar [Socket] -- ^The MVar referencing all client connections
-> Bool -- ^Unused
-> IL ObjOutput -- ^Object output identity list
-> IO Bool -- ^True if the server should shutdown
actuate sock conns _ oos = do
-- Send any packets to the clients (ignoring empty lists)
let objOutputs = elemsIL oos
allConns <- readMVar conns
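    -- ooGlobalMessages go to every connected client, ooGlobalExceptMessages go
    -- to every client except the named socket, and ooMessages each target one
    -- specific client socket.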
mapM_ (\oo -> mapM_ (\msg -> mapM_ (\sock -> sendMessage sock msg) allConns) (eventToList (ooGlobalMessages oo))) objOutputs
mapM_ (\oo -> mapM_ (\(notSock, msg) -> mapM_ (\sock -> if sock /= notSock then sendMessage sock msg else return ()) allConns) (eventToList (ooGlobalExceptMessages oo))) objOutputs
mapM_ (\oo -> mapM_ (\(sock, msg) -> sendMessage sock msg) (eventToList (ooMessages oo))) objOutputs
-- Output console messages
mapM_ (\oo -> let con = ooConsole oo in if (not . null) con then putStrLn con else return ()) objOutputs
return $ null (keysIL oos)
-- |Performs the game message pump, passing the inputs into each
-- game object, producing a list of the object outputs
process :: IL Object -- ^Identity list of all the object signal functions
-> SF ServerNetInput (IL ObjOutput)
process objs = proc input -> do
rec
objOut <- core objs -< (input, objOut)
returnA -< objOut
-- |Runs each object signal function in turn, and executes the killAndSpawn
-- function when finished
core :: IL Object -- ^Identity list of all the object signal functions
-> SF (ServerNetInput, IL ObjOutput) (IL ObjOutput)
core objs = dpSwitch route objs (arr killAndSpawn >>> notYet) (\sfs f -> core (f sfs))
-- |Routes all events to each object
route :: (ServerNetInput, IL ObjOutput) -- ^Network input
-> IL sf -- ^Identity list of all the object signal functions
-> IL (ObjInput, sf) -- ^List of inputs for each signal function
route (messages, oos) objs = mapIL route' objs
where
route' (k, obj) = (ObjInput {
oiId = k,
oiNetwork = messages,
oiAllObjects = allObjects,
oiCollidingWith = fmap snd (filter (\(a, _) -> (goId a) == k) allCollisions)
}, obj)
allObjects = elemsIL (fmap ooGameObject oos)
allCollisions = findCollisions allObjects
-- |Reads the output from all the objects, performing their output kill requests and
-- output spawn requests
killAndSpawn :: ((ServerNetInput, IL ObjOutput), IL ObjOutput) -- ^Network input and the list of object outputs
-> Yampa.Event (IL Object -> IL Object)
killAndSpawn ((msgs, _), oos) = foldl (mergeBy (.)) noEvent events
where
events = [mergeBy (.) (ooKillRequest oo `tag` (deleteIL k))
(fmap (foldl (.) id . map insertIL_) (ooSpawnRequests oo))
| (k, oo) <- assocsIL oos]
|
Mattiemus/LaneWars
|
Server.hs
|
mit
| 5,873 | 1 | 18 | 1,409 | 1,398 | 745 | 653 | 102 | 3 |
module Import (module Export, fromJSONEither) where
import Protolude as Export
import Data.Aeson as Export
import Data.HashMap.Strict as Export (HashMap)
import Data.List.NonEmpty as Export (NonEmpty)
import Data.Vector as Export (Vector)
import Test.QuickCheck as Export hiding (Failure, Result, Success,
(.&.))
import qualified Data.Text as T
fromJSONEither :: FromJSON a => Value -> Either Text a
fromJSONEither a =
case fromJSON a of
Error e -> Left (T.pack e)
Success b -> Right b
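-- A hedged usage sketch (not part of the original module; the literal value is
-- only an example): decoding an aeson 'Value' directly into an 'Int'.
_exampleDecode :: Either Text Int
_exampleDecode = fromJSONEither (Number 3)   -- Right 3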
|
seagreen/hjsonschema
|
src/Import.hs
|
mit
| 617 | 0 | 11 | 202 | 163 | 98 | 65 | 14 | 2 |