| code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| string (5 to 1.03M chars) | string (5 to 90 chars) | string (4 to 158 chars) | 15 classes | int64 (5 to 1.03M) | int64 (0 to 53.9k) | int64 (2 to 4.17k) | int64 (0 to 365k) | int64 (3 to 317k) | int64 (1 to 171k) | int64 (1 to 146k) | int64 (-1 to 37.3k) | int64 (-1 to 1.31k) |
{-# LANGUAGE BangPatterns, DeriveDataTypeable, DeriveGeneric, FlexibleInstances, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Web.RTBBidder.Protocol.Adx.BidRequest.Device.DeviceType (DeviceType(..)) where
import Prelude ((+), (/), (.))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified GHC.Generics as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
data DeviceType = UNKNOWN_DEVICE
| HIGHEND_PHONE
| TABLET
| PERSONAL_COMPUTER
| CONNECTED_TV
| GAME_CONSOLE
deriving (Prelude'.Read, Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data,
Prelude'.Generic)
instance P'.Mergeable DeviceType
instance Prelude'.Bounded DeviceType where
minBound = UNKNOWN_DEVICE
maxBound = GAME_CONSOLE
instance P'.Default DeviceType where
defaultValue = UNKNOWN_DEVICE
toMaybe'Enum :: Prelude'.Int -> P'.Maybe DeviceType
toMaybe'Enum 0 = Prelude'.Just UNKNOWN_DEVICE
toMaybe'Enum 1 = Prelude'.Just HIGHEND_PHONE
toMaybe'Enum 2 = Prelude'.Just TABLET
toMaybe'Enum 3 = Prelude'.Just PERSONAL_COMPUTER
toMaybe'Enum 4 = Prelude'.Just CONNECTED_TV
toMaybe'Enum 5 = Prelude'.Just GAME_CONSOLE
toMaybe'Enum _ = Prelude'.Nothing
instance Prelude'.Enum DeviceType where
fromEnum UNKNOWN_DEVICE = 0
fromEnum HIGHEND_PHONE = 1
fromEnum TABLET = 2
fromEnum PERSONAL_COMPUTER = 3
fromEnum CONNECTED_TV = 4
fromEnum GAME_CONSOLE = 5
toEnum
= P'.fromMaybe
(Prelude'.error "hprotoc generated code: toEnum failure for type Web.RTBBidder.Protocol.Adx.BidRequest.Device.DeviceType")
. toMaybe'Enum
succ UNKNOWN_DEVICE = HIGHEND_PHONE
succ HIGHEND_PHONE = TABLET
succ TABLET = PERSONAL_COMPUTER
succ PERSONAL_COMPUTER = CONNECTED_TV
succ CONNECTED_TV = GAME_CONSOLE
succ _ = Prelude'.error "hprotoc generated code: succ failure for type Web.RTBBidder.Protocol.Adx.BidRequest.Device.DeviceType"
pred HIGHEND_PHONE = UNKNOWN_DEVICE
pred TABLET = HIGHEND_PHONE
pred PERSONAL_COMPUTER = TABLET
pred CONNECTED_TV = PERSONAL_COMPUTER
pred GAME_CONSOLE = CONNECTED_TV
pred _ = Prelude'.error "hprotoc generated code: pred failure for type Web.RTBBidder.Protocol.Adx.BidRequest.Device.DeviceType"
instance P'.Wire DeviceType where
wireSize ft' enum = P'.wireSize ft' (Prelude'.fromEnum enum)
wirePut ft' enum = P'.wirePut ft' (Prelude'.fromEnum enum)
wireGet 14 = P'.wireGetEnum toMaybe'Enum
wireGet ft' = P'.wireGetErr ft'
wireGetPacked 14 = P'.wireGetPackedEnum toMaybe'Enum
wireGetPacked ft' = P'.wireGetErr ft'
instance P'.GPB DeviceType
instance P'.MessageAPI msg' (msg' -> DeviceType) DeviceType where
getVal m' f' = f' m'
instance P'.ReflectEnum DeviceType where
reflectEnum
= [(0, "UNKNOWN_DEVICE", UNKNOWN_DEVICE), (1, "HIGHEND_PHONE", HIGHEND_PHONE), (2, "TABLET", TABLET),
(3, "PERSONAL_COMPUTER", PERSONAL_COMPUTER), (4, "CONNECTED_TV", CONNECTED_TV), (5, "GAME_CONSOLE", GAME_CONSOLE)]
reflectEnumInfo _
= P'.EnumInfo
(P'.makePNF (P'.pack ".Adx.BidRequest.Device.DeviceType") ["Web", "RTBBidder", "Protocol"] ["Adx", "BidRequest", "Device"]
"DeviceType")
["Web", "RTBBidder", "Protocol", "Adx", "BidRequest", "Device", "DeviceType.hs"]
[(0, "UNKNOWN_DEVICE"), (1, "HIGHEND_PHONE"), (2, "TABLET"), (3, "PERSONAL_COMPUTER"), (4, "CONNECTED_TV"),
(5, "GAME_CONSOLE")]
instance P'.TextType DeviceType where
tellT = P'.tellShow
getT = P'.getRead | hiratara/hs-rtb-bidder | src/Web/RTBBidder/Protocol/Adx/BidRequest/Device/DeviceType.hs | bsd-3-clause | 3,617 | 0 | 11 | 617 | 880 | 487 | 393 | 78 | 1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
module YX.Initialize
where
import Prelude (error)
import Control.Applicative (Applicative, (*>), pure)
import Control.Exception (Exception, catch, throwIO)
import Control.Monad ((>>=), foldM, unless, when)
import Control.Monad.IO.Class (liftIO)
import Data.Bool (Bool(False, True), (||), not, otherwise)
import Data.Either (Either(Left, Right))
import Data.Function (($), (.), const, flip)
import Data.Functor ((<$), (<$>))
import Data.IORef (IORef, atomicWriteIORef, newIORef, readIORef)
import qualified Data.List as List (map, null)
import qualified Data.List.NonEmpty as NonEmpty (head, toList)
import Data.Maybe (Maybe(Just, Nothing), fromMaybe)
import Data.Monoid ((<>))
import Data.String (String, fromString)
import System.IO (FilePath, IO)
import qualified System.IO as IO ({-print,-} putStrLn)
import Text.Show (Show, show)
import qualified Data.Aeson as Aeson (eitherDecode', encode)
import qualified Data.ByteString.Lazy as Lazy.ByteString (readFile, writeFile)
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HashMap (foldrWithKey, lookup)
import Data.Text (Text)
import qualified Data.Text as Text (unlines, unpack)
import qualified Data.Text.IO as Text (writeFile)
import Development.Shake
( ShakeOptions
( shakeFiles
-- , shakeLint
)
, (%>)
, (~>)
, shake
, shakeOptions
)
import qualified Development.Shake as Shake
( Action
-- , Lint(LintFSATrace)
, alternatives
, need
, newCache
, want
)
import System.Directory
( createDirectoryIfMissing
, doesDirectoryExist
, doesFileExist
, removeFile
)
import System.FilePath ((</>))
import qualified System.FilePath as FilePath
( dropTrailingPathSeparator
, makeRelative
, splitFileName
, takeDirectory
)
import System.FilePath.Glob (globDir1)
import qualified System.FilePath.Glob as Glob (compile)
import qualified System.Posix.Files as Posix (createSymbolicLink)
import YX.Paths
( ProjectRoot
, TypeOfStuff(CachedStuff, EnvironmentStuff)
, yxConfigs
, yxExeStuff
, yxShellStuff
, yxStuffFile
, yxStuffPath
)
import YX.Shell.Bash (mkBashrc)
import YX.Type.BuildTool (BuildTool(Cabal, Stack))
import qualified YX.Type.BuildTool as BuildTool (toText)
import YX.Type.CommandType (CommandType(Alias, Command, Symlink))
import YX.Type.ConfigFile
( ProjectConfig(ProjectConfig, _environments)
, Environment(Environment, _bin)
, Executable(Executable, _command, _type)
, parseProjectConfig
)
import YX.Type.Scm (Scm(Git))
import qualified YX.Type.Scm as Scm (toText)
import YX.Type.Shell (Shell(Bash))
type GlobPattern = String
-- | During initialization we only know where the project is, nothing more.
initializeProject
:: FilePath
-- ^ Absolute path to YX executable.
-> ProjectRoot
    -- ^ Absolute path to project root directory. Everything initialization
    -- does is relative to this directory.
-> IO ProjectConfig
initializeProject yxExe root = do
possibleConfig <- detectYxConfig root
cfgRef <- newIORef (Nothing :: Maybe ProjectConfig)
doYxStuff cfgRef yxExe root defaultYxConfig $ (root </>) <$> possibleConfig
readIORef cfgRef >>= \case
Nothing -> readCachedYxConfig $ yxStuffFile root CachedStuff "config.bin"
Just cfg -> pure cfg
where
defaultYxConfig = root </> NonEmpty.head yxConfigs
-- First file is considered to be the default. See 'yxConfigs' for more
-- details.
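-- A minimal usage sketch (the paths below are hypothetical):
--
-- > cfg <- initializeProject "/usr/local/bin/yx" "/home/alice/my-project"
--
-- The returned 'ProjectConfig' is the parsed (or freshly generated and
-- cached) project configuration for that directory.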
doYxStuff
:: IORef (Maybe ProjectConfig)
-> FilePath
-- ^ Absolute path to YX executable.
-> ProjectRoot
    -- ^ Absolute path to project root directory. Everything initialization
    -- does is relative to this directory.
-> FilePath
-- ^ Absolute path to YX project configuration, which is used when new one
-- has to be created.
-> Maybe FilePath
-- ^ Possibly absolute path to existing YX project configuration. 'Nothing'
-- if there is no such file.
-> IO ()
doYxStuff cfgRef yxExe root defaultYxConfig possibleYxConfig = shake opts $ do
getProjectCfg' <- Shake.newCache $ \(yxConfigChanged, cacheFile) -> liftIO
$ let memo r = r <$ atomicWriteIORef cfgRef (Just r)
getMemo = readIORef cfgRef
in
-- When configuration file has been changed then the cache must be
-- invalidated. To avoid reading cache file (efficiently stored
-- version of parsed configuration file) multiple times, we memoize
-- the parsing result.
if yxConfigChanged
then parseAndCacheYxConfig yxConfig cacheFile >>= memo
else getMemo >>= \case
Just r -> pure r
Nothing ->
-- We don't need to memoize cache file reads, because,
-- it is done for us by Shake. See 'Shake.newCache' for
-- more details.
readCachedYxConfig cacheFile
`catch` \(CacheDecodingException _ _) ->
-- This should happen only when cache file uses
-- old data format and we need to reparse
-- project config. As a result it may force
-- some additional rules to be executed, but
-- the result should always be consistent.
parseAndCacheYxConfig yxConfig cacheFile
let compileProjectCfg = getProjectCfg' . (True, )
getProjectCfg = getProjectCfg' . (False, )
Shake.want ["yx-initialization"]
-- We need to create YX project file only if there isn't one. To correctly
-- track the dependencies we need this rule to be a pattern rule instead of
-- phony rule.
yxConfig %> \out -> liftIO $ do
-- Even though this is not a phony rule, we need to check for existence
-- of the file, otherwise we would overwrite any user defined
-- configuration.
haveConfig <- doesFileExist out
unless haveConfig $ liftIO $ do
IO.putStrLn $ "Generating YX configuration file for this project: "
<> FilePath.makeRelative root out
createProjectConfig root >>= Text.writeFile out
IO.putStrLn ""
cfgCacheFile %> \out -> do
-- We expect yxConfig to be already present. See "yx-initialization" rule
-- for details.
Shake.need [yxConfig]
-- (BIG) TODO: Delete old artifacts in .yx-stuff dir. Currently we
-- would keep e.g. old links in "bin" dir.
() <$ compileProjectCfg out
yxShellStuff root "*" Bash </> "bashrc" %> \out -> do
Shake.need [cfgCacheFile]
cfg <- getProjectCfg cfgCacheFile
printGeneratingBashrc out
compileBashrc cfg (FilePath.makeRelative yxEnvStuff out) out
liftIO $ IO.putStrLn ""
yxShellStuff root "*" Bash </> "completion" %> \out -> do
printGeneratingBashrc out
-- TODO
yxShellStuff root "*" Bash </> "environment" %> \out -> do
printGeneratingBashrc out
-- TODO
yxShellStuff root "*" Bash </> "aliases" %> \out -> do
printGeneratingBashrc out
-- TODO
-- Combinator 'Shake.alternatives' allows us to use overlapping patterns.
Shake.alternatives $ do
-- Symbolic link to "yx" is a special case. We want to handle it
-- separately to make that fact explicit.
yxExeStuff root "*" </> "yx" %> \out ->
createExecutableLink root yxExe out
yxExeStuff root "*" </> "*" %> \out -> do
Shake.need [cfgCacheFile]
(envName, exeName) <- parseBinFileName yxEnvStuff out
cfg <- getProjectCfg cfgCacheFile
lookupExe cfg envName exeName >>= \exe -> case _type exe of
Alias -> error $ out
<> ": Trying to create binary, but shell alias was found."
Command -> createExecutableLink root yxExe out
Symlink ->
createExecutableLink root (Text.unpack $ _command exe) out
-- Top level rule:
"yx-initialization" ~> do
-- No matter what, we need to parse YX project configuration file, or
-- read the cached version if its available.
Shake.need [cfgCacheFile]
    -- We need to traverse the configuration file to know what tasks need
    -- to be executed.
getProjectCfg cfgCacheFile >>= mapHM_ envDependencies . _environments
where
yxConfig = fromMaybe defaultYxConfig possibleYxConfig
cfgCacheFile = yxStuffFile root CachedStuff "config.bin"
yxEnvStuff = yxStuffPath (Just root) (EnvironmentStuff Nothing)
opts = shakeOptions
{ shakeFiles = yxStuffPath (Just root) CachedStuff
-- , shakeLint = Just Shake.LintFSATrace
}
mapHM_ :: Applicative f => (k -> v -> f ()) -> HashMap k v -> f ()
mapHM_ f = HashMap.foldrWithKey (\k -> (*>) . f k) $ pure ()
forHM_ = flip mapHM_
envDependencies name Environment{_bin = bins} = do
needBin name "yx"
Shake.need [yxShellStuff root (Text.unpack name) Bash </> "bashrc"]
forHM_ bins $ \exeName Executable{_type = t} -> case t of
Alias -> pure () -- Aliases are handled by shell.
Command -> needBin name exeName
Symlink -> needBin name exeName
needBin name bin = do
Shake.need [yxExeStuff root (Text.unpack name) </> Text.unpack bin]
parseBinFileName dir file
    | haveParseError = error $ file <> ": Unexpected path when parsing \
\'yxStuff </> envName </> \"bin\" </> exeName'"
| otherwise = pure (envName, exeName)
where
haveParseError = List.null envName || List.null exeName
envName = FilePath.dropTrailingPathSeparator envDir
(envDir, _) = FilePath.splitFileName
$ FilePath.dropTrailingPathSeparator binDir
(binDir, exeName) = FilePath.splitFileName
$ FilePath.makeRelative dir file
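  -- For illustration (hypothetical paths): with @dir = ".yx-stuff/env"@ and
  -- @file = ".yx-stuff/env/default/bin/stack"@, the relative path is
  -- @"default/bin/stack"@, so 'parseBinFileName' yields @("default", "stack")@.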
lookupExe :: ProjectConfig -> String -> String -> Shake.Action Executable
lookupExe ProjectConfig{_environments = envs} envName exeName =
case HashMap.lookup envName' envs >>= HashMap.lookup exeName' . _bin of
Nothing -> error $ exeName <> ": Unable to find configuration for\
\ executable in environment '" <> envName <> "'"
Just r -> pure r
where
exeName' = fromString exeName
envName' = fromString envName
printGeneratingBashrc out = liftIO . IO.putStrLn
$ "Generating Bash *rc script:\n " <> FilePath.makeRelative root out
createExecutableLink :: FilePath -> FilePath -> FilePath -> Shake.Action ()
createExecutableLink root src dst = liftIO $ do
srcExists <- doesFileExist src
unless srcExists . error
$ src <> ": File not found when trying to create symbolic link: "
<> dst
-- We need to force the symlink existence, since the configuration may have
-- changed.
dstExists <- doesFileExist dst
when dstExists $ removeFile dst
IO.putStrLn $ "Creating symbolic link:\n " <> src <> " --> "
<> FilePath.makeRelative root dst <> "\n"
Posix.createSymbolicLink src dst
compileBashrc :: ProjectConfig -> FilePath -> FilePath -> Shake.Action ()
compileBashrc cfg relativeOut out =
liftIO . Text.writeFile out . mkBashrc cfg $ fromString envName
where
envName =
FilePath.dropTrailingPathSeparator $ FilePath.takeDirectory relativeOut
{-
prepareExecutable :: ProjectConfig -> FilePath -> FilePath -> Shake.Action ()
prepareExecutable cfg relativeOut out = prepareExecutable cfg relativeOut out
-}
-- {{{ Project Configuration File ---------------------------------------------
-- | Failed to parse\/decode YX project configuration cache file.
data CacheDecodingException = CacheDecodingException FilePath String
deriving Show
instance Exception CacheDecodingException
-- | Read YX project configuration cache file, which is just the same as
-- @yx.yaml@ or @yx.yml@ (see also 'yxConfigs'), but stored more efficiently.
--
-- /Throws: 'CacheDecodingException'/
readCachedYxConfig :: FilePath -> IO ProjectConfig
readCachedYxConfig cfg =
(Aeson.eitherDecode' <$> Lazy.ByteString.readFile cfg) >>= \case
Left e -> throwIO $ CacheDecodingException cfg e
-- TODO: Maybe we could try to parse the YAML file in this case.
Right r -> pure r
-- | Parse YX project configuration file @yx.yaml@ or @yx.yml@ (see also
-- 'yxConfigs').
--
-- /Throws: 'Data.Yaml.ParseException'/
parseAndCacheYxConfig :: FilePath -> FilePath -> IO ProjectConfig
parseAndCacheYxConfig yxConfig out = parseProjectConfig yxConfig >>= \case
Left e -> throwIO e
Right r -> do
createDirectoryIfMissing True (FilePath.takeDirectory out)
-- TODO: Use efficient binary serialization. Should we assume
-- portability? I hope not, but some people may use git clone
-- inside a Dropbox. Don't ask for details, please, just think
-- about how to handle such (pathological) cases.
r <$ Lazy.ByteString.writeFile out (Aeson.encode r)
-- TODO: We should check that there is at least one environment section in
-- the configuration file and that there is exactly one with
-- "is-default: True".
-- | Create initial version of project configuration file.
--
-- *TODO:*
--
-- * When /Stack/ is detected, then we need to add stack wrappers into the configuration.
--
-- * When non-standard /Stack configuration file/ is found, then @STACK_YAML@
-- should be defined.
createProjectConfig :: ProjectRoot -> IO Text
createProjectConfig root = do
scm <- detectVersionControl root
putJustStrLn scm $ \tool ->
" ... detected " <> show tool <> " as a SCM"
buildTool <- detectBuildTool root
putJustStrLn buildTool $ \tool ->
" ... detected " <> show tool <> " as a build tool."
pure $ Text.unlines
[ "# Source Code Management (SCM) tool used by the project."
, "# Currently only 'Git' is recognized automatically."
, field' "scm" $ Scm.toText <$> scm
, ""
, "# Build tool used by the project."
, "# Currently only 'Cabal' and 'Stack' are recognized automatically."
, field' "build-tool" $ BuildTool.toText <$> buildTool
, ""
, "# Settings in global section are always applied, regardless of\
      \ which environment is"
, "# used."
, "global:"
, " env:"
, " # User can override these variables in \"${HOME}/.bash_yx\",\
\ that is usually"
, " # necessary on systems with different installation paths for\
\ these tools."
, " #YX_STACK_EXE: /usr/bin/stack"
, " #YX_GIT_EXE: /usr/bin/git"
, ""
, " bin:"
, ""
, "# Environments for this project."
, "environment:"
, " # Execution environment named \"default\". It is used when there\
\ is no"
, " # environment specified on the command line, due to \"is-default:\
\ true\"."
, " default:"
, " is-default: true"
, ""
, " # Add or modify environment variables of the isolated execution"
, " # environment."
, " env:"
, " # PATH: \"/some/path/bin:${PATH}\""
, ""
, " # Add following commands/executables in to the isolated execution"
, " # environment."
, " #"
, " # When creating symlink, the command has always have to be\
\ absolute path to"
, " # the executable."
, " bin:"
, " #stack:"
, " # type: command"
, " # command: stack build"
, " # env:"
, " # STACK_YAML: ${YX_PROJECT_ROOT}/stack-production.yaml"
, " #"
, " #alex:"
, " # type: symlink"
, " # command: /opt/alex/3.1.7/bin/alex"
, " #"
, " #happy:"
, " # type: symlink"
, " # command: /opt/happy/1.19.5/bin/happy"
, " #"
, " #build:"
, " # type: alias"
, " # command: ${YX_ENVIRONMENT_DIR}/bin/stack build"
, " #"
, " #hoogle:"
, " # type: alias"
, " # command: \"stack exec hoogle --\""
]
where
field :: Text -> Text -> Text
field name value = name <> ": \"" <> value <> "\""
field' :: Text -> Maybe Text -> Text
field' name = \case
Nothing -> "#" <> name <> ":"
Just value -> field name value
-- | Detect whether the project contains a YX project configuration file, and
-- which one, if it is found.
detectYxConfig :: ProjectRoot -> IO (Maybe FilePath)
detectYxConfig root =
detect . List.map doesConfigExist $ NonEmpty.toList yxConfigs
where
doesConfigExist cfg = doesFileExist (root </> cfg) ~~> cfg
-- }}} Project Configuration File ---------------------------------------------
-- {{{ Tooling Detection ------------------------------------------------------
detectVersionControl :: ProjectRoot -> IO (Maybe Scm)
detectVersionControl root = detect
[ doesDirectoryExist (root </> ".git") ~~> Git
]
detectBuildTool :: ProjectRoot -> IO (Maybe BuildTool)
detectBuildTool root = detect
[ glob root "stack*.yaml" ~~> Stack
, glob root "stack*.cabal" ~~> Cabal
]
-- }}} Tooling Detection ------------------------------------------------------
-- {{{ Utility functions ------------------------------------------------------
putJustStrLn :: Maybe a -> (a -> String) -> IO ()
putJustStrLn = \case
Nothing -> const $ pure ()
Just s -> IO.putStrLn . ($ s)
glob :: ProjectRoot -> GlobPattern -> IO Bool
glob root pattern = not . List.null <$> globDir1 (Glob.compile pattern) root
(~~>) :: IO Bool -> a -> (IO Bool, a)
(~~>) = (,)
detect :: [(IO Bool, a)] -> IO (Maybe a)
detect = foldM go Nothing
where
go r@(Just _) _ = pure r
go Nothing (predicate, a) = do
isThisOne <- predicate
pure $ if isThisOne
then Just a
else Nothing
-- }}} Utility functions ------------------------------------------------------
| trskop/yx | src/YX/Initialize.hs | bsd-3-clause | 18,592 | 0 | 23 | 5,123 | 3,515 | 1,914 | 1,601 | 316 | 8 |
-- |Uses NetCore to implement a learning switch.
--
-- Does not work on networks with loops.
module MacLearning where
import Control.Concurrent
import Control.Concurrent.SampleVar
import Data.IORef
import Control.Monad (forever)
import Frenetic.NetCore
import qualified Data.Map as Map
import Frenetic.NetCore.Util (poDom)
isFlood = DlDst broadcastAddress
-- |Learns the location of hosts at each switch.
--
-- 'pktsByLocation' produces a stream of NetCore policies and a stream of
-- hosts' locations. The NetCore policies inspect packets at all switches.
-- At each switch, the NetCore policies learn the ingress port that receives
-- packets from each host. These learned locations are output to the other
-- stream.
--
-- The program handles host mobility: if the host starts sending from
-- a new port, the program learns the new location of the host.
pktsByLocation :: IO (Chan (EthernetAddress, Loc), Chan Policy)
pktsByLocation = do
locChan <- newChan -- hosts' locations
polChan <- newChan -- policy to monitor new hosts and moved hosts
(pktChan, act) <- getPkts
-- In a loop, inspect packets, and learn the ingress port for hosts
let loop :: Map.Map (Switch, EthernetAddress) (Port, Predicate)
-> IO ()
loop locs = do
(Loc sw port, pkt) <- readChan pktChan
let srcMac = pktDlSrc pkt
case Map.lookup (sw, srcMac) locs of
Just (port', _) | port == port' -> do
          -- We already know the host and its location is unchanged.
loop locs
otherwise -> do
-- Either (1) srcMac has never sent a packet to the switch, or
-- (2) it has before, but from a different port.
let pred = DlSrc srcMac <&&>
Switch sw <&&>
IngressPort port
let locs' = Map.insert (sw, srcMac) (port, pred) locs
writeChan locChan (srcMac, Loc sw port)
-- Update the policy so that we do not see packets from known
-- hosts at known locations.
let knownHosts = prOr (map snd (Map.elems locs'))
writeChan polChan $
Not (DlSrc broadcastAddress <||> knownHosts) ==> act
loop locs'
-- Initially, inspect all packets (excluding broadcasts)
writeChan polChan (Not (DlSrc broadcastAddress) ==> act)
forkIO (loop Map.empty)
return (locChan, polChan)
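-- A rough sketch of how the two streams might be consumed (illustrative only;
-- see 'learningSwitch' below for the real wiring):
--
-- > (locChan, polChan) <- pktsByLocation
-- > _ <- forkIO $ forever $ readChan locChan >>= \(mac, loc) -> ...
-- > forever $ readChan polChan >>= ...   -- hand each policy to the controller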
-- |'fwdTo locs ((sw, dstMac), port)' builds a policy that forwards
-- packets to 'dstMac', at switch 'sw' on port 'port' from hosts in
-- 'locs'. The policy only forwards packets on 'sw', so it ignores
-- members of 'locs' on other switches.
fwdTo :: Map.Map (Switch, EthernetAddress) Port
-> ((Switch, EthernetAddress), Port) -- ^forwards to this
-> Policy
fwdTo locs ((sw, dstMac), port) = foldr (<+>) PoBottom (map f srcMacs)
where -- select only srcMacs at sw
srcMacs = map (\((_, srcMac), _) -> srcMac) $
filter (\((sw', _), _) -> sw' == sw)
(Map.toList locs)
f srcMac = (Switch sw <&&> DlSrc srcMac <&&> DlDst dstMac)
==> forward [port]
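-- For illustration: if @locs@ maps host @hA@ at switch 1 to port 3 and host
-- @hB@ at switch 2 to port 4, then @fwdTo locs ((1, hB), 5)@ only considers
-- sources known at switch 1 and produces (essentially)
-- @(Switch 1 <&&> DlSrc hA <&&> DlDst hB) ==> forward [5]@.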
-- |'placeRoutes pktChan' produces a stream of NetCore forwarding policies that
-- forward packets based on hosts' locations in 'pktChan'.
placeRoutes :: Chan (EthernetAddress, Loc) -> IO (Chan Policy)
placeRoutes pktChan = do
polChan <- newChan
-- When a hosts location arrives, update the map and recompute the forwarding
-- policy.
locsRef <- newIORef Map.empty
-- Initially, flood all packets
writeChan polChan (Any ==> allPorts unmodified)
forkIO $ forever $ do
(dlSrc, Loc sw pt) <- readChan pktChan
locs <- readIORef locsRef
let locs' = Map.insert (sw, dlSrc) pt locs
let fwdPol = foldr (<+>) PoBottom (map (fwdTo locs') (Map.toList locs'))
-- Flood all other packets to maintain connectivity.
let floodPol = Not (poDom fwdPol) ==> allPorts unmodified
writeChan polChan (fwdPol <+> floodPol)
writeIORef locsRef locs'
return polChan
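-- | Rate-limit a channel: forward values from the input channel, emitting at
-- most one value per @msDelay@ milliseconds. Values that arrive while the
-- delay is pending overwrite each other, so only the most recent one is sent.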
calmChan :: Int -> Chan a -> IO (Chan a)
calmChan msDelay inChan = do
outChan <- newChan
sample <- newEmptySampleVar
forkIO $ forever $ do
v <- readChan inChan
writeSampleVar sample v
forkIO $ forever $ do
v <- readSampleVar sample
writeChan outChan v
threadDelay (msDelay * 1000)
return outChan
learningSwitch = do
(uniqPktsChan, queryPolChan) <- pktsByLocation
fwdPolChan <- placeRoutes uniqPktsChan
bothPolsChan <- both queryPolChan fwdPolChan
polChan <- newChan
forkIO $ forever $ do
(queryPol, fwdPol) <- readChan bothPolsChan
writeChan polChan (fwdPol <+> queryPol)
delayedPolChan <- calmChan 1000 polChan
return delayedPolChan
main addr = do
polChan <- learningSwitch
dynController addr polChan
| frenetic-lang/netcore-1.0 | examples/MacLearning.hs | bsd-3-clause | 4,785 | 0 | 25 | 1,174 | 1,149 | 579 | 570 | 83 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Server where
import Servant.API
import Game.Game
import Account.Account
import System.Environment
import Network.Wai
import Network.Wai.Handler.Warp
import Servant.JQuery
import Servant.Server
import Servant
import Control.Concurrent.STM.TVar
import Control.Concurrent.STM
import qualified Data.HashMap as H
import Servant.Docs
type FullServer = "game" :> GameEndpoints :<|> "account" :> AccountEndpoints :<|> Raw
server gameRef = gameServer gameRef :<|> accountServer :<|> serveDirectory "./www/"
serverAPI::Proxy FullServer
serverAPI = Proxy
app gameRef = serve serverAPI $ server gameRef
mainFunc = do
args <- getArgs
  if null args
    then do
      gameRef <- atomically $ newTVar (H.empty)
      run 8080 (app gameRef)
    else useArg (head args)
useArg:: String -> IO ()
useArg arg
-- | (arg == "j") = writeFile "./api.js" js
| (arg == "m") = makeDB
-- | (arg == "d") = writeFile "./docs.html" doc
  | otherwise = putStrLn "Didn't understand that command line arg"
{--
doc::String
doc = markdown $ docs serverAPI
js::String
js=jsForAPI serverAPI
--}
| octopuscabbage/UltimateTicTacToeServer | src/Server.hs | bsd-3-clause | 1,315 | 0 | 14 | 251 | 278 | 152 | 126 | 36 | 2 |
module Web.Client.CapacitySpec (main, spec) where
import Web.Client.Capacity
import Test.Hspec
import Test.QuickCheck
import Test.QuickCheck.Instances
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "someFunction" $ do
it "should work fine" $ do
property someFunction
someFunction :: Bool -> Bool -> Property
someFunction x y = x === y
| athanclark/client-capacity | test/Web/Client/CapacitySpec.hs | bsd-3-clause | 369 | 0 | 13 | 70 | 118 | 63 | 55 | 14 | 1 |
{-# OPTIONS_GHC -Wall -fwarn-tabs #-}
{-# LANGUAGE CPP #-}
----------------------------------------------------------------
-- ~ 2021.12.14
-- |
-- Module : Unused.TastyQuickCheck
-- Copyright : 2008--2021 wren romano
-- License : BSD-3-Clause
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Unused extensions\/helpers for "Test.Tasty.QuickCheck". This
-- will eventually be removed from the repo; I'm just leaving it
-- here for now, for lack of somewhere better to shelve it.
----------------------------------------------------------------
module Unused.TastyQuickCheck (localQuickCheckOptions) where
import qualified Test.Tasty as Tasty
import qualified Test.Tasty.QuickCheck as TastyQC
import qualified Test.QuickCheck as QC
----------------------------------------------------------------
-- QuickCheck >=2.1.0 && <2.5.0 used 'maxDiscard' instead, which
-- has a different semantics and which we set to @max 1000 (10*n)@.
-- But since the cabal file lists QuickCheck-2.10 as the minimum
-- version, must switch to the new 'maxDiscardRatio' instead.
-- | Convert most of 'QC.Args' into Tasty. There are a few QuickCheck
-- args which are not handled:
--
-- * 'QC.maxShrinks' if tasty-quickcheck<0.10.2, because
-- 'TastyQC.QuickCheckMaxShrinks' is not exported.
-- <https://github.com/UnkindPartition/tasty/issues/316>
-- * 'QC.chatty' because Tasty always ignores this setting.
-- * 'QC.replay' because of technical difficulty with inverting
-- @Test.QuickCheck.Random.mkQCGen :: Int -> QCGen@
--
-- Conversely, there are two TastyQC options which have no equivalent
-- in 'QC.Args': 'TastyQC.QuickCheckVerbose' and 'TastyQC.QuickCheckShowReplay'.
localQuickCheckOptions :: QC.Args -> Tasty.TestTree -> Tasty.TestTree
localQuickCheckOptions args
= Tasty.localOption (TastyQC.QuickCheckTests $ QC.maxSuccess args)
. Tasty.localOption (TastyQC.QuickCheckMaxSize $ QC.maxSize args)
. Tasty.localOption (TastyQC.QuickCheckMaxRatio $ QC.maxDiscardRatio args)
#if MIN_VERSION_tasty_quickcheck(0,10,2)
. Tasty.localOption (TastyQC.QuickCheckMaxShrinks $ QC.maxShrinks args)
#endif
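-- A usage sketch (the group and property names are made up):
--
-- > Tasty.defaultMain
-- >   . localQuickCheckOptions QC.stdArgs{ QC.maxSuccess = 1000 }
-- >   $ Tasty.testGroup "properties"
-- >       [ TastyQC.testProperty "reverse-involutive"
-- >           (\xs -> reverse (reverse xs) == (xs :: [Int])) ]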
{-
Tasty lacks some options that QC.Args has:
* (QC.chatty Bool{default=True}), though 'TastyQC.optionSetToArgs'
always sets this to False.
Tasty has some additional options that QC.Args lacks:
* (TastyQC.QuickCheckVerbose Bool{default=False})
chooses between QC.verboseCheckWithResult vs QC.quickCheckWithResult.
Where,
QC.verboseCheckWithResult a p = QC.quickCheckWithResult a (QC.verbose p)
* (TastyQC.QuickCheckShowReplay Bool{default=False})
says whether to print the replay seed even on successful tests
(it's always printed on unsuccessful tests).
Tasty has some discrepancy with QC.Args:
* (TastyQC.QuickCheckReplay (Maybe Int){default=Nothing})
vs (QC.replay (Maybe (QCGen, Int)){default=Nothing})
The Int of QC.replay is the value returned by QC.getSize,
which can be ignored for the purposes of TastyQC.QuickCheckReplay
since QC.verboseCheckWithResult doesn't use it for random
seed stuff. (QC.verboseCheckWithResult does use it to define
the case for QC.State.computeSize applied to (0,0) however.)
However, there's no good way I can think to invert
Test.QuickCheck.Random.mkQCGen :: Int -> QCGen
Which is just a wrapper around
System.Random.mkStdGen :: Int -> StdGen
or
System.Random.SplitMix.mkSMGen :: Word64 -> SMGen
depending on, if impl(hugs): cpp-options: -DNO_SPLITMIX
Partly because it depends on whether the @random@ library
version is >=1.2 vs <1.2, since they use different internals
which the QCGen type is expressly designed to paper over.
But mainly because there is no inverse function already given.
We could use the Read and Show instances to recover the components
of the QCGen, however it's less clear how to put them back
together into an Int.
-}
----------------------------------------------------------------
----------------------------------------------------------- fin.
| wrengr/bytestring-trie | dev/Unused/TastyQuickCheck.hs | bsd-3-clause | 4,266 | 0 | 12 | 810 | 189 | 120 | 69 | 11 | 1 |
-- This module defines different SQL dialects.
module Database.Algebra.SQL.Dialect
( Dialect(..)
, forMonetDB
, forPostgreSQL
) where
-- TODO Provide feature specific records, in case this file gets bigger.
-- | Defines the possible SQL dialects used for certain tasks like rendering and
-- materialization.
data Dialect = SQL99
| PostgreSQL
| MonetDB
forMonetDB :: Dialect -> Bool
forMonetDB MonetDB = True
forMonetDB _ = False
forPostgreSQL :: Dialect -> Bool
forPostgreSQL PostgreSQL = True
forPostgreSQL _ = False
| ulricha/algebra-sql | src/Database/Algebra/SQL/Dialect.hs | bsd-3-clause | 579 | 0 | 5 | 140 | 90 | 54 | 36 | 13 | 1 |
module HW11Test
(hw11Tests)
where
import Test.Tasty
import Test.Tasty.Hspec
import Data.Char
import Data.Maybe
import HW11.AParser
import HW11.SExpr
hw11Tests :: IO TestTree
hw11Tests = do
hspecSuite <- testSpec "HW11" hspecTests
return $ testGroup "tests" [hspecSuite]
hspecTests :: SpecWith ()
hspecTests = do
describe "Ex 1" $ do
describe "zeroOrMore" $ do
it "parses as many as values as possible" $
shouldBe (runParser (zeroOrMore (satisfy isUpper)) "ABCdEfgH")
(Just ("ABC", "dEfgH"))
it "always succeeds" $
shouldBe (runParser (zeroOrMore (satisfy isUpper)) "abcdeFGh")
(Just ("", "abcdeFGh"))
describe "oneOrMore" $ do
it "fails if there isn't at least one result" $
shouldBe (runParser (oneOrMore (satisfy isUpper)) "abcdEfgH")
Nothing
it "works like zeroOrMore otherwise" $
shouldBe (runParser (oneOrMore (satisfy isUpper)) "ABCdEfgH")
(runParser (zeroOrMore (satisfy isUpper)) "ABCdEfgH")
describe "Ex 2" $ do
describe "spaces" $
it "parses zero or more whitespace chars" $ do
shouldBe (runParser spaces " a") (Just (" ", "a"))
shouldBe (runParser spaces "a") (Just ("", "a"))
describe "ident" $
it "parses an alpha char followed by 0 or more alphaNums" $ do
shouldBe (runParser ident "foobar baz") (Just ("foobar", " baz"))
shouldBe (runParser ident "foo33fa") (Just ("foo33fa", ""))
shouldBe (runParser ident "2bad") Nothing
shouldBe (runParser ident "") Nothing
describe "Ex 3" $ do
describe "parsing parseSExpr" $ do
it "parses integers" $
shouldBe (fst . fromJust $ runParser parseSExpr "5")
(A (N 5))
it "parses identifiers" $
shouldBe (fst . fromJust $ runParser parseSExpr "foo3")
(A (I "foo3"))
it "leading and trailing whitespace" $
shouldBe (fst . fromJust $ runParser parseSExpr " 4 ")
(A (N 4))
it "parses lists of sexprs" $ do
shouldBe (fst . fromJust $ runParser parseSExpr "(bar (foo) 3 5 874)")
(Comb [ A (I "bar")
, Comb [A (I "foo")]
, A (N 3)
, A (N 5)
, A (N 874)
])
shouldBe (fst . fromJust $ runParser parseSExpr
"(((lambda x (lambda y (plus x y))) 3) 5)")
(Comb [ Comb [ Comb [ A (I "lambda")
, A (I "x")
, Comb [ A (I "lambda")
, A (I "y")
, Comb [ A (I "plus")
, A (I "x")
, A (I "y")
]
]
]
, A (N 3)
]
, A (N 5)
])
shouldBe (fst . fromJust $ runParser parseSExpr
"( lots of ( spaces in ) this (one ) )")
(Comb [ A (I "lots")
, A (I "of")
, Comb [ A (I "spaces")
, A (I "in")
]
, A (I "this")
, Comb [ A (I "one") ]
])
| cgag/cis-194-solutions | test/HW11Test.hs | bsd-3-clause | 3,642 | 0 | 30 | 1,696 | 1,028 | 495 | 533 | 77 | 1 |
{-# language FlexibleContexts #-}
{-# language ScopedTypeVariables #-}
{-# language GADTs #-}
module Hard2 where
import Co
import Control.Applicative
import Control.Monad
import Feldspar
import Feldspar.Hardware
import qualified Feldspar.Hardware.Compile as Hard
import Prelude (flip, (.), ($), Bool(..), Num(..), Integer(..))
--------------------------------------------------------------------------------
-- * Hardware
--------------------------------------------------------------------------------
type HRef = Reference Hardware
type HArr = Array Hardware
type HIrr = IArray Hardware
type HIx = Ix Hardware
type HBool = HExp Bool
type HInt = HExp Integer
type HWord8 = HExp Word8
type HWord16 = HExp Word16
type HWord32 = HExp Word32
type HWord64 = HExp Word64
type HBlock = Block Hardware
type HB = B Hardware
--------------------------------------------------------------------------------
-- ** SHA1
--------------------------------------------------------------------------------
--
-- todo: same assumption as for software version.
--
--------------------------------------------------------------------------------
sha1 :: Integer -> HArr HWord8 -> Hardware (HArr HWord8)
sha1 blocks message =
do let f :: HInt -> HWord32 -> HWord32 -> HWord32 -> HWord32
f t b c d =
(0 <= t && t <= 19) ?? ((b .&. c) .|. (complement b .&. d)) $
(20 <= t && t <= 39) ?? (b `xor` c `xor` d) $
(40 <= t && t <= 59) ?? ((b .&. c) .|. (b .&. d) .|. (c .&. d))
$ (b `xor` c `xor` d)
let k :: HInt -> HWord32
k t =
(0 <= t && t <= 19) ?? 0x5a827999 $
(20 <= t && t <= 39) ?? 0x6ed9eba1 $
(40 <= t && t <= 59) ?? 0x8f1bbcdc
$ 0xca62c1d6
let step :: HIrr HWord32 -> HInt -> HBlock -> Hardware ()
step w t block@(ra, rb, rc, rd, re) =
do (a, b, c, d, e) <- freeze_block block
temp <- shareM $
a `rotateL` (5 :: HWord32) + (f t b c d) + e + (w !! t) + (k t)
setRef re (d)
setRef rd (c)
setRef rc (b `rotateL` (30 :: HWord32))
setRef rb (a)
setRef ra (temp)
-- format the message according to SHA1.
let b = value blocks
p <- sha1_pad b message
w <- sha1_extend b p
-- fetch the initial 160-bit block.
ib <- init_block
-- process the blocks of w.
for (0) (b-1) (\(i :: HExp Integer) ->
do -- copy the current block.
cb <- copy_block ib
-- iterate step over block.
for (0) (79) $ \i -> step w i cb
-- add new block to previous block.
add_block ib cb)
-- translate the final block into an array of octets.
sha1_block ib
--------------------------------------------------------------------------------
sha1_pad :: HInt -> HArr (HExp Word8) -> Hardware (HArr (HExp Word8))
sha1_pad blocks message =
do let len = length message :: HInt
bits :: HWord64 <- shareM (i2n len * 8)
size :: HInt <- shareM (64 * blocks)
imsg :: HIrr (HExp Word8) <- unsafeFreezeArr message
pad :: HArr (HExp Word8) <- newArr (64 * blocks)
-- copy original message.
for (0) (len-1) $ \i ->
setArr pad i (imsg !! i)
-- add the single one.
setArr pad len 1
-- fill with zeroes.
for (len+1) (size-9) $ \i ->
setArr pad i 0
-- add length in last 8 8-bits.
for (size-8) (size-1) $ \i ->
setArr pad i (i2n (bits `shiftR` (8 * ((size-1) - i))))
return pad
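-- For illustration: with @blocks = 1@ and a 3-octet message, 'sha1_pad'
-- returns a 64-octet array holding the 3 message octets, a marker octet,
-- zeroes, and the message length in bits (24) spread over the last 8 octets,
-- most significant octet first.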
sha1_extend :: HInt -> HArr (HExp Word8) -> Hardware (HIrr (HExp Word32))
sha1_extend blocks pad =
do ipad :: HIrr (HExp Word8) <- unsafeFreezeArr pad
ex :: HArr (HExp Word32) <- newArr (80 + blocks)
-- truncate original block.
for (0) (blocks-1) $ (\(b :: HExp Integer) -> do
po <- shareM (b * 16)
bo <- shareM (b * 80)
for (0) (15) (\(i :: HExp Integer) ->
setArr ex (b+i)
( (i2n $ ipad ! (po+(i*4)))
+ (i2n $ ipad ! (po+(i*4)+1)) `shiftL` (8 :: HExp Integer)
+ (i2n $ ipad ! (po+(i*4)+2)) `shiftL` (16 :: HExp Integer)
+ (i2n $ ipad ! (po+(i*4)+3)) `shiftL` (24 :: HExp Integer)
)))
-- extend block with new words.
iex :: HIrr (HExp Word32) <- unsafeFreezeArr ex
for (0) (blocks-1) (\(b :: HExp Integer) -> do
bo <- shareM (b * 80)
for (bo+16) (bo+79) (\(i :: HExp Integer) ->
setArr ex i $ flip rotateL (1 :: HExp Word32)
( (iex ! (i-3))
`xor` (iex ! (i-8))
`xor` (iex ! (i-14))
`xor` (iex ! (i-16))
)))
unsafeFreezeArr ex
-- Translate a 160-bit block into an array of 20 8-bits.
sha1_block :: HBlock -> Hardware (HArr (HExp Word8))
sha1_block (a, b, c, d, e) =
do ta <- unsafeFreezeRef a
tb <- unsafeFreezeRef b
tc <- unsafeFreezeRef c
td <- unsafeFreezeRef d
te <- unsafeFreezeRef e
out <- newArr 20
let shift i = 8 * (3 - (i2n i :: HExp Word32))
for 0 3 $ \i -> setArr out (i) (i2n (ta `shiftR` shift i) :: HExp Word8)
for 0 3 $ \i -> setArr out (i+4) (i2n (tb `shiftR` shift i) :: HExp Word8)
for 0 3 $ \i -> setArr out (i+8) (i2n (tc `shiftR` shift i) :: HExp Word8)
for 0 3 $ \i -> setArr out (i+12) (i2n (td `shiftR` shift i) :: HExp Word8)
for 0 3 $ \i -> setArr out (i+16) (i2n (te `shiftR` shift i) :: HExp Word8)
return out
--------------------------------------------------------------------------------
(??) :: HType a => HBool -> HExp a -> HExp a -> HExp a
(??) = (?)
(!!) :: HType a => HIrr (HExp a) -> HInt -> HExp a
(!!) = (!)
foldlHM
:: (HBlock -> HInt -> Hardware ()) -- update function.
-> HBlock -- initial block.
-> HInt -- lower range.
-> HInt -- upper range.
-> Hardware HBlock
foldlHM f b l u =
do for l u (\ix -> f b ix)
return b
--------------------------------------------------------------------------------
test = Hard.icompile (msg >>= sha1 2 >> return ())
--------------------------------------------------------------------------------
| markus-git/PBKDF2 | src/Hard2.hs | bsd-3-clause | 6,273 | 0 | 29 | 1,859 | 2,454 | 1,285 | 1,169 | 124 | 1 |
module Problem4 where
import Data.Char
import Data.List
import Data.Maybe
import Control.Applicative
import System.IO
-- Given two sequences a1, a2.. an (ai is the profit per click of the i-th ad) and b1, b2, . . . , bn (bi is
-- the average number of clicks per day of the i-th slot), we need to partition them into n pairs (ai, bi))
-- such that the sum of their products is maximized.
main :: IO ()
main =
hSetBuffering stdin NoBuffering >>= \_ ->
nextNum >>= \n ->
allHours n >>= \h ->
let solved = solve h
size = length solved
print' = intercalate " " . map show
in (putStrLn $ show size) >>= \_ ->
putStrLn $ print' solved
solve :: (Ord a) => [(a, a)] -> [a]
solve =
let f [] (start, end) = end : []
f acc@(h:_) (start, end)
| start <= h = acc
| otherwise = end : acc
in (foldl' f []) . (sortBy hourOrder)
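-- For illustration: @solve [(1,3),(2,5),(3,6)]@ sorts the segments by their
-- right endpoint and keeps an endpoint only when the next segment starts
-- after the last endpoint kept, yielding @[3]@; similarly
-- @solve [(1,3),(2,5),(5,6),(4,7)]@ yields @[6,3]@.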
allHours n = sequence $ take n $ repeat nextHours
hourOrder (f1, t1) (f2, t2)
| t1 > t2 = GT
| t1 < t2 = LT
| t1 == t2 = compare f1 f2
nextHours = fmap (tuplify . sort) $ nextNums 2
tuplify :: [a] -> (a, a)
tuplify [a, b] = (a, b)
nextNums :: (Integral a, Read a) => Int -> IO [a]
nextNums n = sequence $ take n $ repeat nextNum
nextNum :: (Integral a, Read a) => IO a
nextNum = nextNum' ""
nextNum' n = getChar >>= \char ->
if(isDigit char || (null n && char == '-')) then nextNum' $ char:n
else if(null n) then nextNum' n
else pure $ read $ reverse n
| msosnicki/algorithms | app/week3/Problem4.hs | bsd-3-clause | 1,443 | 0 | 16 | 366 | 592 | 309 | 283 | 39 | 3 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
module Util where
import System.Console.GetOpt.Generics
import System.Console.GetOpt.Generics.Modifier
parse :: (Generic a, HasDatatypeInfo a, All2 HasArguments (Code a)) =>
String -> Result a
parse = modsParse []
modsParse :: (Generic a, HasDatatypeInfo a, All2 HasArguments (Code a)) =>
[Modifier] -> String -> Result a
modsParse modifiers = parseArguments "prog-name" modifiers . words
unsafeModifiers :: [Modifier] -> Modifiers
unsafeModifiers mods = case mkModifiers mods of
Success x -> x
Errors errs -> error ("unsafeModifiers: " ++ show errs)
OutputAndExit msg -> error ("unsafeModifiers: " ++ show msg)
| sol/getopt-generics | test/Util.hs | bsd-3-clause | 714 | 0 | 11 | 128 | 217 | 113 | 104 | 16 | 3 |
-- Copyright (c) 2012-2013, Christoph Pohl
-- BSD License (see http://www.opensource.org/licenses/BSD-3-Clause)
-------------------------------------------------------------------------------
--
-- Project Euler Problem 16
--
-- 2^15 = 32768 and the sum of its digits is 3 + 2 + 7 + 6 + 8 = 26.
--
-- What is the sum of the digits of the number 2^1000?
module Main where
import Data.Digits
main :: IO ()
main = print result
result = sum(digitList (2^1000))
digitList :: Integer -> [Integer]
digitList x = digits 10 x
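-- For the smaller case from the problem statement above:
--
-- >>> sum (digitList 32768)
-- 26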
| Psirus/euler | src/euler016.hs | bsd-3-clause | 523 | 0 | 9 | 88 | 82 | 48 | 34 | 7 | 1 |
{-
This module contains helper functions for reporting and creating
unbound variables.
-}
module GHC.Rename.Unbound
( mkUnboundName
, mkUnboundNameRdr
, isUnboundName
, reportUnboundName
, unknownNameSuggestions
, WhereLooking(..)
, unboundName
, unboundNameX
, notInScopeErr
)
where
import GhcPrelude
import RdrName
import HscTypes
import TcRnMonad
import Name
import Module
import SrcLoc
import Outputable
import PrelNames ( mkUnboundName, isUnboundName, getUnique)
import Util
import Maybes
import DynFlags
import FastString
import Data.List
import Data.Function ( on )
import UniqDFM (udfmToList)
{-
************************************************************************
* *
What to do when a lookup fails
* *
************************************************************************
-}
data WhereLooking = WL_Any -- Any binding
| WL_Global -- Any top-level binding (local or imported)
| WL_LocalTop -- Any top-level binding in this module
| WL_LocalOnly
-- Only local bindings
                   -- (pattern synonym declarations,
-- see Note [Renaming pattern synonym variables])
mkUnboundNameRdr :: RdrName -> Name
mkUnboundNameRdr rdr = mkUnboundName (rdrNameOcc rdr)
reportUnboundName :: RdrName -> RnM Name
reportUnboundName rdr = unboundName WL_Any rdr
unboundName :: WhereLooking -> RdrName -> RnM Name
unboundName wl rdr = unboundNameX wl rdr Outputable.empty
unboundNameX :: WhereLooking -> RdrName -> SDoc -> RnM Name
unboundNameX where_look rdr_name extra
= do { dflags <- getDynFlags
; let show_helpful_errors = gopt Opt_HelpfulErrors dflags
err = notInScopeErr rdr_name $$ extra
; if not show_helpful_errors
then addErr err
else do { local_env <- getLocalRdrEnv
; global_env <- getGlobalRdrEnv
; impInfo <- getImports
; currmod <- getModule
; hpt <- getHpt
; let suggestions = unknownNameSuggestions_ where_look
dflags hpt currmod global_env local_env impInfo
rdr_name
; addErr (err $$ suggestions) }
; return (mkUnboundNameRdr rdr_name) }
notInScopeErr :: RdrName -> SDoc
notInScopeErr rdr_name
= hang (text "Not in scope:")
2 (what <+> quotes (ppr rdr_name))
where
what = pprNonVarNameSpace (occNameSpace (rdrNameOcc rdr_name))
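-- For illustration: for an out-of-scope variable 'foo' this renders roughly as
-- "Not in scope: 'foo'", and for a data constructor 'Foo' as
-- "Not in scope: data constructor 'Foo'".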
type HowInScope = Either SrcSpan ImpDeclSpec
-- Left loc => locally bound at loc
-- Right ispec => imported as specified by ispec
-- | Called from the typechecker (TcErrors) when we find an unbound variable
unknownNameSuggestions :: DynFlags
-> HomePackageTable -> Module
-> GlobalRdrEnv -> LocalRdrEnv -> ImportAvails
-> RdrName -> SDoc
unknownNameSuggestions = unknownNameSuggestions_ WL_Any
unknownNameSuggestions_ :: WhereLooking -> DynFlags
-> HomePackageTable -> Module
-> GlobalRdrEnv -> LocalRdrEnv -> ImportAvails
-> RdrName -> SDoc
unknownNameSuggestions_ where_look dflags hpt curr_mod global_env local_env
imports tried_rdr_name =
similarNameSuggestions where_look dflags global_env local_env tried_rdr_name $$
importSuggestions where_look global_env hpt
curr_mod imports tried_rdr_name $$
extensionSuggestions tried_rdr_name
similarNameSuggestions :: WhereLooking -> DynFlags
-> GlobalRdrEnv -> LocalRdrEnv
-> RdrName -> SDoc
similarNameSuggestions where_look dflags global_env
local_env tried_rdr_name
= case suggest of
[] -> Outputable.empty
[p] -> perhaps <+> pp_item p
ps -> sep [ perhaps <+> text "one of these:"
, nest 2 (pprWithCommas pp_item ps) ]
where
all_possibilities :: [(String, (RdrName, HowInScope))]
all_possibilities
= [ (showPpr dflags r, (r, Left loc))
| (r,loc) <- local_possibilities local_env ]
++ [ (showPpr dflags r, rp) | (r, rp) <- global_possibilities global_env ]
suggest = fuzzyLookup (showPpr dflags tried_rdr_name) all_possibilities
perhaps = text "Perhaps you meant"
pp_item :: (RdrName, HowInScope) -> SDoc
pp_item (rdr, Left loc) = pp_ns rdr <+> quotes (ppr rdr) <+> loc' -- Locally defined
where loc' = case loc of
UnhelpfulSpan l -> parens (ppr l)
RealSrcSpan l -> parens (text "line" <+> int (srcSpanStartLine l))
pp_item (rdr, Right is) = pp_ns rdr <+> quotes (ppr rdr) <+> -- Imported
parens (text "imported from" <+> ppr (is_mod is))
pp_ns :: RdrName -> SDoc
pp_ns rdr | ns /= tried_ns = pprNameSpace ns
| otherwise = Outputable.empty
where ns = rdrNameSpace rdr
tried_occ = rdrNameOcc tried_rdr_name
tried_is_sym = isSymOcc tried_occ
tried_ns = occNameSpace tried_occ
tried_is_qual = isQual tried_rdr_name
correct_name_space occ = nameSpacesRelated (occNameSpace occ) tried_ns
&& isSymOcc occ == tried_is_sym
-- Treat operator and non-operators as non-matching
-- This heuristic avoids things like
-- Not in scope 'f'; perhaps you meant '+' (from Prelude)
local_ok = case where_look of { WL_Any -> True
; WL_LocalOnly -> True
; _ -> False }
local_possibilities :: LocalRdrEnv -> [(RdrName, SrcSpan)]
local_possibilities env
| tried_is_qual = []
| not local_ok = []
| otherwise = [ (mkRdrUnqual occ, nameSrcSpan name)
| name <- localRdrEnvElts env
, let occ = nameOccName name
, correct_name_space occ]
global_possibilities :: GlobalRdrEnv -> [(RdrName, (RdrName, HowInScope))]
global_possibilities global_env
| tried_is_qual = [ (rdr_qual, (rdr_qual, how))
| gre <- globalRdrEnvElts global_env
, isGreOk where_look gre
, let name = gre_name gre
occ = nameOccName name
, correct_name_space occ
, (mod, how) <- qualsInScope gre
, let rdr_qual = mkRdrQual mod occ ]
| otherwise = [ (rdr_unqual, pair)
| gre <- globalRdrEnvElts global_env
, isGreOk where_look gre
, let name = gre_name gre
occ = nameOccName name
rdr_unqual = mkRdrUnqual occ
, correct_name_space occ
, pair <- case (unquals_in_scope gre, quals_only gre) of
(how:_, _) -> [ (rdr_unqual, how) ]
([], pr:_) -> [ pr ] -- See Note [Only-quals]
([], []) -> [] ]
-- Note [Only-quals]
-- The second alternative returns those names with the same
-- OccName as the one we tried, but live in *qualified* imports
-- e.g. if you have:
--
-- > import qualified Data.Map as Map
-- > foo :: Map
--
-- then we suggest @Map.Map@.
--------------------
unquals_in_scope :: GlobalRdrElt -> [HowInScope]
unquals_in_scope (GRE { gre_name = n, gre_lcl = lcl, gre_imp = is })
| lcl = [ Left (nameSrcSpan n) ]
| otherwise = [ Right ispec
| i <- is, let ispec = is_decl i
, not (is_qual ispec) ]
--------------------
quals_only :: GlobalRdrElt -> [(RdrName, HowInScope)]
-- Ones for which *only* the qualified version is in scope
quals_only (GRE { gre_name = n, gre_imp = is })
= [ (mkRdrQual (is_as ispec) (nameOccName n), Right ispec)
| i <- is, let ispec = is_decl i, is_qual ispec ]
-- | Generate helpful suggestions if a qualified name Mod.foo is not in scope.
importSuggestions :: WhereLooking
-> GlobalRdrEnv
-> HomePackageTable -> Module
-> ImportAvails -> RdrName -> SDoc
importSuggestions where_look global_env hpt currMod imports rdr_name
| WL_LocalOnly <- where_look = Outputable.empty
| not (isQual rdr_name || isUnqual rdr_name) = Outputable.empty
| null interesting_imports
, Just name <- mod_name
, show_not_imported_line name
= hsep
[ text "No module named"
, quotes (ppr name)
, text "is imported."
]
| is_qualified
, null helpful_imports
, [(mod,_)] <- interesting_imports
= hsep
[ text "Module"
, quotes (ppr mod)
, text "does not export"
, quotes (ppr occ_name) <> dot
]
| is_qualified
, null helpful_imports
, not (null interesting_imports)
, mods <- map fst interesting_imports
= hsep
[ text "Neither"
, quotedListWithNor (map ppr mods)
, text "exports"
, quotes (ppr occ_name) <> dot
]
| [(mod,imv)] <- helpful_imports_non_hiding
= fsep
[ text "Perhaps you want to add"
, quotes (ppr occ_name)
, text "to the import list"
, text "in the import of"
, quotes (ppr mod)
, parens (ppr (imv_span imv)) <> dot
]
| not (null helpful_imports_non_hiding)
= fsep
[ text "Perhaps you want to add"
, quotes (ppr occ_name)
, text "to one of these import lists:"
]
$$
nest 2 (vcat
[ quotes (ppr mod) <+> parens (ppr (imv_span imv))
| (mod,imv) <- helpful_imports_non_hiding
])
| [(mod,imv)] <- helpful_imports_hiding
= fsep
[ text "Perhaps you want to remove"
, quotes (ppr occ_name)
, text "from the explicit hiding list"
, text "in the import of"
, quotes (ppr mod)
, parens (ppr (imv_span imv)) <> dot
]
| not (null helpful_imports_hiding)
= fsep
[ text "Perhaps you want to remove"
, quotes (ppr occ_name)
, text "from the hiding clauses"
, text "in one of these imports:"
]
$$
nest 2 (vcat
[ quotes (ppr mod) <+> parens (ppr (imv_span imv))
| (mod,imv) <- helpful_imports_hiding
])
| otherwise
= Outputable.empty
where
is_qualified = isQual rdr_name
(mod_name, occ_name) = case rdr_name of
Unqual occ_name -> (Nothing, occ_name)
Qual mod_name occ_name -> (Just mod_name, occ_name)
_ -> error "importSuggestions: dead code"
-- What import statements provide "Mod" at all
-- or, if this is an unqualified name, are not qualified imports
interesting_imports = [ (mod, imp)
| (mod, mod_imports) <- moduleEnvToList (imp_mods imports)
, Just imp <- return $ pick (importedByUser mod_imports)
]
-- We want to keep only one for each original module; preferably one with an
-- explicit import list (for no particularly good reason)
pick :: [ImportedModsVal] -> Maybe ImportedModsVal
pick = listToMaybe . sortBy (compare `on` prefer) . filter select
where select imv = case mod_name of Just name -> imv_name imv == name
Nothing -> not (imv_qualified imv)
prefer imv = (imv_is_hiding imv, imv_span imv)
-- Which of these would export a 'foo'
-- (all of these are restricted imports, because if they were not, we
-- wouldn't have an out-of-scope error in the first place)
helpful_imports = filter helpful interesting_imports
where helpful (_,imv)
= not . null $ lookupGlobalRdrEnv (imv_all_exports imv) occ_name
-- Which of these do that because of an explicit hiding list resp. an
-- explicit import list
(helpful_imports_hiding, helpful_imports_non_hiding)
= partition (imv_is_hiding . snd) helpful_imports
-- See note [When to show/hide the module-not-imported line]
show_not_imported_line :: ModuleName -> Bool -- #15611
show_not_imported_line modnam
| modnam `elem` globMods = False -- #14225 -- 1
| moduleName currMod == modnam = False -- 2.1
| is_last_loaded_mod modnam hpt_uniques = False -- 2.2
| otherwise = True
where
hpt_uniques = map fst (udfmToList hpt)
is_last_loaded_mod _ [] = False
is_last_loaded_mod modnam uniqs = last uniqs == getUnique modnam
globMods = nub [ mod
| gre <- globalRdrEnvElts global_env
, isGreOk where_look gre
, (mod, _) <- qualsInScope gre
]
extensionSuggestions :: RdrName -> SDoc
extensionSuggestions rdrName
| rdrName == mkUnqual varName (fsLit "mdo") ||
rdrName == mkUnqual varName (fsLit "rec")
= text "Perhaps you meant to use RecursiveDo"
| otherwise = Outputable.empty
qualsInScope :: GlobalRdrElt -> [(ModuleName, HowInScope)]
-- Ones for which the qualified version is in scope
qualsInScope GRE { gre_name = n, gre_lcl = lcl, gre_imp = is }
| lcl = case nameModule_maybe n of
Nothing -> []
Just m -> [(moduleName m, Left (nameSrcSpan n))]
| otherwise = [ (is_as ispec, Right ispec)
| i <- is, let ispec = is_decl i ]
isGreOk :: WhereLooking -> GlobalRdrElt -> Bool
isGreOk where_look = case where_look of
WL_LocalTop -> isLocalGRE
WL_LocalOnly -> const False
_ -> const True
{- Note [When to show/hide the module-not-imported line] -- #15611
For the error message:
Not in scope X.Y
Module X does not export Y
No module named ‘X’ is imported:
there are 2 cases, where we hide the last "no module is imported" line:
1. If the module X has been imported.
2. If the module X is the current module. There are 2 subcases:
2.1 If the unknown module name is in a input source file,
then we can use the getModule function to get the current module name.
(See test T15611a)
2.2 If the unknown module name has been entered by the user in GHCi,
then the getModule function returns something like "interactive:Ghci1",
and we have to check the current module in the last added entry of
the HomePackageTable. (See test T15611b)
-}
| sdiehl/ghc | compiler/GHC/Rename/Unbound.hs | bsd-3-clause | 14,889 | 0 | 17 | 4,918 | 3,263 | 1,678 | 1,585 | 267 | 9 |
module Hans.Device.Pcap (pcapOpen, pcapSend, pcapReceiveLoop) where
import Prelude (String, IO, Bool(..), const, (.))
import Hans.Layer.Ethernet (EthernetHandle, queueEthernet)
import Control.Monad (void)
import Data.ByteString.Lazy (ByteString, toStrict)
import Network.Pcap (PcapHandle, openLive, loopBS, sendPacketBS)
-- | Open a device with pcap; this gives some info about its state.
-- Unless the device is up, it will throw errors later.
-- Be sure to use a fresh MAC, otherwise everything might fail.
pcapOpen :: String -> IO PcapHandle
pcapOpen s = openLive s 1514 True 0
-- | send to deviece
pcapSend :: PcapHandle -> ByteString -> IO ()
pcapSend dev = sendPacketBS dev . toStrict
-- | Receive from the device, queueing incoming packets to the Ethernet layer.
pcapReceiveLoop :: PcapHandle -> EthernetHandle -> IO ()
pcapReceiveLoop dev = void . loopBS dev (-1) . const . queueEthernet
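-- A wiring sketch (the device name is hypothetical, and obtaining the
-- 'EthernetHandle' from the HaNS stack is elided):
--
-- > main = do
-- >   dev <- pcapOpen "eth0"
-- >   eth <- ...                          -- EthernetHandle from the HaNS stack
-- >   _ <- forkIO (pcapReceiveLoop dev eth)
-- >   ...                                 -- use 'pcapSend dev' to emit frames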
| tolysz/hans-pcap | Hans/Device/Pcap.hs | bsd-3-clause | 857 | 0 | 10 | 162 | 216 | 125 | 91 | 12 | 1 |
{-# LANGUAGE OverloadedStrings, TypeFamilies, QuasiQuotes,
TemplateHaskell, FlexibleInstances, MultiParamTypeClasses,
FlexibleContexts, ScopedTypeVariables
#-}
------------------------------------------------------------------------------
-- File: Chat.hs
-- Creation Date: Jul 15 2012 [15:27:50]
-- Last Modified: Oct 20 2013 [00:33:59]
-- Created By: Samuli Thomasson [SimSaladin] samuli.thomassonAtpaivola.fi
--
-- Credits: http://www.yesodweb.com/book/wiki-chat-example
------------------------------------------------------------------------------
-- | This modules defines a subsite that allows you to insert a chat box on
-- any page of your site. It uses eventsource for sending the messages from
-- the server to the browser.
module Chat
( Chat(..)
, Route(..)
, resourcesChat
, YesodChat(..)
, chatWidget
, postSendR, getReceiveR
) where
import Prelude
import Yesod
import Blaze.ByteString.Builder (Builder, toByteString)
import Control.Concurrent.Chan (Chan, dupChan, writeChan)
import Data.Foldable
import Data.Text (Text)
import Data.Text.Encoding (decodeUtf8)
import Network.Wai (Response(ResponseSource), responseSource)
import Network.Wai.EventSource (ServerEvent(..), eventSourceAppChan)
import Text.Julius (rawJS)
data Chat = Chat (Chan ServerEvent)
mkYesodSubData "Chat" [parseRoutes|
/send SendR POST
/recv ReceiveR GET
|]
-- | Chat interface
class (Yesod master, RenderMessage master FormMessage) => YesodChat master where
-- | The message datatype
data ChatMessage master
chatRenderMsg :: ChatMessage master -> Builder
chatCreateMsg :: Text -> Text -> HandlerT master IO (ChatMessage master)
-- | Retrieve the chat ident, nick, for the user. Nothing if no nick is
-- available (=> chat is disabled)
chatIdent :: HandlerT master IO (Maybe Text)
chatGet :: HandlerT master IO [ChatMessage master]
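-- A minimal instance sketch for a master site (the @App@ type, the message
-- representation and the @Blaze.fromText@ helper are all hypothetical):
--
-- > instance YesodChat App where
-- >     data ChatMessage App = SimpleMessage Text Text
-- >     chatRenderMsg (SimpleMessage who what) =
-- >         Blaze.fromText (who <> ": " <> what <> "\n")
-- >     chatCreateMsg who what = return (SimpleMessage who what)
-- >     chatIdent = ...      -- e.g. look up the logged-in user's nick
-- >     chatGet = return []  -- or fetch recent messages from a database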
-- *
type Handler master = HandlerT Chat (HandlerT master IO)
-- | Get a message from the user and send it to all listeners.
postSendR :: YesodChat master => Handler master ()
postSendR = lift chatIdent >>= traverse_ f
where f ident = do
Chat chan <- getYesod
body <- lift (runInputGet $ ireq textField "message")
message <- lift $ chatCreateMsg ident body
-- Set nginx-specific header for eventsource to work.
addHeader "X-Accel-Buffering" "no"
-- Send an event to all listeners with the user's name and message.
liftIO $ writeChan chan $ ServerEvent Nothing Nothing
[chatRenderMsg message]
-- | Send an eventstream response with all messages streamed in.
getReceiveR :: Handler master ()
getReceiveR = do
Chat chan0 <- getYesod
chan <- liftIO $ dupChan chan0
req <- waiRequest
res <- liftResourceT $ eventSourceAppChan chan req
let (stat, hs, src) = responseSource res
-- for nginx reverse-proxying to work, we add the header X-Accel-Buffering
sendWaiResponse $ ResponseSource stat (("X-Accel-Buffering", "no"):hs) src
-- | Provide a widget that the master site can embed on any page.
chatWidget :: (YesodChat master, ChatMessage master ~ ChatMessage master0)
=> (Route Chat -> Route master) -> WidgetT master IO ()
chatWidget toMaster = do
let disabledChat = do
master <- liftHandlerT getYesod
[whamlet|
<h1 .icon-chat> Chat
<p>
You must be #
$maybe ar <- authRoute master
<a href=@{ar}>logged in
$nothing
logged in
\ to chat.
|]
mident <- liftHandlerT chatIdent -- check if we're already logged in
flip (maybe disabledChat) mident $ \_ -> do
recent <- liftHandlerT chatGet
chat <- liftHandlerT newIdent -- the containing div
[whamlet|
<h1 .icon-chat> Chat
<div.clearfix ##{chat}>
<div ##{chat}-output>
$forall msg <- recent
#{preEscapedToMarkup $ decodeUtf8 $ toByteString $ chatRenderMsg msg}
<input ##{chat}-input type=text placeholder="Enter Message">
|] >> toWidget [lucius|
##{chat}-output { width: 100%; max-height: 300px; overflow: auto; }
##{chat}-input { width: 100%; padding-left:0; padding-right:0; }
##{chat}-output p {
margin:0;
}
|] >> toWidgetBody [julius|
// Set up the receiving end
var output = document.getElementById("#{rawJS chat}-output");
var src = new EventSource("@{toMaster ReceiveR}");
src.onmessage = function(msg) {
// This function will be called for each new message.
output.innerHTML = output.innerHTML + msg.data;
// And now scroll down within the output div so the most recent message
// is displayed.
output.scrollTop = output.scrollHeight;
};
// Set up the sending end: send a message via Ajax whenever the user hits
// enter.
var input = document.getElementById("#{rawJS chat}-input");
input.onkeyup = function(event) {
var keycode = (event.keyCode ? event.keyCode : event.which);
if (keycode == '13') {
var xhr = new XMLHttpRequest();
var val = input.value;
input.value = "";
var params = "?message=" + encodeURIComponent(val);
xhr.open("POST", "@{toMaster SendR}" + params);
xhr.send(null);
}
}
|]
| SimSaladin/rnfssp | Chat.hs | bsd-3-clause | 5,314 | 0 | 19 | 1,212 | 734 | 396 | 338 | 59 | 1 |
module Network.XMPP.Utils
( ReadT(..)
, ShowT(..)
) where
import Data.Text (Text)
import qualified Data.Text as T
-- | Convert 'Text' to a value.
-- Like 'Read', but converts from 'Text' instead of 'String'.
class ReadT a where
readT :: Text -> a
-- | Convert a value to 'Text'.
-- Like 'Show', but converts to 'Text' instead of 'String'.
class ShowT a where
showT :: a -> Text
instance ShowT Int where
showT = T.pack . show
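-- A matching 'ReadT' instance could mirror the one above (a sketch, not part
-- of the original module):
--
-- > instance ReadT Int where
-- >   readT = read . T.unpack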
| Kagami/cirno | src/Network/XMPP/Utils.hs | bsd-3-clause | 445 | 0 | 7 | 103 | 100 | 60 | 40 | 11 | 0 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE FlexibleContexts #-}
module Zero.Account.Profile.Widget
(
profileWidget
) where
import Control.Monad.IO.Class (liftIO)
import Data.Proxy (Proxy(..))
import Data.Maybe (fromJust)
import qualified Data.Map as M
import qualified Data.Text as T
import Data.Text (Text)
import Data.Time.Clock
import Servant.API
import Servant.Reflex
import Reflex.Dom.Core
import Reflex.Dom.Path
import Reflex.Dom.Time
import Zero.Bitcoin
import Zero.Settings.Client
import Zero.Account.Profile.Navigation
import Zero.Positions.Widget
import Zero.Wallet.Widget
import qualified Zero.Bittrex.Widget as Bittrex
import qualified Zero.Bitfinex.Widget as Bitfinex
import qualified Zero.Bitstamp.Widget as Bitstamp
import qualified Zero.GDAX.Widget as GDAX
import qualified Zero.HitBTC.Widget as HitBTC
import Zero.Widget
------------------------------------------------------------------------------
bindInit :: WidgetState -> ProfileState
bindInit (AuthenticationState state) =
ProfileInit state
bindInit _ =
error "Error initialising profile routing."
------------------------------------------------------------------------------
routePath :: forall t m. (
SupportsServantReflex t m
, MonadWidget t m
) => Maybe (Behavior t WidgetState) -> Behavior t (Maybe ProfileState) -> Route -> m (Event t Route, Event t ProfileState)
routePath (Just bhSuperEvent) _ ProfileRoute =
overviewLayout (bindInit <$> bhSuperEvent)
routePath (Just bhSuperEvent) _ ColdStorageRoute =
walletWidget (bindInit <$> bhSuperEvent)
routePath (Just bhSuperEvent) _ PositionsRoute =
positionsLayout (bindInit <$> bhSuperEvent)
routePath Nothing _ _ =
error "Error routing profile path."
------------------------------------------------------------------------------
overviewLayout :: MonadWidget t m
=> Behavior t ProfileState -> m (Event t Route, Event t ProfileState)
overviewLayout bhRouteEvent = do
evNavClick <- navigation ProfileRoute
divClass "ui hidden divider" $ return ()
divClass "stackable ui grid" $ do
divClass "four wide column" $ return ()
divClass "eight wide column" $ do
overviewWidget bhRouteEvent
divClass "four wide column" $ return ()
return ()
return (evNavClick, (\_ -> ProfileState ()) <$> never)
------------------------------------------------------------------------------
overviewWidget :: forall t m. (
SupportsServantReflex t m
, MonadWidget t m
) => Behavior t ProfileState -> m ()
overviewWidget bhRouteEvent = do
t <- liftIO $ getCurrentTime
evClock <- tickLossy 30.0 t
postBuild <- getPostBuild
let evRefresh = leftmost [
postBuild
, (\_ -> ()) <$> evClock
]
bittrexTicker <- Bittrex.tickerWidget (constDyn $ QParamSome "USDT-BTC") evRefresh
bitfinexTicker <- Bitfinex.tickerWidget (Right <$> constDyn "btcusd") (Bittrex.tw_prevDay bittrexTicker) evRefresh
bitstampTicker <- Bitstamp.tickerWidget (Right <$> constDyn "btcusd") (Bittrex.tw_prevDay bittrexTicker) evRefresh
gdaxTicker <- GDAX.tickerWidget (Right <$> constDyn "BTC-USD") (Bittrex.tw_prevDay bittrexTicker) evRefresh
hitBTCTicker <- HitBTC.tickerWidget (Right <$> constDyn "BTCUSD") (Bittrex.tw_prevDay bittrexTicker) evRefresh
-- Summaries
let prevDay = Bittrex.tw_prevDay bittrexTicker
let bittrexShowLast = T.pack . show <$> Bittrex.tw_last bittrexTicker
let bittrex24hDiff = diff24hDyn prevDay (Bittrex.tw_last bittrexTicker)
let bittrexMarket24hPercent = T.pack . show <$> bittrex24hDiff
let bitfinexShowLast = T.pack . show <$> Bitfinex.tw_last bitfinexTicker
let bitfinex24hDiff = diff24hDyn prevDay (Bitfinex.tw_last bitfinexTicker)
let bitfinexMarket24hPercent = T.pack . show <$> bitfinex24hDiff
let gdaxShowLast = T.pack . show <$> GDAX.tw_last gdaxTicker
let gdax24hDiff = diff24hDyn prevDay (GDAX.tw_last gdaxTicker)
let gdaxMarket24hPercent = T.pack . show <$> gdax24hDiff
let bitstampShowLast = T.pack . show <$> Bitstamp.tw_last bitstampTicker
let bitstamp24hDiff = diff24hDyn prevDay (Bitstamp.tw_last bitstampTicker)
let bitstampMarket24hPercent = T.pack . show <$> bitstamp24hDiff
let hitBTCShowLast = T.pack . show <$> HitBTC.tw_last hitBTCTicker
let hitBTC24hDiff = diff24hDyn prevDay (HitBTC.tw_last hitBTCTicker)
let hitBTCMarket24hPercent = T.pack . show <$> hitBTC24hDiff
let combinedRate = averages <$> mconcat [
(\x -> [x]) <$> Bittrex.tw_last bittrexTicker
, (\x -> [x]) <$> Bitfinex.tw_last bitfinexTicker
, (\x -> [x]) <$> GDAX.tw_last gdaxTicker
, (\x -> [x]) <$> Bitstamp.tw_last bitstampTicker
, (\x -> [x]) <$> HitBTC.tw_last hitBTCTicker
]
divClass "ui centered header" $ do
dynText (T.pack . ((++) "1BTC = $") . show <$> combinedRate)
elClass "table" "ui table" $ do
el "thead" $ do
el "tr" $ do
el "th" $ text "Exchange"
el "th" $ text "Market"
el "th" $ text "Last"
el "th" $ text "24h % Delta"
el "tbody" $ do
el "tr" $ do
el "td" $ text "Bittrex"
el "td" $ dynText $ Bittrex.tw_marketName bittrexTicker
el "td" $ dynText (T.pack . show <$> Bittrex.tw_last bittrexTicker)
percentCell bittrex24hDiff bittrexMarket24hPercent
el "tr" $ do
el "td" $ text "Bitfinex"
el "td" $ dynText $ Bitfinex.tw_marketName bitfinexTicker
el "td" $ dynText (T.pack . show <$> Bitfinex.tw_last bitfinexTicker)
percentCell bitfinex24hDiff bitfinexMarket24hPercent
el "tr" $ do
el "td" $ text "Bitstamp"
el "td" $ dynText $ Bitstamp.tw_marketName bitstampTicker
el "td" $ dynText (T.pack . show <$> Bitstamp.tw_last bitstampTicker)
percentCell bitstamp24hDiff bitstampMarket24hPercent
el "tr" $ do
el "td" $ text "GDAX"
el "td" $ dynText $ GDAX.tw_marketName gdaxTicker
el "td" $ dynText (T.pack . show <$> GDAX.tw_last gdaxTicker)
percentCell gdax24hDiff gdaxMarket24hPercent
el "tr" $ do
el "td" $ text "HitBTC"
el "td" $ dynText $ HitBTC.tw_marketName hitBTCTicker
el "td" $ dynText (T.pack . show <$> HitBTC.tw_last hitBTCTicker)
percentCell hitBTC24hDiff hitBTCMarket24hPercent
return ()
isPositive x = x > 0.0
positiveAttr x =
if isPositive x then
("class" =: "positive")
else
("class" =: "negative")
percentCell :: MonadWidget t m => Dynamic t Double -> Dynamic t Text -> m ()
percentCell d txt = do
elDynAttr "td" (positiveAttr <$> d) $ do
dynText txt
return ()
maybeText f (Just x) = f x
maybeText f Nothing = ""
averages :: [Double] -> Double
averages xs = averages' xs 0.0 (length xs)
where
averages' [] r lim =
r / fromIntegral lim
averages' (x:xs) r lim =
averages' xs (x+r) lim
------------------------------------------------------------------------------
profileWidget :: forall t m. (
SupportsServantReflex t m
, MonadWidget t m
) => Behavior t (Maybe WidgetState) -> m (Event t Route, Event t ProfileState)
profileWidget bhSuperEvent = do
evNavClick <- navigation ProfileRoute
let bhInitProfile = fromJust <$> bhSuperEvent
rec routeEventGen <- pathWidget (routePath (Just bhInitProfile) bhRouteEvent)
let evRouteEvent = switchPromptlyDyn routeEventGen
bhRouteEvent <- hold Nothing (Just <$> evRouteEvent)
return (evNavClick, (\_ -> ProfileState ()) <$> never)
profileWidget _ =
error "Invalid state in profile initialisation."
| et4te/zero | src/Zero/Account/Profile/Widget.hs | bsd-3-clause | 7,779 | 0 | 21 | 1,650 | 2,322 | 1,130 | 1,192 | 167 | 2 |
module CarbonCopy.StorageInit (
storageInit
) where
import Control.Monad.IfElse
import Data.Maybe
import CarbonCopy.EmailStorage
import CarbonCopy.MailHeaders
import CarbonCopy.HeadersStorage
import CarbonCopy.ThreadBuilder
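-- | Recursively visit every mail file under the given root directory and
-- populate the header storage via 'processHeader'.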
storageInit :: String -> FilePath -> Storage StrHeader -> IO ()
storageInit email rootDir storage = visitEmailsRecursively rootDir ( processHeader email storage )
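-- Record a message-id when the mail appears to come from the given address,
-- or when it replies to a message-id already present in the storage.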
processHeader :: String -> Storage StrHeader -> EmailHandler
processHeader email storage content = processHeader' chain
where
(chain, hdrsMatchFound) = matchFromHeader content email
processHeader' (Just (Chain {current=fromMsgId, previous=inReplyTo})) = processHeader'' hdrsMatchFound
where
processHeader'' True = unlessM (storage `hdrExists` fromMsgId) $ storage `hdrAdd` fromMsgId
processHeader'' False = whenM (storage `hdrExists` inReplyTo) $
unlessM (storage `hdrExists` fromMsgId) $
storage `hdrAdd` fromMsgId
processHeader' (Just (Root {current=fromMsgId})) = processHeader'' hdrsMatchFound
where
processHeader'' True = unlessM (storage `hdrExists` fromMsgId) $ storage `hdrAdd` fromMsgId
processHeader'' False = return ()
processHeader' _ = return ()
| jdevelop/carboncopy | CarbonCopy/StorageInit.hs | bsd-3-clause | 1,376 | 0 | 14 | 358 | 342 | 183 | 159 | 22 | 5 |
{-# LANGUAGE OverloadedStrings, DataKinds, TypeOperators, FlexibleInstances #-}
module Api
( Coordinate(..)
, runRoute
, runExceptT
, explainError )
where
import Protolude
import Servant.API
import Servant.Client
import Network.HTTP.Client (Manager)
import qualified Data.Text as T
import Response (Response(..))
data Coordinate = Coordinate
{ coordinateLongitude :: Double
, coordinateLatitude :: Double }
deriving (Show)
instance ToHttpApiData Coordinate where
toUrlPiece c = (show . coordinateLongitude $ c) <> "," <> (show . coordinateLatitude $ c)
instance ToHttpApiData [Coordinate] where
toUrlPiece cs = T.intercalate ";" $ toUrlPiece <$> cs
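-- Illustrative encoding produced by the two instances above (the coordinate
-- values are made-up examples):
--
-- > toUrlPiece [Coordinate (-3.28) 51.41, Coordinate (-3.27) 51.42]
-- >   == "-3.28,51.41;-3.27,51.42"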
data Alternatives
= AlternativesFalse
| AlternativesTrue
deriving (Show, Eq)
instance ToHttpApiData Alternatives where
toQueryParam AlternativesFalse = "false"
toQueryParam AlternativesTrue = "true"
data Steps
= StepsTrue
| StepsFalse
deriving (Show, Eq)
instance ToHttpApiData Steps where
toQueryParam StepsTrue = "true"
toQueryParam StepsFalse = "false"
data Geometries
= GeometriesPolyline
| GeometriesGeoJson
deriving (Show, Eq)
instance ToHttpApiData Geometries where
toQueryParam GeometriesGeoJson = "geojson"
toQueryParam GeometriesPolyline = "polyline"
data Overview
= OverviewSimplified
| OverviewFull
| OverviewFalse
deriving (Show, Eq)
instance ToHttpApiData Overview where
toQueryParam OverviewSimplified = "simplified"
toQueryParam OverviewFull = "full"
toQueryParam OverviewFalse = "false"
type RouteAPI
= "driving"
:> Capture "coordinates" [Coordinate]
:> QueryParam "alternatives" Alternatives
:> QueryParam "steps" Steps
:> QueryParam "geometries" Geometries
:> QueryParam "overview" Overview
:> Get '[JSON] Response
api :: Proxy RouteAPI
api = Proxy
routeAPI
:: [Coordinate]
-> Maybe Alternatives
-> Maybe Steps
-> Maybe Geometries
-> Maybe Overview
-> Manager
-> BaseUrl
-> ExceptT ServantError IO Response
routeAPI = client api
runRoute :: Manager -> Text -> Int -> ExceptT ServantError IO Response
runRoute manager host port = routeAPI coordinates alternatives steps geometries overview manager baseurl
where
start = Coordinate { coordinateLongitude = -3.279966, coordinateLatitude = 51.406314 }
end = Coordinate { coordinateLongitude = -3.281205, coordinateLatitude = 51.407274 }
coordinates = [start, end]
alternatives = Just AlternativesFalse
steps = Just StepsTrue
geometries = Just GeometriesGeoJson
overview = Just OverviewFalse
baseurl = BaseUrl Http (T.unpack host) port "/route/v1"
explainError :: ServantError -> Text
explainError (FailureResponse _ _ _) = "Response Failure"
explainError (DecodeFailure m _ _) = "Decode Failure: " <> T.pack m
explainError (UnsupportedContentType _ _) = "Unsupported Content Type"
explainError (InvalidContentTypeHeader _ _) = "Invalid Content Type Header"
explainError (ConnectionError _) = "Connection Error"
| daniel-j-h/hosrm-qa | src/Api.hs | bsd-3-clause | 3,075 | 0 | 13 | 611 | 768 | 410 | 358 | 86 | 1 |
{-# LANGUAGE Rank2Types, DeriveDataTypeable #-}
{-# OPTIONS_HADDOCK hide #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-----------------------------------------------------------------------------
-- |
-- Module : Numeric.LinearAlgebra.Vector.STBase
-- Copyright : Copyright (c) 2010, Patrick Perry <[email protected]>
-- License : BSD3
-- Maintainer : Patrick Perry <[email protected]>
-- Stability  : experimental
--
module Numeric.LinearAlgebra.Vector.STBase (
STVector,
IOVector,
RVector(..),
create,
freeze,
new_,
new,
newCopy,
clear,
copyTo,
unsafeCopyTo,
swap,
unsafeSwap,
getIndices,
getElems,
getElems',
getAssocs,
getAssocs',
setElems,
setAssocs,
unsafeSetAssocs,
read,
unsafeRead,
write,
unsafeWrite,
modify,
unsafeModify,
unsafeSwapElems,
mapTo,
unsafeMapTo,
zipWithTo,
unsafeZipWithTo,
withSlice,
withDrop,
withTake,
withSplitAt,
withSliceM,
withDropM,
withTakeM,
withSplitAtM,
getSumAbs,
getNorm2,
getWhichMaxAbs,
getDot,
unsafeGetDot,
scaleM_,
addWithScaleM_,
unsafeAddWithScaleM_,
kroneckerTo,
addTo,
subTo,
mulTo,
negateTo,
conjugateTo,
absTo,
signumTo,
divTo,
recipTo,
sqrtTo,
expTo,
logTo,
powTo,
sinTo,
cosTo,
tanTo,
asinTo,
acosTo,
atanTo,
sinhTo,
coshTo,
tanhTo,
asinhTo,
acoshTo,
atanhTo,
) where
import Prelude hiding ( drop, read, splitAt, take )
import Control.Monad( when, liftM2 )
import Control.Monad.ST( RealWorld, ST, runST, stToIO, unsafeIOToST )
import Data.Complex( Complex )
import Data.Typeable( Typeable )
import Foreign( Ptr, Storable, advancePtr, peek, poke, peekElemOff,
pokeElemOff, copyArray, mallocForeignPtrArray )
import System.IO.Unsafe( unsafeInterleaveIO )
import Text.Printf( printf )
import Unsafe.Coerce( unsafeCoerce )
import Foreign.BLAS( BLAS1, BLAS2 )
import qualified Foreign.BLAS as BLAS
import Foreign.VMath( VNum, VFractional, VFloating )
import qualified Foreign.VMath as VMath
import Numeric.LinearAlgebra.Internal( clearArray )
import Numeric.LinearAlgebra.Vector.Base hiding ( unsafeWith )
import qualified Numeric.LinearAlgebra.Vector.Base as V
-- | Mutable vectors in the 'ST' monad.
newtype STVector s e = STVector { unSTVector :: Vector e }
deriving (Typeable)
-- | Mutable vectors in the 'IO' monad. Note that 'IO' operations
-- aren't directly supported; to perform an operation in the 'IO'
-- monad, perform the action in 'ST' 'RealWorld' and then convert
-- it via 'stToIO'.
type IOVector = STVector RealWorld
-- | A safe way to create and work with a mutable vector before returning
-- an immutable vector for later perusal. This function avoids copying
-- the vector before returning it - it uses 'unsafeFreeze' internally,
-- but this wrapper is a safe interface to that function.
create :: (Storable e) => (forall s . ST s (STVector s e)) -> Vector e
create stmv = runST $ do
mv <- stmv
unsafeFreeze mv
{-# INLINE create #-}
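-- An illustrative use of 'create' (a sketch, not part of the original module):
--
-- > v :: Vector Double
-- > v = create $ do
-- >     x <- new_ 3
-- >     setElems x [1,2,3]
-- >     return x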
-- | Converts a mutable vector to an immutable one by taking a complete
-- copy of it.
freeze :: (RVector v, Storable e) => v e -> ST s (Vector e)
freeze mv = do
mv' <- newCopy mv
unsafeFreeze mv'
{-# INLINE freeze #-}
-- | Read-only vectors
class RVector v where
-- | Get the dimension of the vector. This is equal to the number of
-- elements in the vector.
getDim :: (Storable e) => v e -> ST s Int
-- | Same as 'withSlice' but does not range-check indices.
unsafeWithSlice :: (Storable e)
=> Int -> Int -> v e
-> (forall v'. RVector v' => v' e -> ST s a)
-> ST s a
-- | Execute an 'IO' action with a pointer to the first element in the
-- vector.
unsafeWith :: (Storable e) => v e -> (Ptr e -> IO a) -> IO a
-- | Converts a read-only vector into an immutable vector. This simply
-- casts the vector from one type to the other without copying the vector.
-- Note that because the vector is possibly not copied, any subsequent
-- modifications made to the mutable version of the vector may be shared
-- with the immutable version. It is safe to use, therefore, if the
-- mutable version is never modified after the freeze operation.
unsafeFreeze :: (Storable e) => v e -> ST s (Vector e)
-- | Unsafe cast from a read-only vector to a mutable vector.
unsafeThaw :: (Storable e)
=> v e -> ST s (STVector s e)
instance RVector Vector where
getDim = return . dim
{-# INLINE getDim #-}
unsafeWith = V.unsafeWith
{-# INLINE unsafeWith #-}
unsafeWithSlice i n' v f =
f (V.unsafeSlice i n' v)
{-# INLINE unsafeWithSlice #-}
unsafeFreeze = return . id
{-# INLINE unsafeFreeze #-}
unsafeThaw = return . STVector
{-# INLINE unsafeThaw #-}
instance RVector (STVector s) where
getDim = return . dim . unSTVector
{-# INLINE getDim #-}
unsafeWith v f = V.unsafeWith (unSTVector v) f
{-# INLINE unsafeWith #-}
unsafeWithSlice i n' v f =
f $ unsafeSlice i n' (unSTVector v)
{-# INLINE unsafeWithSlice #-}
unsafeFreeze = return . unSTVector
{-# INLINE unsafeFreeze #-}
unsafeThaw v = return $ cast v
where
cast :: STVector s e -> STVector s' e
cast = unsafeCoerce
{-# INLINE unsafeThaw #-}
-- | @withSlice i n v@ performs an action with a view of the
-- @n@-dimensional subvector of @v@ starting at index @i@.
withSlice :: (RVector v, Storable e)
=> Int
-> Int
-> v e
-> (forall v'. RVector v' => v' e -> ST s a)
-> ST s a
withSlice i n' v f = do
n <- getDim v
when (i < 0 || n' < 0 || i + n' > n) $ error $
printf "withSlice %d %d <vector with dim %d>: index out of range"
i n' n
unsafeWithSlice i n' v f
-- | Same as 'withSliceM' but does not range-check indices.
unsafeWithSliceM :: (Storable e)
=> Int
-> Int
-> STVector s e
-> (STVector s e -> ST s a)
-> ST s a
unsafeWithSliceM i n' v f =
f $ STVector $ unsafeSlice i n' (unSTVector v)
-- | Like 'withSlice', but perform the action with a mutable view
-- of the vector.
withSliceM :: (Storable e)
=> Int
-> Int
-> STVector s e
-> (STVector s e -> ST s a)
-> ST s a
withSliceM i n' v f = do
n <- getDim v
when (i < 0 || n' < 0 || i + n' > n) $ error $
printf "withSlice %d %d <vector with dim %d>: index out of range"
i n' n
unsafeWithSliceM i n' v f
-- | Like 'withDrop', but perform the action with a mutable view
-- of the vector.
withDropM :: (Storable e)
=> Int
-> STVector s e
-> (STVector s e -> ST s a)
-> ST s a
withDropM i v f = do
n <- getDim v
withSliceM i (n-i) v f
-- | Like 'withTake', but perform the action with a mutable view
-- of the vector.
withTakeM :: (Storable e)
=> Int
-> STVector s e
-> (STVector s e -> ST s a)
-> ST s a
withTakeM = withSliceM 0
-- | Like 'withSplitAt' but perform the action with mutable views
-- of the vector.
withSplitAtM :: (Storable e)
=> Int
-> STVector s e
-> (STVector s e -> STVector s e -> ST s a)
-> ST s a
withSplitAtM i v f = do
n <- getDim v
withSliceM 0 i v $ \v1 ->
withSliceM i (n-i) v $ \v2 ->
f v1 v2
-- | Perform an action with a view gotten from dropping the given
-- number of elements from the start of the vector.
withDrop :: (RVector v, Storable e)
=> Int
-> v e
-> (forall v'. RVector v' => v' e -> ST s a)
-> ST s a
withDrop i v f = do
mv <- unsafeThaw v
withDropM i mv f
-- | Perform an action with a view gotten from taking the given
-- number of elements from the start of the vector.
withTake :: (RVector v, Storable e)
=> Int
-> v e
-> (forall v'. RVector v' => v' e -> ST s a)
-> ST s a
withTake n v f = do
mv <- unsafeThaw v
withTakeM n mv f
-- | Perform an action with views from splitting the vector at the
-- given index.
withSplitAt :: (RVector v, Storable e)
=> Int
-> v e
-> (forall v1' v2'. (RVector v1', RVector v2') => v1' e -> v2' e -> ST s a)
-> ST s a
withSplitAt i v f = do
mv <- unsafeThaw v
withSplitAtM i mv f
-- | Creates a new vector of the given length. The elements will be
-- uninitialized.
new_ :: (Storable e) => Int -> ST s (STVector s e)
new_ n
| n < 0 = error $
printf "new_ %d: invalid dimension" n
| otherwise = unsafeIOToST $ do
f <- mallocForeignPtrArray n
return $ STVector $ V.unsafeFromForeignPtr f 0 n
-- | Create a vector with every element initialized to the same value.
new :: (Storable e) => Int -> e -> ST s (STVector s e)
new n e = do
x <- new_ n
setElems x $ replicate n e
return x
-- | Creates a new vector by copying another one.
newCopy :: (RVector v, Storable e) => v e -> ST s (STVector s e)
newCopy x = do
n <- getDim x
y <- new_ n
unsafeCopyTo y x
return y
-- | @copyTo dst src@ replaces the values in @dst@ with those in
-- source. The operands must be the same shape.
copyTo :: (RVector v, Storable e) => STVector s e -> v e -> ST s ()
copyTo = checkOp2 "copyTo" unsafeCopyTo
{-# INLINE copyTo #-}
-- | Same as 'copyTo' but does not check the dimensions.
unsafeCopyTo :: (RVector v, Storable e) => STVector s e -> v e -> ST s ()
unsafeCopyTo dst src = do
n <- getDim dst
unsafeIOToST $
unsafeWith dst $ \pdst ->
unsafeWith src $ \psrc ->
copyArray pdst psrc n
{-# INLINE unsafeCopyTo #-}
-- | Swap the values stored in two vectors.
swap :: (BLAS1 e) => STVector s e -> STVector s e -> ST s ()
swap = checkOp2 "swap" unsafeSwap
{-# INLINE swap #-}
-- | Same as 'swap' but does not check the dimensions.
unsafeSwap :: (BLAS1 e) => STVector s e -> STVector s e -> ST s ()
unsafeSwap = strideCall2 BLAS.swap
{-# INLINE unsafeSwap #-}
-- | Get the indices of the elements in the vector, @[ 0..n-1 ]@, where
-- @n@ is the dimension of the vector.
getIndices :: (RVector v, Storable e) => v e -> ST s [Int]
getIndices v = do
n <- getDim v
return $ [ 0..n-1 ]
{-# INLINE getIndices #-}
-- | Lazily get the elements of the vector.
getElems :: (RVector v, Storable e) => v e -> ST s [e]
getElems v = let
go end p' | p' == end = do
touch v
return []
| otherwise = unsafeInterleaveIO $ do
e <- peek p'
es <- go end (p' `advancePtr` 1)
return $ e `seq` (e:es)
in do
n <- getDim v
unsafeIOToST $
unsafeWith v $ \p ->
go (p `advancePtr` n) p
where
touch v' = unsafeWith v' $ const (return ())
{-# SPECIALIZE INLINE getElems :: STVector s Double -> ST s [Double] #-}
{-# SPECIALIZE INLINE getElems :: STVector s (Complex Double) -> ST s [Complex Double] #-}
-- | Get the elements of the vector.
getElems' :: (RVector v, Storable e) => v e -> ST s [e]
getElems' v = let
go end p' es | p' == end =
return es
| otherwise = do
e <- peek p'
go end (p' `advancePtr` (-1)) (e:es)
in do
n <- getDim v
unsafeIOToST $
unsafeWith v $ \p ->
go (p `advancePtr` (-1)) (p `advancePtr` (n-1)) []
{-# SPECIALIZE INLINE getElems' :: STVector s Double -> ST s [Double] #-}
{-# SPECIALIZE INLINE getElems' :: STVector s (Complex Double) -> ST s [Complex Double] #-}
-- | Lazily get the association list of the vector.
getAssocs :: (RVector v, Storable e) => v e -> ST s [(Int,e)]
getAssocs x = liftM2 zip (getIndices x) (getElems x)
{-# INLINE getAssocs #-}
-- | Get the association list of the vector.
getAssocs' :: (RVector v, Storable e) => v e -> ST s [(Int,e)]
getAssocs' x = liftM2 zip (getIndices x) (getElems' x)
{-# INLINE getAssocs' #-}
-- | Set all of the values of the vector from the elements in the list.
setElems :: (Storable e) => STVector s e -> [e] -> ST s ()
setElems x es = let
go n [] i _ | i < n = error $
printf ("setElems <vector with dim %d>"
++ " <list with length %d>:"
++ " not enough elements") n i
go n (_:_) i _ | i == n = error $
printf ("setElems <vector with dim %d>"
++ " <list with length at least %d>:"
++ " too many elements") n (i+1)
go _ [] _ _ = return ()
go n (f:fs) i p = do
poke p f
go n fs (i+1) (p `advancePtr` 1)
in do
n <- getDim x
unsafeIOToST $ unsafeWith x $ go n es 0
-- | Set the given values in the vector.  If an index is repeated,
-- the resulting value is implementation-defined.
setAssocs :: (Storable e) => STVector s e -> [(Int,e)] -> ST s ()
setAssocs x ies =
let go n p ((i,e):ies') = do
when (i < 0 || i >= n) $ error $
printf ("setAssocs <vector with dim %d>"
++ " [ ..., (%d,_), ... ]: invalid index") n i
pokeElemOff p i e
go n p ies'
go _ _ [] = return ()
in do
n <- getDim x
unsafeIOToST $ unsafeWith x $ \p -> go n p ies
unsafeSetAssocs :: (Storable e) => STVector s e -> [(Int,e)] -> ST s ()
unsafeSetAssocs x ies =
let go p ((i,e):ies') = do
pokeElemOff p i e
go p ies'
go _ [] = return ()
in unsafeIOToST $ unsafeWith x $ \p -> go p ies
-- | Get the element stored at the given index.
read :: (RVector v, Storable e) => v e -> Int -> ST s e
read x i = do
n <- getDim x
when (i < 0 || i >= n) $ error $
printf ("read <vector with dim %d> %d:"
++ " invalid index") n i
unsafeRead x i
{-# SPECIALIZE INLINE read :: STVector s Double -> Int -> ST s (Double) #-}
{-# SPECIALIZE INLINE read :: STVector s (Complex Double) -> Int -> ST s (Complex Double) #-}
-- | Same as 'read' but does not range check the index.
unsafeRead :: (RVector v, Storable e) => v e -> Int -> ST s e
unsafeRead x i =
unsafeIOToST $ unsafeWith x $ \p -> peekElemOff p i
{-# SPECIALIZE INLINE unsafeRead :: STVector s Double -> Int -> ST s (Double) #-}
{-# SPECIALIZE INLINE unsafeRead :: STVector s (Complex Double) -> Int -> ST s (Complex Double) #-}
-- | Set the element stored at the given index.
write :: (Storable e) => STVector s e -> Int -> e -> ST s ()
write x i e = do
n <- getDim x
when (i < 0 || i >= n) $ error $
printf ("write <vector with dim %d> %d:"
++ " invalid index") n i
unsafeWrite x i e
{-# SPECIALIZE INLINE write :: STVector s Double -> Int -> Double -> ST s () #-}
{-# SPECIALIZE INLINE write :: STVector s (Complex Double) -> Int -> Complex Double -> ST s () #-}
-- | Same as 'write' but does not range check the index.
unsafeWrite :: (Storable e) => STVector s e -> Int -> e -> ST s ()
unsafeWrite x i e =
unsafeIOToST $ unsafeWith x $ \p -> pokeElemOff p i e
{-# SPECIALIZE INLINE unsafeWrite :: STVector s Double -> Int -> Double -> ST s () #-}
{-# SPECIALIZE INLINE unsafeWrite :: STVector s (Complex Double) -> Int -> Complex Double -> ST s () #-}
-- | Modify the element stored at the given index.
modify :: (Storable e) => STVector s e -> Int -> (e -> e) -> ST s ()
modify x i f = do
n <- getDim x
when (i < 0 || i >= n) $ error $
printf ("modify <vector with dim %d> %d:"
++ " invalid index") n i
unsafeModify x i f
{-# SPECIALIZE INLINE modify :: STVector s Double -> Int -> (Double -> Double) -> ST s () #-}
{-# SPECIALIZE INLINE modify :: STVector s (Complex Double) -> Int -> (Complex Double -> Complex Double) -> ST s () #-}
-- | Same as 'modify' but does not range check the index.
unsafeModify :: (Storable e) => STVector s e -> Int -> (e -> e) -> ST s ()
unsafeModify x i f =
unsafeIOToST $ unsafeWith x $ \p -> do
e <- peekElemOff p i
pokeElemOff p i $ f e
{-# SPECIALIZE INLINE unsafeModify :: STVector s Double -> Int -> (Double -> Double) -> ST s () #-}
{-# SPECIALIZE INLINE unsafeModify :: STVector s (Complex Double) -> Int -> (Complex Double -> Complex Double) -> ST s () #-}
unsafeSwapElems :: (Storable e) => STVector s e -> Int -> Int -> ST s ()
unsafeSwapElems x i1 i2 = unsafeIOToST $ unsafeWith x $ \p ->
let p1 = p `advancePtr` i1
p2 = p `advancePtr` i2
in do
e1 <- peek p1
e2 <- peek p2
poke p2 e1
poke p1 e2
{-# SPECIALIZE INLINE unsafeSwapElems :: STVector s Double -> Int -> Int -> ST s () #-}
{-# SPECIALIZE INLINE unsafeSwapElems :: STVector s (Complex Double) -> Int -> Int -> ST s () #-}
-- | @mapTo dst f src@ replaces @dst@ elementwise with @f(src)@.
mapTo :: (RVector v, Storable e, Storable f)
=> STVector s f
-> (e -> f)
-> v e
-> ST s ()
mapTo dst f src = (checkOp2 "mapTo _" $ \z x -> unsafeMapTo z f x) dst src
{-# INLINE mapTo #-}
-- | Same as 'mapTo' but does not check dimensions.
unsafeMapTo :: (RVector v, Storable e, Storable f)
=> STVector s f
-> (e -> f)
-> v e
-> ST s ()
unsafeMapTo dst f src =
let go end pdst psrc
| pdst == end =
return ()
| otherwise = do
e <- peek psrc
poke pdst (f e)
go end (pdst `advancePtr` 1) (psrc `advancePtr` 1)
in do
ndst <- getDim dst
unsafeIOToST $
unsafeWith dst $ \pdst ->
unsafeWith src $ \psrc ->
go (pdst `advancePtr` ndst) pdst psrc
where
{-# INLINE unsafeMapTo #-}
-- | @zipWithTo dst f x y@ replaces @dst@ elementwise with @f(x,y)@.
zipWithTo :: (RVector v1, RVector v2, Storable e1, Storable e2, Storable f)
=> STVector s f
-> (e1 -> e2 -> f)
-> v1 e1
-> v2 e2
-> ST s ()
zipWithTo dst f x y =
(checkOp3 "zipWithTo _" $ \dst1 x1 y1 -> unsafeZipWithTo dst1 f x1 y1)
dst x y
{-# INLINE zipWithTo #-}
-- | Same as 'zipWithTo' but does not range-check dimensions.
unsafeZipWithTo :: (RVector v1, RVector v2, Storable e1, Storable e2, Storable f)
=> STVector s f
-> (e1 -> e2 -> f)
-> v1 e1
-> v2 e2
-> ST s ()
unsafeZipWithTo dst f src1 src2 =
let go end pdst psrc1 psrc2
| pdst == end =
return ()
| otherwise = do
e1 <- peek psrc1
e2 <- peek psrc2
poke pdst (f e1 e2)
go end (pdst `advancePtr` 1) (psrc1 `advancePtr` 1)
(psrc2 `advancePtr` 1)
in do
ndst <- getDim dst
unsafeIOToST $
unsafeWith dst $ \pdst ->
unsafeWith src1 $ \psrc1 ->
unsafeWith src2 $ \psrc2 ->
go (pdst `advancePtr` ndst) pdst psrc1 psrc2
{-# INLINE unsafeZipWithTo #-}
-- | Set every element in the vector to a default value. For
-- standard numeric types (including 'Double', 'Complex Double', and 'Int'),
-- the default value is '0'.
clear :: (Storable e) => STVector s e -> ST s ()
clear x = do
n <- getDim x
unsafeIOToST $ unsafeWith x $ \p -> clearArray p n
-- | @negateTo dst x@ replaces @dst@ with @negate(x)@.
negateTo :: (RVector v, VNum e) => STVector s e -> v e -> ST s ()
negateTo = checkOp2 "negateTo" $ \dst x ->
call2 VMath.vNeg x dst
{-# INLINE negateTo #-}
-- | @absTo dst x@ replaces @dst@ with @abs(x)@.
absTo :: (RVector v, VNum e) => STVector s e -> v e -> ST s ()
absTo = checkOp2 "absTo" $ \dst x ->
call2 VMath.vAbs x dst
{-# INLINE absTo #-}
-- | @signumTo dst x@ replaces @dst@ with @signum(x)@.
signumTo :: (RVector v, VNum e) => STVector s e -> v e -> ST s ()
signumTo = checkOp2 "signumTo" $ \dst x ->
call2 VMath.vSgn x dst
{-# INLINE signumTo #-}
-- | @conjugateTo dst x@ replaces @dst@ with @conjugate(x)@.
conjugateTo :: (RVector v, VNum e) => STVector s e -> v e -> ST s ()
conjugateTo = checkOp2 "conjugateTo" $ \dst x ->
call2 VMath.vConj x dst
{-# INLINE conjugateTo #-}
-- | @addTo dst x y@ replaces @dst@ with @x+y@.
addTo :: (RVector v1, RVector v2, VNum e)
=> STVector s e -> v1 e -> v2 e -> ST s ()
addTo = checkOp3 "addTo" $ \dst x y -> call3 VMath.vAdd x y dst
{-# INLINE addTo #-}
-- | @subTo dst x y@ replaces @dst@ with @x-y@.
subTo :: (RVector v1, RVector v2, VNum e)
=> STVector s e -> v1 e -> v2 e -> ST s ()
subTo = checkOp3 "subTo" $ \dst x y -> call3 VMath.vSub x y dst
{-# INLINE subTo #-}
-- | @mulTo dst x y@ replaces @dst@ with @x*y@.
mulTo :: (RVector v1, RVector v2, VNum e)
=> STVector s e -> v1 e -> v2 e -> ST s ()
mulTo = checkOp3 "mulTo" $ \dst x y -> call3 VMath.vMul x y dst
{-# INLINE mulTo #-}
-- | @divTo dst x y@ replaces @dst@ with @x/y@.
divTo :: (RVector v1, RVector v2, VFractional e)
=> STVector s e -> v1 e -> v2 e -> ST s ()
divTo = checkOp3 "divTo" $ \dst x y -> call3 VMath.vDiv x y dst
{-# INLINE divTo #-}
-- | @recipTo dst x@ replaces @dst@ with @1/x@.
recipTo :: (RVector v, VFractional e)
=> STVector s e -> v e -> ST s ()
recipTo = checkOp2 "recipTo" $ \dst x -> call2 VMath.vInv x dst
{-# INLINE recipTo #-}
-- | @sqrtTo dst x@ replaces @dst@ with @sqrt(x)@.
sqrtTo :: (RVector v, VFloating e) => STVector s e -> v e -> ST s ()
sqrtTo = checkOp2 "sqrtTo" $ \dst x -> call2 VMath.vSqrt x dst
{-# INLINE sqrtTo #-}
-- | @expTo dst x@ replaces @dst@ with @exp(x)@.
expTo :: (RVector v, VFloating e) => STVector s e -> v e -> ST s ()
expTo = checkOp2 "expTo" $ \dst x -> call2 VMath.vExp x dst
{-# INLINE expTo #-}
-- | @logTo dst x@ replaces @dst@ with @log(x)@.
logTo :: (RVector v, VFloating e) => STVector s e -> v e -> ST s ()
logTo = checkOp2 "logTo" $ \dst x -> call2 VMath.vLog x dst
{-# INLINE logTo #-}
-- | @powTo dst x y@ replaces @dst@ with @x ** y@.
powTo :: (RVector v1, RVector v2, VFloating e)
=> STVector s e -> v1 e -> v2 e -> ST s ()
powTo = checkOp3 "powTo" $ \dst x y -> call3 VMath.vPow x y dst
{-# INLINE powTo #-}
-- | @sinTo dst x@ replaces @dst@ with @sin(x)@.
sinTo :: (RVector v, VFloating e) => STVector s e -> v e -> ST s ()
sinTo = checkOp2 "sinTo" $ \dst x -> call2 VMath.vSin x dst
{-# INLINE sinTo #-}
-- | @cosTo dst x@ replaces @dst@ with @cos(x)@.
cosTo :: (RVector v, VFloating e) => STVector s e -> v e -> ST s ()
cosTo = checkOp2 "cosTo" $ \dst x -> call2 VMath.vCos x dst
{-# INLINE cosTo #-}
-- | @tanTo dst x@ replaces @dst@ with @tan(x)@.
tanTo :: (RVector v, VFloating e) => STVector s e -> v e -> ST s ()
tanTo = checkOp2 "tanTo" $ \dst x -> call2 VMath.vTan x dst
{-# INLINE tanTo #-}
-- | @asinTo dst x@ replaces @dst@ with @asin(x)@.
asinTo :: (RVector v, VFloating e) => STVector s e -> v e -> ST s ()
asinTo = checkOp2 "asinTo" $ \dst x -> call2 VMath.vASin x dst
{-# INLINE asinTo #-}
-- | @acosTo dst x@ replaces @dst@ with @acos(x)@.
acosTo :: (RVector v, VFloating e) => STVector s e -> v e -> ST s ()
acosTo = checkOp2 "acosTo" $ \dst x -> call2 VMath.vACos x dst
{-# INLINE acosTo #-}
-- | @atanTo dst x@ replaces @dst@ with @atan(x)@.
atanTo :: (RVector v, VFloating e) => STVector s e -> v e -> ST s ()
atanTo = checkOp2 "atanTo" $ \dst x -> call2 VMath.vATan x dst
{-# INLINE atanTo #-}
-- | @sinhTo dst x@ replaces @dst@ with @sinh(x)@.
sinhTo :: (RVector v, VFloating e) => STVector s e -> v e -> ST s ()
sinhTo = checkOp2 "sinhTo" $ \dst x -> call2 VMath.vSinh x dst
{-# INLINE sinhTo #-}
-- | @coshTo dst x@ replaces @dst@ with @cosh(x)@.
coshTo :: (RVector v, VFloating e) => STVector s e -> v e -> ST s ()
coshTo = checkOp2 "coshTo" $ \dst x -> call2 VMath.vCosh x dst
{-# INLINE coshTo #-}
-- | @tanhTo dst x@ replaces @dst@ with @tanh(x)@.
tanhTo :: (RVector v, VFloating e) => STVector s e -> v e -> ST s ()
tanhTo = checkOp2 "tanhTo" $ \dst x -> call2 VMath.vTanh x dst
{-# INLINE tanhTo #-}
-- | @asinhTo dst x@ replaces @dst@ with @asinh(x)@.
asinhTo :: (RVector v, VFloating e) => STVector s e -> v e -> ST s ()
asinhTo = checkOp2 "asinhTo" $ \dst x -> call2 VMath.vASinh x dst
{-# INLINE asinhTo #-}
-- | @acoshTo dst x@ replaces @dst@ with @acosh(x)@.
acoshTo :: (RVector v, VFloating e) => STVector s e -> v e -> ST s ()
acoshTo = checkOp2 "acoshTo" $ \dst x -> call2 VMath.vACosh x dst
{-# INLINE acoshTo #-}
-- | @atanhTo dst x@ replaces @dst@ with @atanh(x)@.
atanhTo :: (RVector v, VFloating e) => STVector s e -> v e -> ST s ()
atanhTo = checkOp2 "atanhTo" $ \dst x -> call2 VMath.vATanh x dst
{-# INLINE atanhTo #-}
-- | Gets the sum of the absolute values of the vector entries.
getSumAbs :: (RVector v, BLAS1 e) => v e -> ST s Double
getSumAbs = strideCall BLAS.asum
{-# INLINE getSumAbs #-}
-- | Gets the 2-norm of a vector.
getNorm2 :: (RVector v, BLAS1 e) => v e -> ST s Double
getNorm2 = strideCall BLAS.nrm2
{-# INLINE getNorm2 #-}
-- | Gets the index and value of the element with maximum magnitude.  This is
-- undefined if any of the elements are @NaN@. It will throw an exception if
-- the dimension of the vector is 0.
getWhichMaxAbs :: (RVector v, BLAS1 e) => v e -> ST s (Int, e)
getWhichMaxAbs x = do
n <- getDim x
when (n == 0) $ error $
"getWhichMaxAbs <vector with dim 0>: empty vector"
i <- strideCall BLAS.iamax x
e <- unsafeRead x i
return (i,e)
{-# INLINE getWhichMaxAbs #-}
-- | Computes the dot product of two vectors.
getDot :: (RVector v, RVector v', BLAS1 e)
=> v e -> v' e -> ST s e
getDot = checkOp2 "getDot" unsafeGetDot
{-# INLINE getDot #-}
-- | Same as 'getDot' but does not check dimensions.
unsafeGetDot :: (RVector x, RVector y, BLAS1 e)
=> x e -> y e -> ST s e
unsafeGetDot x y = (strideCall2 BLAS.dotc) y x
{-# INLINE unsafeGetDot #-}
-- | @scaleM_ k x@ sets @x := k * x@.
scaleM_ :: (Storable e, BLAS1 e) => e -> STVector s e -> ST s ()
scaleM_ k x = do
n <- getDim x
unsafeIOToST $
unsafeWith x $ \px ->
BLAS.scal n k px 1
{-# INLINE scaleM_ #-}
-- | @addWithScaleM_ alpha x y@ sets @y := alpha * x + y@.
addWithScaleM_ :: (RVector v, BLAS1 e) => e -> v e -> STVector s e -> ST s ()
addWithScaleM_ alpha x y =
(checkOp2 "addWithScaleM_" $ \x1 y1 -> unsafeAddWithScaleM_ alpha x1 y1)
x y
{-# INLINE addWithScaleM_ #-}
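-- An illustrative use (a sketch, not part of the original module): compute
-- @2*x + y@ without mutating either input.
--
-- > axpyExample :: Vector Double -> Vector Double -> Vector Double
-- > axpyExample x y = create $ do
-- >     my <- newCopy y
-- >     addWithScaleM_ 2 x my
-- >     return my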
-- | Same as 'addWithScaleM_' but does not check dimensions.
unsafeAddWithScaleM_ :: (RVector v, BLAS1 e)
=> e -> v e -> STVector s e -> ST s ()
unsafeAddWithScaleM_ alpha x y =
(strideCall2 $ flip BLAS.axpy alpha) x y
{-# INLINE unsafeAddWithScaleM_ #-}
-- | @kroneckerTo dst x y@ sets @dst := x \otimes y@.
kroneckerTo :: (RVector v1, RVector v2, BLAS2 e)
=> STVector s e -> v1 e -> v2 e -> ST s ()
kroneckerTo dst x y = do
m <- getDim x
n <- getDim y
dimdst <- getDim dst
when (dimdst /= m * n) $ error $
printf ("kroneckerTo"
++ " <vector with dim %d>"
++ " <vector with dim %d>"
++ " <vector with dim %d>:"
++ " dimension mismatch") dimdst m n
clear dst
unsafeIOToST $
unsafeWith dst $ \pdst ->
unsafeWith x $ \px ->
unsafeWith y $ \py ->
BLAS.geru n m 1 py 1 px 1 pdst (max n 1)
call2 :: (RVector x, RVector y, Storable e, Storable f)
=> (Int -> Ptr e -> Ptr f -> IO a)
-> x e -> y f -> ST s a
call2 f x y = do
n <- getDim x
unsafeIOToST $
unsafeWith x $ \pX ->
unsafeWith y $ \pY ->
f n pX pY
{-# INLINE call2 #-}
call3 :: (RVector x, RVector y, RVector z, Storable e, Storable f, Storable g)
=> (Int -> Ptr e -> Ptr f -> Ptr g -> IO a)
-> x e -> y f -> z g -> ST s a
call3 f x y z = do
n <- getDim x
unsafeIOToST $
unsafeWith x $ \pX ->
unsafeWith y $ \pY ->
unsafeWith z $ \pZ ->
f n pX pY pZ
{-# INLINE call3 #-}
strideCall :: (RVector x, Storable e)
=> (Int -> Ptr e -> Int -> IO a)
-> x e -> ST s a
strideCall f x = do
n <- getDim x
unsafeIOToST $
unsafeWith x $ \pX ->
f n pX incX
where
incX = 1
{-# INLINE strideCall #-}
strideCall2 :: (RVector x, RVector y, Storable e, Storable f)
=> (Int -> Ptr e -> Int -> Ptr f -> Int -> IO a)
-> x e -> y f -> ST s a
strideCall2 f x y = do
n <- getDim x
unsafeIOToST $
unsafeWith x $ \pX ->
unsafeWith y $ \pY ->
f n pX incX pY incY
where
incX = 1
incY = 1
{-# INLINE strideCall2 #-}
checkOp2 :: (RVector x, RVector y, Storable e, Storable f)
=> String
-> (x e -> y f -> ST s a)
-> x e
-> y f
-> ST s a
checkOp2 str f x y = do
n1 <- getDim x
n2 <- getDim y
when (n1 /= n2) $ error $
printf ("%s <vector with dim %d> <vector with dim %d>:"
++ " dimension mismatch") str n1 n2
f x y
{-# INLINE checkOp2 #-}
checkOp3 :: (RVector x, RVector y, RVector z, Storable e, Storable f, Storable g)
=> String
-> (x e -> y f -> z g -> ST s a)
-> x e
-> y f
-> z g
-> ST s a
checkOp3 str f x y z = do
n1 <- getDim x
n2 <- getDim y
n3 <- getDim z
when (n1 /= n2 || n1 /= n3) $ error $
printf ("%s <vector with dim %d> <vector with dim %d>"
++ " <vector with dim %d>:"
++ " dimension mismatch") str n1 n2 n3
f x y z
{-# INLINE checkOp3 #-}
| patperry/hs-linear-algebra | lib/Numeric/LinearAlgebra/Vector/STBase.hs | bsd-3-clause | 30,065 | 0 | 17 | 9,083 | 9,112 | 4,609 | 4,503 | 674 | 4 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE Rank2Types #-}
#ifndef HLINT
{-# LANGUAGE UnboxedTuples #-}
#endif
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE GADTs #-}
#ifdef TRUSTWORTHY
{-# LANGUAGE Trustworthy #-}
#endif
{-# OPTIONS_GHC -fno-full-laziness #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Data.Lens
-- Copyright : (C) 2012-2014 Edward Kmett, (C) 2006-2012 Neil Mitchell
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : Rank2Types
--
-- Smart and naïve generic traversals given 'Data' instances.
--
-- 'template', 'uniplate', and 'biplate' each build up information about what
-- types can be contained within another type to speed up 'Traversal'.
--
----------------------------------------------------------------------------
module Data.Data.Lens
(
-- * Generic Traversal
template
, tinplate
, uniplate
, biplate
-- * Field Accessor Traversal
, upon
, upon'
, onceUpon
, onceUpon'
-- * Data Traversal
, gtraverse
) where
import Control.Applicative
import Control.Exception as E
import Control.Lens.Internal.Context
import Control.Lens.Internal.Indexed
import Control.Lens.Lens
import Control.Lens.Setter
import Control.Lens.Traversal
import Control.Lens.Type
import Data.Data
import GHC.IO
import Data.Maybe
import Data.Foldable
import qualified Data.HashMap.Strict as M
import Data.HashMap.Strict (HashMap, (!))
import qualified Data.HashSet as S
import Data.HashSet (HashSet)
import Data.IORef
import Data.Monoid
import GHC.Exts (realWorld#)
#ifdef HLINT
{-# ANN module "HLint: ignore Eta reduce" #-}
{-# ANN module "HLint: ignore Use foldl" #-}
{-# ANN module "HLint: ignore Reduce duplication" #-}
{-# ANN module "HLint: ignore Unused LANGUAGE pragma" #-}
#endif
-- $setup
-- >>> :set -XNoOverloadedStrings
-- >>> import Control.Lens
-------------------------------------------------------------------------------
-- Generic Traversal
-------------------------------------------------------------------------------
-- | A generic applicative transformation that maps over the immediate subterms.
--
-- 'gtraverse' is to 'traverse' what 'gmapM' is to 'mapM'
--
-- This really belongs in @Data.Data@.
gtraverse :: (Applicative f, Data a) => (forall d. Data d => d -> f d) -> a -> f a
gtraverse f = gfoldl (\x y -> x <*> f y) pure
{-# INLINE gtraverse #-}
-------------------------------------------------------------------------------
-- Naïve Traversal
-------------------------------------------------------------------------------
-- | Naïve 'Traversal' using 'Data'. This does not attempt to optimize the traversal.
--
-- This is primarily useful when the children are immediately obvious, and for benchmarking.
tinplate :: (Data s, Typeable a) => Traversal' s a
tinplate f = gfoldl (step f) pure
{-# INLINE tinplate #-}
step :: forall s a f r. (Applicative f, Typeable a, Data s) => (a -> f a) -> f (s -> r) -> s -> f r
step f w s = w <*> case mightBe :: Maybe (Is s a) of
Just Data.Data.Lens.Refl -> f s
Nothing -> tinplate f s
{-# INLINE step #-}
-------------------------------------------------------------------------------
-- Smart Traversal
-------------------------------------------------------------------------------
-- | Find every occurrence of a given type @a@ recursively that doesn't require
-- passing through something of type @a@ using 'Data', while avoiding traversal
-- of areas that cannot contain a value of type @a@.
--
-- This is 'uniplate' with a more liberal signature.
template :: forall s a. (Data s, Typeable a) => Traversal' s a
template = uniplateData (fromOracle answer) where
answer = hitTest (undefined :: s) (undefined :: a)
{-# INLINE template #-}
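-- For example (an illustrative sketch, not an original doctest; 'toListOf' is
-- from "Control.Lens"):
--
-- > ints :: [Int]
-- > ints = toListOf template ((1 :: Int, 'a'), [2, 3 :: Int])
-- > -- ints == [1,2,3]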
-- | Find descendants of type @a@ non-transitively, while avoiding computation of areas that cannot contain values of
-- type @a@ using 'Data'.
--
-- 'uniplate' is a useful default definition for 'Control.Lens.Plated.plate'
uniplate :: Data a => Traversal' a a
uniplate = template
{-# INLINE uniplate #-}
-- | 'biplate' performs like 'template', except when @s ~ a@, it returns itself and nothing else.
biplate :: forall s a. (Data s, Typeable a) => Traversal' s a
biplate = biplateData (fromOracle answer) where
answer = hitTest (undefined :: s) (undefined :: a)
{-# INLINE biplate #-}
------------------------------------------------------------------------------
-- Automatic Traversal construction from field accessors
------------------------------------------------------------------------------
data FieldException a = FieldException !Int a deriving Typeable
instance Show (FieldException a) where
showsPrec d (FieldException i _) = showParen (d > 10) $
showString "<field " . showsPrec 11 i . showChar '>'
instance Typeable a => Exception (FieldException a)
lookupon :: Typeable a => LensLike' (Indexing Identity) s a -> (s -> a) -> s -> Maybe (Int, Context a a s)
lookupon l field s = case unsafePerformIO $ E.try $ evaluate $ field $ s & indexing l %@~ \i (a::a) -> E.throw (FieldException i a) of
Right _ -> Nothing
Left e -> case fromException e of
Nothing -> Nothing
Just (FieldException i a) -> Just (i, Context (\a' -> set (elementOf l i) a' s) a)
{-# INLINE lookupon #-}
-- | This automatically constructs a 'Traversal'' from a function.
--
-- >>> (2,4) & upon fst *~ 5
-- (10,4)
--
-- There are however, caveats on how this function can be used!
--
-- First, the user supplied function must access only one field of the specified type. That is to say the target
-- must be a single element that would be visited by @'holesOnOf' 'template' 'uniplate'@
--
-- Note: this even permits a number of functions to be used directly.
--
-- >>> [1,2,3,4] & upon head .~ 0
-- [0,2,3,4]
--
-- >>> [1,2,3,4] & upon last .~ 5
-- [1,2,3,5]
--
-- >>> [1,2,3,4] ^? upon tail
-- Just [2,3,4]
--
-- >>> "" ^? upon tail
-- Nothing
--
-- Accessing parents on the way down to children is okay:
--
-- >>> [1,2,3,4] & upon (tail.tail) .~ [10,20]
-- [1,2,10,20]
--
-- Second, the structure must not contain strict or unboxed fields of the same type that will be visited by 'Data'
--
-- @'upon' :: ('Data' s, 'Data' a) => (s -> a) -> 'IndexedTraversal'' [Int] s a@
upon :: forall p f s a. (Indexable [Int] p, Applicative f, Data s, Data a) => (s -> a) -> p a (f a) -> s -> f s
upon field f s = case lookupon template field s of
Nothing -> pure s
Just (i, Context k0 a0) ->
let
go :: [Int] -> Traversal' s a -> (a -> s) -> a -> f s
go is l k a = case lookupon (l.uniplate) field s of
Nothing -> k <$> indexed f (reverse is) a
Just (j, Context k' a') -> go (j:is) (l.elementOf uniplate j) k' a'
in go [i] (elementOf template i) k0 a0
{-# INLINE upon #-}
-- | The design of 'onceUpon'' doesn't allow it to search inside of values of type 'a' for other values of type 'a'.
-- 'upon'' provides this additional recursion.
--
-- Like 'onceUpon'', 'upon'' trusts the user supplied function more than 'upon' using it directly
-- as the accessor. This enables reading from the resulting 'Lens' to be considerably faster at the risk of
-- generating an illegal lens.
--
-- >>> upon' (tail.tail) .~ [10,20] $ [1,2,3,4]
-- [1,2,10,20]
upon' :: forall s a. (Data s, Data a) => (s -> a) -> IndexedLens' [Int] s a
upon' field f s = let
~(isn, kn) = case lookupon template field s of
Nothing -> (error "upon': no index, not a member", const s)
Just (i, Context k0 _) -> go [i] (elementOf template i) k0
go :: [Int] -> Traversal' s a -> (a -> s) -> ([Int], a -> s)
go is l k = case lookupon (l.uniplate) field s of
Nothing -> (reverse is, k)
Just (j, Context k' _) -> go (j:is) (l.elementOf uniplate j) k'
in kn <$> indexed f isn (field s)
{-# INLINE upon' #-}
-- | This automatically constructs a 'Traversal'' from a field accessor.
--
-- The index of the 'Traversal' can be used as an offset into @'elementOf' ('indexing' 'template')@ or into the list
-- returned by @'holesOf' 'template'@.
--
-- The design of 'onceUpon' doesn't allow it to search inside of values of type 'a' for other values of type 'a'.
-- 'upon' provides this additional recursion, but at the expense of performance.
--
-- >>> onceUpon (tail.tail) .~ [10,20] $ [1,2,3,4] -- BAD
-- [1,10,20]
--
-- >>> upon (tail.tail) .~ [10,20] $ [1,2,3,4] -- GOOD
-- [1,2,10,20]
--
-- When in doubt, use 'upon' instead.
onceUpon :: forall s a. (Data s, Typeable a) => (s -> a) -> IndexedTraversal' Int s a
onceUpon field f s = case lookupon template field s of
Nothing -> pure s
Just (i, Context k a) -> k <$> indexed f i a
{-# INLINE onceUpon #-}
-- | This more trusting version of 'upon' uses your function directly as the getter for a 'Lens'.
--
-- This means that reading from 'upon'' is considerably faster than 'upon'.
--
-- However, you pay for faster access in two ways:
--
-- 1. When passed an illegal field accessor, 'upon'' will give you a 'Lens' that quietly violates
-- the laws, unlike 'upon', which will give you a legal 'Traversal' that avoids modifying the target.
--
-- 2. Modifying with the lens is slightly slower, since it has to go back and calculate the index after the fact.
--
-- When given a legal field accessor, the index of the 'Lens' can be used as an offset into
-- @'elementOf' ('indexed' 'template')@ or into the list returned by @'holesOf' 'template'@.
--
-- When in doubt, use 'upon'' instead.
onceUpon' :: forall s a. (Data s, Typeable a) => (s -> a) -> IndexedLens' Int s a
onceUpon' field f s = k <$> indexed f i (field s) where
~(i, Context k _) = fromMaybe (error "upon': no index, not a member") (lookupon template field s)
{-# INLINE onceUpon' #-}
-------------------------------------------------------------------------------
-- Type equality
-------------------------------------------------------------------------------
data Is a b where
Refl :: Is a a
mightBe :: (Typeable a, Typeable b) => Maybe (Is a b)
mightBe = gcast Data.Data.Lens.Refl
{-# INLINE mightBe #-}
-------------------------------------------------------------------------------
-- Data Box
-------------------------------------------------------------------------------
data DataBox = forall a. Data a => DataBox
{ dataBoxKey :: TypeRep
, _dataBoxVal :: a
}
dataBox :: Data a => a -> DataBox
dataBox a = DataBox (typeOf a) a
{-# INLINE dataBox #-}
-- partial, caught elsewhere
sybChildren :: Data a => a -> [DataBox]
sybChildren x
| isAlgType dt = do
c <- dataTypeConstrs dt
gmapQ dataBox (fromConstr c `asTypeOf` x)
| otherwise = []
where dt = dataTypeOf x
{-# INLINE sybChildren #-}
-------------------------------------------------------------------------------
-- HitMap
-------------------------------------------------------------------------------
type HitMap = HashMap TypeRep (HashSet TypeRep)
emptyHitMap :: HitMap
emptyHitMap = M.fromList
[ (tRational, S.singleton tInteger)
, (tInteger, S.empty)
] where
tRational = typeOf (undefined :: Rational)
tInteger = typeOf (undefined :: Integer )
insertHitMap :: DataBox -> HitMap -> HitMap
insertHitMap box hit = fixEq trans (populate box) `mappend` hit where
populate :: DataBox -> HitMap
populate a = f a M.empty where
f (DataBox k v) m
| M.member k hit || M.member k m = m
| cs <- sybChildren v = fs cs $ M.insert k (S.fromList $ map dataBoxKey cs) m
fs [] m = m
fs (x:xs) m = fs xs (f x m)
trans :: HitMap -> HitMap
trans m = M.map f m where
f x = x `mappend` foldMap g x
g x = fromMaybe (hit ! x) (M.lookup x m)
fixEq :: Eq a => (a -> a) -> a -> a
fixEq f = go where
go x | x == x' = x'
| otherwise = go x'
where x' = f x
{-# INLINE fixEq #-}
#ifndef HLINT
-- | inlineable 'unsafePerformIO'
inlinePerformIO :: IO a -> a
inlinePerformIO (IO m) = case m realWorld# of
(# _, r #) -> r
{-# INLINE inlinePerformIO #-}
#endif
-------------------------------------------------------------------------------
-- Cache
-------------------------------------------------------------------------------
data Cache = Cache HitMap (HashMap TypeRep (HashMap TypeRep (Maybe Follower)))
cache :: IORef Cache
cache = unsafePerformIO $ newIORef $ Cache emptyHitMap M.empty
{-# NOINLINE cache #-}
readCacheFollower :: DataBox -> TypeRep -> Maybe Follower
readCacheFollower b@(DataBox kb _) ka = inlinePerformIO $
readIORef cache >>= \ (Cache hm m) -> case M.lookup kb m >>= M.lookup ka of
Just a -> return a
Nothing -> E.try (return $! insertHitMap b hm) >>= \r -> case r of
Left SomeException{} -> atomicModifyIORef cache $ \(Cache hm' n) -> (Cache hm' (insert2 kb ka Nothing n), Nothing)
Right hm' | fol <- Just (follower kb ka hm') -> atomicModifyIORef cache $ \(Cache _ n) -> (Cache hm' (insert2 kb ka fol n), fol)
insert2 :: TypeRep -> TypeRep -> a -> HashMap TypeRep (HashMap TypeRep a) -> HashMap TypeRep (HashMap TypeRep a)
insert2 x y v = M.insertWith (const $ M.insert y v) x (M.singleton y v)
{-# INLINE insert2 #-}
{-
readCacheHitMap :: DataBox -> Maybe HitMap
readCacheHitMap b@(DataBox kb _) = inlinePerformIO $
readIORef cache >>= \ (Cache hm _) -> case M.lookup kb hm of
Just _ -> return $ Just hm
Nothing -> E.try (return $! insertHitMap b hm) >>= \r -> case r of
Left SomeException{} -> return Nothing
Right hm' -> atomicModifyIORef cache $ \(Cache _ follow) -> (Cache hm' follow, Just hm')
-}
-------------------------------------------------------------------------------
-- Answers
-------------------------------------------------------------------------------
data Answer b a
= b ~ a => Hit a
| Follow
| Miss
-------------------------------------------------------------------------------
-- Oracles
-------------------------------------------------------------------------------
newtype Oracle a = Oracle { fromOracle :: forall t. Typeable t => t -> Answer t a }
hitTest :: forall a b. (Data a, Typeable b) => a -> b -> Oracle b
hitTest a b = Oracle $ \(c :: c) ->
case mightBe :: Maybe (Is c b) of
Just Data.Data.Lens.Refl -> Hit c
Nothing ->
case readCacheFollower (dataBox a) (typeOf b) of
Just p | not (p (typeOf c)) -> Miss
_ -> Follow
-------------------------------------------------------------------------------
-- Traversals
-------------------------------------------------------------------------------
biplateData :: forall f s a. (Applicative f, Data s, Typeable a) => (forall c. Typeable c => c -> Answer c a) -> (a -> f a) -> s -> f s
biplateData o f a0 = go2 a0 where
go :: Data d => d -> f d
go s = gfoldl (\x y -> x <*> go2 y) pure s
go2 :: Data d => d -> f d
go2 s = case o s of
Hit a -> f a
Follow -> go s
Miss -> pure s
{-# INLINE biplateData #-}
uniplateData :: forall f s a. (Applicative f, Data s, Typeable a) => (forall c. Typeable c => c -> Answer c a) -> (a -> f a) -> s -> f s
uniplateData o f a0 = go a0 where
go :: Data d => d -> f d
go s = gfoldl (\x y -> x <*> go2 y) pure s
go2 :: Data d => d -> f d
go2 s = case o s of
Hit a -> f a
Follow -> go s
Miss -> pure s
{-# INLINE uniplateData #-}
-------------------------------------------------------------------------------
-- Follower
-------------------------------------------------------------------------------
part :: (a -> Bool) -> HashSet a -> (HashSet a, HashSet a)
part p s = (S.filter p s, S.filter (not . p) s)
{-# INLINE part #-}
type Follower = TypeRep -> Bool
follower :: TypeRep -> TypeRep -> HitMap -> Follower
follower a b m
| S.null hit = const False
| S.null miss = const True
| S.size hit < S.size miss = S.member ?? hit
| otherwise = \k -> not (S.member k miss)
where (hit, miss) = part (\x -> S.member b (m ! x)) (S.insert a (m ! a))
| hvr/lens | src/Data/Data/Lens.hs | bsd-3-clause | 16,297 | 0 | 21 | 3,293 | 3,920 | 2,098 | 1,822 | 215 | 3 |
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | Execution mode for tx creation tests.
module Test.Pos.Client.Txp.Mode
( TxpTestProperty
, TxpTestMode
, withBVData
) where
import Universum
import qualified Data.ByteString as BS
import Test.QuickCheck (Testable (..), ioProperty)
import Test.QuickCheck.Monadic (PropertyM, monadic)
import Pos.Chain.Update (BlockVersionData)
import Pos.Client.Txp.Addresses (MonadAddresses (..))
import Pos.Core (Address, makePubKeyAddressBoot)
import Pos.Core.NetworkMagic (NetworkMagic (..))
import Pos.Crypto (deterministicKeyGen)
import Pos.DB (MonadGState (..))
import Test.Pos.Chain.Genesis.Dummy (dummyBlockVersionData)
----------------------------------------------------------------------------
-- Mock for TxCreateMode
----------------------------------------------------------------------------
type TxpTestMode = ReaderT BlockVersionData IO
----------------------------------------------------------------------------
-- Boilerplate TxpTestMode instances
----------------------------------------------------------------------------
instance MonadGState TxpTestMode where
gsAdoptedBVData = ask
instance MonadAddresses TxpTestMode where
type AddrData TxpTestMode = ()
getNewAddress nm _ _ = pure (fakeAddressForMonadAddresses nm)
getFakeChangeAddress nm _ = pure (fakeAddressForMonadAddresses nm)
fakeAddressForMonadAddresses :: NetworkMagic -> Address
fakeAddressForMonadAddresses nm = address
where
    -- The seed for address generation is a ByteString of 32 bytes, each of value 255.
seedSize = 32
seed = BS.replicate seedSize (255 :: Word8)
address = makePubKeyAddressBoot nm $ fst $ deterministicKeyGen seed
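-- | A usage sketch for 'withBVData' (illustrative only; combines definitions
-- from this module, not part of the original test suite):
--
-- > example :: TxpTestMode BlockVersionData
-- > example = withBVData dummyBlockVersionData gsAdoptedBVData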
withBVData
:: MonadReader BlockVersionData m
=> BlockVersionData
-> m a
-> m a
withBVData bvd = local (const bvd)
----------------------------------------------------------------------------
-- Property
----------------------------------------------------------------------------
type TxpTestProperty = PropertyM TxpTestMode
-- Cannot write a general OVERLAPPABLE instance with MonadTrans since
-- type families cannot be OVERLAPPABLE.
instance MonadAddresses TxpTestProperty where
type AddrData TxpTestProperty = AddrData TxpTestMode
getNewAddress nm epochSlots = lift . (getNewAddress nm epochSlots)
getFakeChangeAddress nm = lift . (getFakeChangeAddress nm)
instance Testable a => Testable (TxpTestProperty a) where
property = monadic (ioProperty . flip runReaderT dummyBlockVersionData)
| input-output-hk/pos-haskell-prototype | client/test/Test/Pos/Client/Txp/Mode.hs | mit | 2,649 | 0 | 9 | 446 | 468 | 267 | 201 | -1 | -1 |
-- (c) Simon Marlow 2011, see the file LICENSE for copying terms.
-- Simple wrapper around HTTP, allowing proxy use
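--
-- A minimal usage sketch (illustrative only; the URL is an example value):
--
-- > main :: IO ()
-- > main = getURL "http://example.com/" >>= putStrLn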
module GetURL (getURL) where
import Network.HTTP
import Network.Browser
import Network.URI
getURL :: String -> IO String
getURL url = do
Network.Browser.browse $ do
setCheckForProxy True
setDebugLog Nothing
setOutHandler (const (return ()))
(_, rsp) <- request (getRequest' (escapeURIString isUnescapedInURI url))
return (rspBody rsp)
where
getRequest' :: String -> Request String
getRequest' urlString =
case parseURI urlString of
Nothing -> error ("getRequest: Not a valid URL - " ++ urlString)
Just u -> mkRequest GET u
| gsdlab/clafer | src/GetURL.hs | mit | 690 | 0 | 15 | 145 | 189 | 93 | 96 | 17 | 2 |
{-| Implementation of the Ganeti LUXI interface.
-}
{-
Copyright (C) 2009, 2010, 2011 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.Luxi
( LuxiOp(..)
, Client
, getClient
, closeClient
, callMethod
, submitManyJobs
, queryJobsStatus
) where
import Data.IORef
import Control.Monad
import Text.JSON (encodeStrict, decodeStrict)
import qualified Text.JSON as J
import Text.JSON.Types
import System.Timeout
import qualified Network.Socket as S
import Ganeti.HTools.Utils
import Ganeti.HTools.Types
import Ganeti.Jobs (JobStatus)
import Ganeti.OpCodes (OpCode)
-- * Utility functions
-- | Wrapper over System.Timeout.timeout that fails in the IO monad.
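--
-- A usage sketch (illustrative only; the action is a placeholder):
--
-- > withTimeout 10 "example action" (return ())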
withTimeout :: Int -> String -> IO a -> IO a
withTimeout secs descr action = do
result <- timeout (secs * 1000000) action
(case result of
Nothing -> fail $ "Timeout in " ++ descr
Just v -> return v)
-- * Generic protocol functionality
-- | Currently supported Luxi operations.
data LuxiOp = QueryInstances [String] [String] Bool
| QueryNodes [String] [String] Bool
| QueryGroups [String] [String] Bool
| QueryJobs [Int] [String]
| QueryExports [String] Bool
| QueryConfigValues [String]
| QueryClusterInfo
| QueryTags String String
| SubmitJob [OpCode]
| SubmitManyJobs [[OpCode]]
| WaitForJobChange Int [String] JSValue JSValue Int
| ArchiveJob Int
| AutoArchiveJobs Int Int
| CancelJob Int
| SetDrainFlag Bool
| SetWatcherPause Double
deriving (Show, Read)
-- | The serialisation of LuxiOps into strings in messages.
strOfOp :: LuxiOp -> String
strOfOp QueryNodes {} = "QueryNodes"
strOfOp QueryGroups {} = "QueryGroups"
strOfOp QueryInstances {} = "QueryInstances"
strOfOp QueryJobs {} = "QueryJobs"
strOfOp QueryExports {} = "QueryExports"
strOfOp QueryConfigValues {} = "QueryConfigValues"
strOfOp QueryClusterInfo {} = "QueryClusterInfo"
strOfOp QueryTags {} = "QueryTags"
strOfOp SubmitManyJobs {} = "SubmitManyJobs"
strOfOp WaitForJobChange {} = "WaitForJobChange"
strOfOp SubmitJob {} = "SubmitJob"
strOfOp ArchiveJob {} = "ArchiveJob"
strOfOp AutoArchiveJobs {} = "AutoArchiveJobs"
strOfOp CancelJob {} = "CancelJob"
strOfOp SetDrainFlag {} = "SetDrainFlag"
strOfOp SetWatcherPause {} = "SetWatcherPause"
-- | The end-of-message separator.
eOM :: Char
eOM = '\3'
-- | Valid keys in the requests and responses.
data MsgKeys = Method
| Args
| Success
| Result
-- | The serialisation of MsgKeys into strings in messages.
strOfKey :: MsgKeys -> String
strOfKey Method = "method"
strOfKey Args = "args"
strOfKey Success = "success"
strOfKey Result = "result"
-- | Luxi client encapsulation.
data Client = Client { socket :: S.Socket -- ^ The socket of the client
, rbuf :: IORef String -- ^ Already received buffer
}
-- | Connects to the master daemon and returns a luxi Client.
getClient :: String -> IO Client
getClient path = do
s <- S.socket S.AF_UNIX S.Stream S.defaultProtocol
withTimeout connTimeout "creating luxi connection" $
S.connect s (S.SockAddrUnix path)
rf <- newIORef ""
return Client { socket=s, rbuf=rf}
-- | Closes the client socket.
closeClient :: Client -> IO ()
closeClient = S.sClose . socket
-- | Sends a message over a luxi transport.
sendMsg :: Client -> String -> IO ()
sendMsg s buf =
let _send obuf = do
sbytes <- withTimeout queryTimeout
"sending luxi message" $
S.send (socket s) obuf
unless (sbytes == length obuf) $ _send (drop sbytes obuf)
in _send (buf ++ [eOM])
-- | Waits for a message over a luxi transport.
recvMsg :: Client -> IO String
recvMsg s = do
let _recv obuf = do
nbuf <- withTimeout queryTimeout "reading luxi response" $
S.recv (socket s) 4096
let (msg, remaining) = break (eOM ==) nbuf
(if null remaining
then _recv (obuf ++ msg)
else return (obuf ++ msg, tail remaining))
cbuf <- readIORef $ rbuf s
let (imsg, ibuf) = break (eOM ==) cbuf
(msg, nbuf) <-
(if null ibuf -- if old buffer didn't contain a full message
then _recv cbuf -- then we read from network
else return (imsg, tail ibuf)) -- else we return data from our buffer
writeIORef (rbuf s) nbuf
return msg
-- | Compute the serialized form of a Luxi operation
opToArgs :: LuxiOp -> JSValue
opToArgs (QueryNodes names fields lock) = J.showJSON (names, fields, lock)
opToArgs (QueryGroups names fields lock) = J.showJSON (names, fields, lock)
opToArgs (QueryInstances names fields lock) = J.showJSON (names, fields, lock)
opToArgs (QueryJobs ids fields) = J.showJSON (map show ids, fields)
opToArgs (QueryExports nodes lock) = J.showJSON (nodes, lock)
opToArgs (QueryConfigValues fields) = J.showJSON fields
opToArgs (QueryClusterInfo) = J.showJSON ()
opToArgs (QueryTags kind name) = J.showJSON (kind, name)
opToArgs (SubmitJob j) = J.showJSON j
opToArgs (SubmitManyJobs ops) = J.showJSON ops
-- This is special, since the JSON library doesn't export an instance
-- of a 5-tuple
opToArgs (WaitForJobChange a b c d e) =
JSArray [ J.showJSON a, J.showJSON b, J.showJSON c
, J.showJSON d, J.showJSON e]
opToArgs (ArchiveJob a) = J.showJSON (show a)
opToArgs (AutoArchiveJobs a b) = J.showJSON (a, b)
opToArgs (CancelJob a) = J.showJSON (show a)
opToArgs (SetDrainFlag flag) = J.showJSON flag
opToArgs (SetWatcherPause duration) = J.showJSON [duration]
-- | Serialize a request to String.
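--
-- The result is a JSON object with the "method" and "args" keys, roughly
-- (illustrative; exact escaping and ordering as produced by Text.JSON):
--
-- > buildCall (QueryJobs [42] ["status"])
-- >   -- ~ "{\"method\":\"QueryJobs\",\"args\":[[\"42\"],[\"status\"]]}"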
buildCall :: LuxiOp -- ^ The method
-> String -- ^ The serialized form
buildCall lo =
let ja = [ (strOfKey Method, JSString $ toJSString $ strOfOp lo::JSValue)
, (strOfKey Args, opToArgs lo::JSValue)
]
jo = toJSObject ja
in encodeStrict jo
-- | Check that luxi responses contain the required keys and that the
-- call was successful.
validateResult :: String -> Result JSValue
validateResult s = do
oarr <- fromJResult "Parsing LUXI response"
(decodeStrict s)::Result (JSObject JSValue)
let arr = J.fromJSObject oarr
status <- fromObj arr (strOfKey Success)::Result Bool
let rkey = strOfKey Result
(if status
then fromObj arr rkey
else fromObj arr rkey >>= fail)
-- | Generic luxi method call.
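--
-- A minimal end-to-end sketch (illustrative only; the socket path is an
-- example value):
--
-- > example :: IO (Result JSValue)
-- > example = do
-- >   client <- getClient "/var/run/ganeti/socket/ganeti-master"
-- >   r <- callMethod (QueryConfigValues ["cluster_name"]) client
-- >   closeClient client
-- >   return r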
callMethod :: LuxiOp -> Client -> IO (Result JSValue)
callMethod method s = do
sendMsg s $ buildCall method
result <- recvMsg s
let rval = validateResult result
return rval
-- | Specialized submitManyJobs call.
submitManyJobs :: Client -> [[OpCode]] -> IO (Result [String])
submitManyJobs s jobs = do
rval <- callMethod (SubmitManyJobs jobs) s
-- map each result (status, payload) pair into a nice Result ADT
return $ case rval of
Bad x -> Bad x
Ok (JSArray r) ->
mapM (\v -> case v of
JSArray [JSBool True, JSString x] ->
Ok (fromJSString x)
JSArray [JSBool False, JSString x] ->
Bad (fromJSString x)
_ -> Bad "Unknown result from the master daemon"
) r
x -> Bad ("Cannot parse response from Ganeti: " ++ show x)
-- | Custom queryJobs call.
queryJobsStatus :: Client -> [String] -> IO (Result [JobStatus])
queryJobsStatus s jids = do
rval <- callMethod (QueryJobs (map read jids) ["status"]) s
return $ case rval of
Bad x -> Bad x
Ok y -> case J.readJSON y::(J.Result [[JobStatus]]) of
J.Ok vals -> if any null vals
then Bad "Missing job status field"
else Ok (map head vals)
J.Error x -> Bad x
| ekohl/ganeti | htools/Ganeti/Luxi.hs | gpl-2.0 | 8,768 | 0 | 19 | 2,386 | 2,277 | 1,171 | 1,106 | 172 | 5 |
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE UnboxedTuples #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses #-}
module Basement.UArray.Base
( MUArray(..)
, UArray(..)
, MUArrayBackend(..)
, UArrayBackend(..)
-- * New mutable array creation
, newUnpinned
, newPinned
, newNative
, newNative_
, new
-- * Pinning status
, isPinned
, isMutablePinned
-- * Mutable array accessor
, unsafeRead
, unsafeWrite
-- * Freezing routines
, unsafeFreezeShrink
, unsafeFreeze
, unsafeThaw
, thaw
, copy
-- * Array accessor
, unsafeIndex
, unsafeIndexer
, onBackend
, onBackendPure
, onBackendPure'
, onBackendPrim
, onMutableBackend
, unsafeDewrap
, unsafeDewrap2
-- * Basic lowlevel functions
, vFromListN
, empty
, length
, offset
, ValidRange(..)
, offsetsValidRange
, equal
, equalMemcmp
, compare
, copyAt
, unsafeCopyAtRO
, toBlock
-- * temporary
, pureST
) where
import GHC.Prim
import GHC.Types
import GHC.Ptr
import GHC.ST
import Basement.Compat.Primitive
import Basement.Monad
import Basement.PrimType
import Basement.Compat.Base
import Basement.Compat.C.Types
import Basement.Compat.Semigroup
import qualified Basement.Runtime as Runtime
import Data.Proxy
import qualified Basement.Compat.ExtList as List
import qualified Basement.Alg.Class as Alg
import Basement.Types.OffsetSize
import Basement.FinalPtr
import Basement.NormalForm
import Basement.Block (MutableBlock(..), Block(..))
import qualified Basement.Block as BLK
import qualified Basement.Block.Mutable as MBLK
import Basement.Numerical.Additive
import Basement.Bindings.Memory
import System.IO.Unsafe (unsafeDupablePerformIO)
-- | A Mutable array of types built on top of GHC primitive.
--
-- Element in this array can be modified in place.
data MUArray ty st = MUArray {-# UNPACK #-} !(Offset ty)
{-# UNPACK #-} !(CountOf ty)
!(MUArrayBackend ty st)
data MUArrayBackend ty st = MUArrayMBA (MutableBlock ty st) | MUArrayAddr (FinalPtr ty)
instance PrimType ty => Alg.Indexable (Ptr ty) ty where
index (Ptr addr) = primAddrIndex addr
instance Alg.Indexable (Ptr Word8) Word64 where
index (Ptr addr) = primAddrIndex addr
instance (PrimMonad prim, PrimType ty) => Alg.RandomAccess (Ptr ty) prim ty where
read (Ptr addr) = primAddrRead addr
write (Ptr addr) = primAddrWrite addr
-- | An array of type built on top of GHC primitive.
--
-- The elements need to have fixed sized and the representation is a
-- packed contiguous array in memory that can easily be passed
-- to foreign interface
data UArray ty = UArray {-# UNPACK #-} !(Offset ty)
{-# UNPACK #-} !(CountOf ty)
!(UArrayBackend ty)
deriving (Typeable)
data UArrayBackend ty = UArrayBA !(Block ty) | UArrayAddr !(FinalPtr ty)
deriving (Typeable)
instance Data ty => Data (UArray ty) where
dataTypeOf _ = arrayType
toConstr _ = error "toConstr"
gunfold _ _ = error "gunfold"
arrayType :: DataType
arrayType = mkNoRepType "Basement.UArray"
instance NormalForm (UArray ty) where
toNormalForm (UArray _ _ !_) = ()
instance (PrimType ty, Show ty) => Show (UArray ty) where
show v = show (toList v)
instance (PrimType ty, Eq ty) => Eq (UArray ty) where
(==) = equal
instance (PrimType ty, Ord ty) => Ord (UArray ty) where
{-# SPECIALIZE instance Ord (UArray Word8) #-}
compare = vCompare
instance PrimType ty => Semigroup (UArray ty) where
(<>) = append
instance PrimType ty => Monoid (UArray ty) where
mempty = empty
mappend = append
mconcat = concat
instance PrimType ty => IsList (UArray ty) where
type Item (UArray ty) = ty
fromList = vFromList
fromListN len = vFromListN (CountOf len)
toList = vToList
length :: UArray ty -> CountOf ty
length (UArray _ len _) = len
{-# INLINE[1] length #-}
offset :: UArray ty -> Offset ty
offset (UArray ofs _ _) = ofs
{-# INLINE[1] offset #-}
data ValidRange ty = ValidRange {-# UNPACK #-} !(Offset ty) {-# UNPACK #-} !(Offset ty)
offsetsValidRange :: UArray ty -> ValidRange ty
offsetsValidRange (UArray ofs len _) = ValidRange ofs (ofs `offsetPlusE` len)
-- | Return whether the array is pinned in memory
--
-- Note that foreign arrays are considered pinned.
isPinned :: UArray ty -> PinnedStatus
isPinned (UArray _ _ (UArrayAddr {})) = Pinned
isPinned (UArray _ _ (UArrayBA blk)) = BLK.isPinned blk
-- | Return whether a mutable array is pinned in memory
isMutablePinned :: MUArray ty st -> PinnedStatus
isMutablePinned (MUArray _ _ (MUArrayAddr {})) = Pinned
isMutablePinned (MUArray _ _ (MUArrayMBA mb)) = BLK.isMutablePinned mb
-- | Create a new pinned mutable array of size @n@.
--
-- All the cells are uninitialized and could contain invalid values.
--
-- All mutable arrays are allocated on 64-bit aligned addresses.
newPinned :: forall prim ty . (PrimMonad prim, PrimType ty) => CountOf ty -> prim (MUArray ty (PrimState prim))
newPinned n = MUArray 0 n . MUArrayMBA <$> MBLK.newPinned n
-- | Create a new unpinned mutable array of size @n@ elements.
--
-- If the size exceeds a GHC-defined threshold, then the memory will be
-- pinned. To be certain about the pinning status with small sizes, use 'newPinned'.
newUnpinned :: forall prim ty . (PrimMonad prim, PrimType ty) => CountOf ty -> prim (MUArray ty (PrimState prim))
newUnpinned n = MUArray 0 n . MUArrayMBA <$> MBLK.new n
newNative :: (PrimMonad prim, PrimType ty)
=> CountOf ty
-> (MutableBlock ty (PrimState prim) -> prim a)
-> prim (a, MUArray ty (PrimState prim))
newNative n f = do
mb <- MBLK.new n
a <- f mb
pure (a, MUArray 0 n (MUArrayMBA mb))
-- | Same as 'newNative' but expects no extra return value from @f@
newNative_ :: (PrimMonad prim, PrimType ty)
=> CountOf ty
-> (MutableBlock ty (PrimState prim) -> prim ())
-> prim (MUArray ty (PrimState prim))
newNative_ n f = do
mb <- MBLK.new n
f mb
pure (MUArray 0 n (MUArrayMBA mb))
-- | Create a new mutable array of size @n@.
--
-- When memory for a new array is allocated, we decide if that memory region
-- should be pinned (will not be copied around by GC) or unpinned (can be
-- moved around by GC) depending on its size.
--
-- You can change the threshold value used by setting the environment variable
-- @HS_FOUNDATION_UARRAY_UNPINNED_MAX@.
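--
-- An illustrative allocation sketch (size and element type are example
-- values only):
--
-- > example :: UArray Word8
-- > example = runST $ do
-- >   ma <- new (CountOf 2)
-- >   unsafeWrite ma (Offset 0) 7
-- >   unsafeWrite ma (Offset 1) 9
-- >   unsafeFreeze ma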
new :: (PrimMonad prim, PrimType ty) => CountOf ty -> prim (MUArray ty (PrimState prim))
new sz
| sizeRecast sz <= maxSizeUnpinned = newUnpinned sz
| otherwise = newPinned sz
where
-- Safe to use here: If the value changes during runtime, this will only
-- have an impact on newly created arrays.
maxSizeUnpinned = Runtime.unsafeUArrayUnpinnedMaxSize
{-# INLINE new #-}
-- | read from a cell in a mutable array without bounds checking.
--
-- Reading from invalid memory can return unpredictable and invalid values.
-- use 'read' if unsure.
unsafeRead :: (PrimMonad prim, PrimType ty) => MUArray ty (PrimState prim) -> Offset ty -> prim ty
unsafeRead (MUArray start _ (MUArrayMBA (MutableBlock mba))) i = primMbaRead mba (start + i)
unsafeRead (MUArray start _ (MUArrayAddr fptr)) i = withFinalPtr fptr $ \(Ptr addr) -> primAddrRead addr (start + i)
{-# INLINE unsafeRead #-}
-- | write to a cell in a mutable array without bounds checking.
--
-- Writing with invalid bounds will corrupt memory and your program will
-- become unreliable. use 'write' if unsure.
unsafeWrite :: (PrimMonad prim, PrimType ty) => MUArray ty (PrimState prim) -> Offset ty -> ty -> prim ()
unsafeWrite (MUArray start _ (MUArrayMBA mb)) i v = MBLK.unsafeWrite mb (start+i) v
unsafeWrite (MUArray start _ (MUArrayAddr fptr)) i v = withFinalPtr fptr $ \(Ptr addr) -> primAddrWrite addr (start+i) v
{-# INLINE unsafeWrite #-}
-- | Return the element at a specific index from an array without bounds checking.
--
-- Reading from invalid memory can return unpredictable and invalid values.
-- use 'index' if unsure.
unsafeIndex :: forall ty . PrimType ty => UArray ty -> Offset ty -> ty
unsafeIndex (UArray start _ (UArrayBA ba)) n = BLK.unsafeIndex ba (start + n)
unsafeIndex (UArray start _ (UArrayAddr fptr)) n = withUnsafeFinalPtr fptr (\(Ptr addr) -> return (primAddrIndex addr (start+n)) :: IO ty)
{-# INLINE unsafeIndex #-}
unsafeIndexer :: (PrimMonad prim, PrimType ty) => UArray ty -> ((Offset ty -> ty) -> prim a) -> prim a
unsafeIndexer (UArray start _ (UArrayBA ba)) f = f (\n -> BLK.unsafeIndex ba (start + n))
unsafeIndexer (UArray start _ (UArrayAddr fptr)) f = withFinalPtr fptr $ \(Ptr addr) -> f (\n -> primAddrIndex addr (start + n))
{-# INLINE unsafeIndexer #-}
-- | Freeze a mutable array into an array.
--
-- the MUArray must not be changed after freezing.
unsafeFreeze :: PrimMonad prim => MUArray ty (PrimState prim) -> prim (UArray ty)
unsafeFreeze (MUArray start len (MUArrayMBA mba)) =
UArray start len . UArrayBA <$> MBLK.unsafeFreeze mba
unsafeFreeze (MUArray start len (MUArrayAddr fptr)) =
pure $ UArray start len (UArrayAddr fptr)
{-# INLINE unsafeFreeze #-}
unsafeFreezeShrink :: (PrimType ty, PrimMonad prim) => MUArray ty (PrimState prim) -> CountOf ty -> prim (UArray ty)
unsafeFreezeShrink (MUArray start _ backend) n = unsafeFreeze (MUArray start n backend)
{-# INLINE unsafeFreezeShrink #-}
-- | Thaw an immutable array.
--
-- The UArray must not be used after thawing.
unsafeThaw :: (PrimType ty, PrimMonad prim) => UArray ty -> prim (MUArray ty (PrimState prim))
unsafeThaw (UArray start len (UArrayBA blk)) = MUArray start len . MUArrayMBA <$> BLK.unsafeThaw blk
unsafeThaw (UArray start len (UArrayAddr fptr)) = pure $ MUArray start len (MUArrayAddr fptr)
{-# INLINE unsafeThaw #-}
-- | Thaw an array to a mutable array.
--
-- The array is not modified; instead a new mutable array is created
-- and every value is copied, before returning the mutable array.
thaw :: (PrimMonad prim, PrimType ty) => UArray ty -> prim (MUArray ty (PrimState prim))
thaw array = do
ma <- new (length array)
unsafeCopyAtRO ma azero array (Offset 0) (length array)
pure ma
{-# INLINE thaw #-}
-- | Copy every cell of an existing array to a new array
copy :: PrimType ty => UArray ty -> UArray ty
copy array = runST (thaw array >>= unsafeFreeze)
onBackend :: (Block ty -> a)
-> (FinalPtr ty -> Ptr ty -> ST s a)
-> UArray ty
-> a
onBackend onBa _ (UArray _ _ (UArrayBA ba)) = onBa ba
onBackend _ onAddr (UArray _ _ (UArrayAddr fptr)) = withUnsafeFinalPtr fptr $ \ptr@(Ptr !_) ->
onAddr fptr ptr
{-# INLINE onBackend #-}
onBackendPure :: (Block ty -> a)
-> (Ptr ty -> a)
-> UArray ty
-> a
onBackendPure goBA goAddr arr = onBackend goBA (\_ -> pureST . goAddr) arr
{-# INLINE onBackendPure #-}
onBackendPure' :: forall ty a . PrimType ty
=> UArray ty
-> (forall container. Alg.Indexable container ty
=> container -> Offset ty -> Offset ty -> a)
-> a
onBackendPure' arr f = onBackendPure f' f' arr
where f' :: Alg.Indexable container ty => container -> a
f' c = f c start end
where (ValidRange !start !end) = offsetsValidRange arr
{-# INLINE onBackendPure' #-}
onBackendPrim :: PrimMonad prim
=> (Block ty -> prim a)
-> (FinalPtr ty -> prim a)
-> UArray ty
-> prim a
onBackendPrim onBa _ (UArray _ _ (UArrayBA ba)) = onBa ba
onBackendPrim _ onAddr (UArray _ _ (UArrayAddr fptr)) = onAddr fptr
{-# INLINE onBackendPrim #-}
onMutableBackend :: PrimMonad prim
=> (MutableBlock ty (PrimState prim) -> prim a)
-> (FinalPtr ty -> prim a)
-> MUArray ty (PrimState prim)
-> prim a
onMutableBackend onMba _ (MUArray _ _ (MUArrayMBA mba)) = onMba mba
onMutableBackend _ onAddr (MUArray _ _ (MUArrayAddr fptr)) = onAddr fptr
{-# INLINE onMutableBackend #-}
unsafeDewrap :: (Block ty -> Offset ty -> a)
-> (Ptr ty -> Offset ty -> ST s a)
-> UArray ty
-> a
unsafeDewrap _ g (UArray start _ (UArrayAddr fptr)) = withUnsafeFinalPtr fptr $ \ptr -> g ptr start
unsafeDewrap f _ (UArray start _ (UArrayBA ba)) = f ba start
{-# INLINE unsafeDewrap #-}
unsafeDewrap2 :: (ByteArray# -> ByteArray# -> a)
-> (Ptr ty -> Ptr ty -> ST s a)
-> (ByteArray# -> Ptr ty -> ST s a)
-> (Ptr ty -> ByteArray# -> ST s a)
-> UArray ty
-> UArray ty
-> a
unsafeDewrap2 f g h i (UArray _ _ back1) (UArray _ _ back2) =
case (back1, back2) of
(UArrayBA (Block ba1), UArrayBA (Block ba2)) -> f ba1 ba2
(UArrayAddr fptr1, UArrayAddr fptr2) -> withUnsafeFinalPtr fptr1 $ \ptr1 -> withFinalPtr fptr2 $ \ptr2 -> g ptr1 ptr2
(UArrayBA (Block ba1), UArrayAddr fptr2) -> withUnsafeFinalPtr fptr2 $ \ptr2 -> h ba1 ptr2
(UArrayAddr fptr1, UArrayBA (Block ba2)) -> withUnsafeFinalPtr fptr1 $ \ptr1 -> i ptr1 ba2
{-# INLINE [2] unsafeDewrap2 #-}
pureST :: a -> ST s a
pureST = pure
-- | make an array from a list of elements.
vFromList :: forall ty . PrimType ty => [ty] -> UArray ty
vFromList l = runST $ do
a <- newNative_ len copyList
unsafeFreeze a
where
len = List.length l
copyList :: MutableBlock ty s -> ST s ()
copyList mb = loop 0 l
where
loop _ [] = pure ()
loop !i (x:xs) = MBLK.unsafeWrite mb i x >> loop (i+1) xs
-- | Make an array from a list of elements with a size hint.
--
-- The list should be of the same size as the hint; otherwise:
--
-- * If the length of the list is smaller than the hint:
--   the allocated array has the size of the hint, but is sliced
--   to cover only the valid elements
-- * If the length of the list is bigger than the hint:
--   the allocated array has the size of the hint, and the list is truncated
--   to fit.
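--
-- For example (illustrative):
--
-- > vFromListN (CountOf 2) [7,8,9] -- truncated to the first two elements
-- > vFromListN (CountOf 4) [7,8]   -- allocated with four slots, sliced to two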
vFromListN :: forall ty . PrimType ty => CountOf ty -> [ty] -> UArray ty
vFromListN len l = runST $ do
(sz, ma) <- newNative len copyList
unsafeFreezeShrink ma sz
where
copyList :: MutableBlock ty s -> ST s (CountOf ty)
copyList mb = loop 0 l
where
loop !i [] = pure (offsetAsSize i)
loop !i (x:xs)
| i .==# len = pure (offsetAsSize i)
| otherwise = MBLK.unsafeWrite mb i x >> loop (i+1) xs
-- | transform an array to a list.
vToList :: forall ty . PrimType ty => UArray ty -> [ty]
vToList a
| len == 0 = []
| otherwise = unsafeDewrap goBa goPtr a
where
!len = length a
goBa (Block ba) start = loop start
where
!end = start `offsetPlusE` len
loop !i | i == end = []
| otherwise = primBaIndex ba i : loop (i+1)
goPtr (Ptr addr) start = pureST (loop start)
where
!end = start `offsetPlusE` len
loop !i | i == end = []
| otherwise = primAddrIndex addr i : loop (i+1)
-- | Check if two vectors are identical
equal :: (PrimType ty, Eq ty) => UArray ty -> UArray ty -> Bool
equal a b
| la /= lb = False
| otherwise = unsafeDewrap2 goBaBa goPtrPtr goBaPtr goPtrBa a b
where
!start1 = offset a
!start2 = offset b
!end = start1 `offsetPlusE` la
!la = length a
!lb = length b
goBaBa ba1 ba2 = loop start1 start2
where
loop !i !o | i == end = True
| otherwise = primBaIndex ba1 i == primBaIndex ba2 o && loop (i+o1) (o+o1)
goPtrPtr (Ptr addr1) (Ptr addr2) = pureST (loop start1 start2)
where
loop !i !o | i == end = True
| otherwise = primAddrIndex addr1 i == primAddrIndex addr2 o && loop (i+o1) (o+o1)
goBaPtr ba1 (Ptr addr2) = pureST (loop start1 start2)
where
loop !i !o | i == end = True
| otherwise = primBaIndex ba1 i == primAddrIndex addr2 o && loop (i+o1) (o+o1)
goPtrBa (Ptr addr1) ba2 = pureST (loop start1 start2)
where
loop !i !o | i == end = True
| otherwise = primAddrIndex addr1 i == primBaIndex ba2 o && loop (i+o1) (o+o1)
o1 = Offset (I# 1#)
{-# RULES "UArray/Eq/Word8" [3] equal = equalBytes #-}
{-# INLINEABLE [2] equal #-}
equalBytes :: UArray Word8 -> UArray Word8 -> Bool
equalBytes a b
| la /= lb = False
| otherwise = memcmp a b (sizeInBytes la) == 0
where
!la = length a
!lb = length b
equalMemcmp :: PrimType ty => UArray ty -> UArray ty -> Bool
equalMemcmp a b
| la /= lb = False
| otherwise = memcmp a b (sizeInBytes la) == 0
where
!la = length a
!lb = length b
-- | Compare 2 vectors
vCompare :: (Ord ty, PrimType ty) => UArray ty -> UArray ty -> Ordering
vCompare a@(UArray start1 la _) b@(UArray start2 lb _) = unsafeDewrap2 goBaBa goPtrPtr goBaPtr goPtrBa a b
where
!end = start1 `offsetPlusE` min la lb
o1 = Offset (I# 1#)
goBaBa ba1 ba2 = loop start1 start2
where
loop !i !o | i == end = la `compare` lb
| v1 == v2 = loop (i + o1) (o + o1)
| otherwise = v1 `compare` v2
where v1 = primBaIndex ba1 i
v2 = primBaIndex ba2 o
goPtrPtr (Ptr addr1) (Ptr addr2) = pureST (loop start1 start2)
where
loop !i !o | i == end = la `compare` lb
| v1 == v2 = loop (i + o1) (o + o1)
| otherwise = v1 `compare` v2
where v1 = primAddrIndex addr1 i
v2 = primAddrIndex addr2 o
goBaPtr ba1 (Ptr addr2) = pureST (loop start1 start2)
where
loop !i !o | i == end = la `compare` lb
| v1 == v2 = loop (i + o1) (o + o1)
| otherwise = v1 `compare` v2
where v1 = primBaIndex ba1 i
v2 = primAddrIndex addr2 o
goPtrBa (Ptr addr1) ba2 = pureST (loop start1 start2)
where
loop !i !o | i == end = la `compare` lb
| v1 == v2 = loop (i + o1) (o + o1)
| otherwise = v1 `compare` v2
where v1 = primAddrIndex addr1 i
v2 = primBaIndex ba2 o
-- {-# SPECIALIZE [3] vCompare :: UArray Word8 -> UArray Word8 -> Ordering = vCompareBytes #-}
{-# RULES "UArray/Ord/Word8" [3] vCompare = vCompareBytes #-}
{-# INLINEABLE [2] vCompare #-}
vCompareBytes :: UArray Word8 -> UArray Word8 -> Ordering
vCompareBytes = vCompareMemcmp
vCompareMemcmp :: (Ord ty, PrimType ty) => UArray ty -> UArray ty -> Ordering
vCompareMemcmp a b = cintToOrdering $ memcmp a b sz
where
la = length a
lb = length b
sz = sizeInBytes $ min la lb
cintToOrdering :: CInt -> Ordering
cintToOrdering 0 = la `compare` lb
cintToOrdering r | r < 0 = LT
| otherwise = GT
{-# SPECIALIZE [3] vCompareMemcmp :: UArray Word8 -> UArray Word8 -> Ordering #-}
memcmp :: PrimType ty => UArray ty -> UArray ty -> CountOf Word8 -> CInt
memcmp a@(UArray (offsetInBytes -> o1) _ _) b@(UArray (offsetInBytes -> o2) _ _) sz = unsafeDewrap2
(\s1 s2 -> unsafeDupablePerformIO $ sysHsMemcmpBaBa s1 o1 s2 o2 sz)
(\s1 s2 -> unsafePrimToST $ sysHsMemcmpPtrPtr s1 o1 s2 o2 sz)
(\s1 s2 -> unsafePrimToST $ sysHsMemcmpBaPtr s1 o1 s2 o2 sz)
(\s1 s2 -> unsafePrimToST $ sysHsMemcmpPtrBa s1 o1 s2 o2 sz)
a b
{-# SPECIALIZE [3] memcmp :: UArray Word8 -> UArray Word8 -> CountOf Word8 -> CInt #-}
-- | Copy a number of elements from an array to another array with offsets
copyAt :: forall prim ty . (PrimMonad prim, PrimType ty)
=> MUArray ty (PrimState prim) -- ^ destination array
-> Offset ty -- ^ offset at destination
-> MUArray ty (PrimState prim) -- ^ source array
-> Offset ty -- ^ offset at source
-> CountOf ty -- ^ number of elements to copy
-> prim ()
copyAt (MUArray dstStart _ (MUArrayMBA (MutableBlock dstMba))) ed (MUArray srcStart _ (MUArrayMBA (MutableBlock srcBa))) es n =
primitive $ \st -> (# copyMutableByteArray# srcBa os dstMba od nBytes st, () #)
where
!sz = primSizeInBytes (Proxy :: Proxy ty)
!(Offset (I# os)) = offsetOfE sz (srcStart + es)
!(Offset (I# od)) = offsetOfE sz (dstStart + ed)
!(CountOf (I# nBytes)) = sizeOfE sz n
copyAt (MUArray dstStart _ (MUArrayMBA (MutableBlock dstMba))) ed (MUArray srcStart _ (MUArrayAddr srcFptr)) es n =
withFinalPtr srcFptr $ \srcPtr ->
let !(Ptr srcAddr) = srcPtr `plusPtr` os
in primitive $ \s -> (# copyAddrToByteArray# srcAddr dstMba od nBytes s, () #)
where
!sz = primSizeInBytes (Proxy :: Proxy ty)
!(Offset os) = offsetOfE sz (srcStart + es)
!(Offset (I# od)) = offsetOfE sz (dstStart + ed)
!(CountOf (I# nBytes)) = sizeOfE sz n
copyAt dst od src os n = loop od os
where
!endIndex = os `offsetPlusE` n
loop !d !i
| i == endIndex = return ()
| otherwise = unsafeRead src i >>= unsafeWrite dst d >> loop (d+1) (i+1)
-- TODO Optimise with copyByteArray#
-- | Copy @n@ sequential elements from the specified offset in a source array
-- to the specified position in a destination array.
--
-- This function does not check bounds. Accessing invalid memory can return
-- unpredictable and invalid values.
unsafeCopyAtRO :: forall prim ty . (PrimMonad prim, PrimType ty)
=> MUArray ty (PrimState prim) -- ^ destination array
-> Offset ty -- ^ offset at destination
-> UArray ty -- ^ source array
-> Offset ty -- ^ offset at source
-> CountOf ty -- ^ number of elements to copy
-> prim ()
unsafeCopyAtRO (MUArray dstStart _ (MUArrayMBA (MutableBlock dstMba))) ed (UArray srcStart _ (UArrayBA (Block srcBa))) es n =
primitive $ \st -> (# copyByteArray# srcBa os dstMba od nBytes st, () #)
where
sz = primSizeInBytes (Proxy :: Proxy ty)
!(Offset (I# os)) = offsetOfE sz (srcStart+es)
!(Offset (I# od)) = offsetOfE sz (dstStart+ed)
!(CountOf (I# nBytes)) = sizeOfE sz n
unsafeCopyAtRO (MUArray dstStart _ (MUArrayMBA (MutableBlock dstMba))) ed (UArray srcStart _ (UArrayAddr srcFptr)) es n =
withFinalPtr srcFptr $ \srcPtr ->
let !(Ptr srcAddr) = srcPtr `plusPtr` os
in primitive $ \s -> (# copyAddrToByteArray# srcAddr dstMba od nBytes s, () #)
where
sz = primSizeInBytes (Proxy :: Proxy ty)
!(Offset os) = offsetOfE sz (srcStart+es)
!(Offset (I# od)) = offsetOfE sz (dstStart+ed)
!(CountOf (I# nBytes)) = sizeOfE sz n
unsafeCopyAtRO dst od src os n = loop od os
where
!endIndex = os `offsetPlusE` n
loop d i
| i == endIndex = return ()
| otherwise = unsafeWrite dst d (unsafeIndex src i) >> loop (d+1) (i+1)
empty_ :: Block ()
empty_ = runST $ primitive $ \s1 ->
case newByteArray# 0# s1 of { (# s2, mba #) ->
case unsafeFreezeByteArray# mba s2 of { (# s3, ba #) ->
(# s3, Block ba #) }}
empty :: UArray ty
empty = UArray 0 0 (UArrayBA $ Block ba) where !(Block ba) = empty_
-- | Append 2 arrays together by creating a new bigger array
append :: PrimType ty => UArray ty -> UArray ty -> UArray ty
append a b
| la == azero = b
| lb == azero = a
| otherwise = runST $ do
r <- new (la+lb)
ma <- unsafeThaw a
mb <- unsafeThaw b
copyAt r (Offset 0) ma (Offset 0) la
copyAt r (sizeAsOffset la) mb (Offset 0) lb
unsafeFreeze r
where
!la = length a
!lb = length b
concat :: forall ty . PrimType ty => [UArray ty] -> UArray ty
concat original = runST $ do
r <- new total
goCopy r 0 original
unsafeFreeze r
where
!total = size 0 original
-- size
size !sz [] = sz
size !sz (x:xs) = size (length x + sz) xs
zero = Offset 0
goCopy r = loop
where
loop _ [] = pure ()
loop !i (x:xs) = do
unsafeCopyAtRO r i x zero lx
loop (i `offsetPlusE` lx) xs
where !lx = length x
-- | Create a Block from a UArray.
--
-- Note that because of the slice, the destination block
-- is re-allocated and copied, unless the slice points
-- at the whole array.
toBlock :: PrimType ty => UArray ty -> Block ty
toBlock arr@(UArray start len (UArrayBA blk))
| start == 0 && BLK.length blk == len = blk
| otherwise = toBlock $ copy arr
toBlock arr = toBlock $ copy arr
| vincenthz/hs-foundation | basement/Basement/UArray/Base.hs | bsd-3-clause | 25,104 | 0 | 15 | 6,980 | 8,157 | 4,093 | 4,064 | -1 | -1 |
{-# LANGUAGE CPP, ForeignFunctionInterface, ScopedTypeVariables #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Utils
-- Copyright : Isaac Jones, Simon Marlow 2003-2004
-- License : BSD3
-- portions Copyright (c) 2007, Galois Inc.
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- A large and somewhat miscellaneous collection of utility functions used
-- throughout the rest of the Cabal lib and in other tools that use the Cabal
-- lib like @cabal-install@. It has a very simple set of logging actions. It
-- has low level functions for running programs, a bunch of wrappers for
-- various directory and file functions that do extra logging.
module Distribution.Simple.Utils (
cabalVersion,
-- * logging and errors
die,
dieWithLocation,
topHandler, topHandlerWith,
warn, notice, setupMessage, info, debug,
debugNoWrap, chattyTry,
printRawCommandAndArgs, printRawCommandAndArgsAndEnv,
-- * exceptions
handleDoesNotExist,
-- * running programs
rawSystemExit,
rawSystemExitCode,
rawSystemExitWithEnv,
rawSystemStdout,
rawSystemStdInOut,
rawSystemIOWithEnv,
createProcessWithEnv,
maybeExit,
xargs,
findProgramLocation,
findProgramVersion,
-- * copying files
smartCopySources,
createDirectoryIfMissingVerbose,
copyFileVerbose,
copyDirectoryRecursiveVerbose,
copyFiles,
copyFileTo,
-- * installing files
installOrdinaryFile,
installExecutableFile,
installMaybeExecutableFile,
installOrdinaryFiles,
installExecutableFiles,
installMaybeExecutableFiles,
installDirectoryContents,
copyDirectoryRecursive,
-- * File permissions
doesExecutableExist,
setFileOrdinary,
setFileExecutable,
-- * file names
currentDir,
shortRelativePath,
dropExeExtension,
exeExtensions,
-- * finding files
findFile,
findFirstFile,
findFileWithExtension,
findFileWithExtension',
findAllFilesWithExtension,
findModuleFile,
findModuleFiles,
getDirectoryContentsRecursive,
-- * environment variables
isInSearchPath,
addLibraryPath,
-- * simple file globbing
matchFileGlob,
matchDirFileGlob,
parseFileGlob,
FileGlob(..),
-- * modification time
moreRecentFile,
existsAndIsMoreRecentThan,
-- * temp files and dirs
TempFileOptions(..), defaultTempFileOptions,
withTempFile, withTempFileEx,
withTempDirectory, withTempDirectoryEx,
-- * .cabal and .buildinfo files
defaultPackageDesc,
findPackageDesc,
tryFindPackageDesc,
defaultHookedPackageDesc,
findHookedPackageDesc,
-- * reading and writing files safely
withFileContents,
writeFileAtomic,
rewriteFile,
-- * Unicode
fromUTF8,
toUTF8,
readUTF8File,
withUTF8FileContents,
writeUTF8File,
normaliseLineEndings,
-- * BOM
startsWithBOM,
fileHasBOM,
ignoreBOM,
-- * generic utils
dropWhileEndLE,
takeWhileEndLE,
equating,
comparing,
isInfixOf,
intercalate,
lowercase,
listUnion,
listUnionRight,
ordNub,
ordNubRight,
safeTail,
wrapText,
wrapLine,
) where
import Distribution.Text
import Distribution.Package
import Distribution.ModuleName as ModuleName
import Distribution.System
import Distribution.Version
import Distribution.Compat.CopyFile
import Distribution.Compat.Internal.TempFile
import Distribution.Compat.Exception
import Distribution.Verbosity
#if __GLASGOW_HASKELL__ < 711
#ifdef VERSION_base
#define BOOTSTRAPPED_CABAL 1
#endif
#else
#ifdef CURRENT_PACKAGE_KEY
#define BOOTSTRAPPED_CABAL 1
#endif
#endif
#ifdef BOOTSTRAPPED_CABAL
import qualified Paths_Cabal (version)
#endif
import Control.Monad
( when, unless, filterM )
import Control.Concurrent.MVar
( newEmptyMVar, putMVar, takeMVar )
import Data.Bits
( Bits((.|.), (.&.), shiftL, shiftR) )
import Data.Char as Char
( isDigit, toLower, chr, ord )
import Data.Foldable
( traverse_ )
import Data.List
( nub, unfoldr, intercalate, isInfixOf )
import Data.Typeable
( cast )
import Data.Ord
( comparing )
import qualified Data.ByteString.Lazy as BS
import qualified Data.ByteString.Lazy.Char8 as BS.Char8
import qualified Data.Set as Set
import System.Directory
( Permissions(executable), getDirectoryContents, getPermissions
, doesDirectoryExist, doesFileExist, removeFile, findExecutable
, getModificationTime )
import System.Environment
( getProgName )
import System.Exit
( exitWith, ExitCode(..) )
import System.FilePath
( normalise, (</>), (<.>)
, getSearchPath, joinPath, takeDirectory, splitFileName
, splitExtension, splitExtensions, splitDirectories
, searchPathSeparator )
import System.Directory
( createDirectory, renameFile, removeDirectoryRecursive )
import System.IO
( Handle, openFile, openBinaryFile, openBinaryTempFileWithDefaultPermissions
, IOMode(ReadMode), hSetBinaryMode
, hGetContents, stderr, stdout, hPutStr, hFlush, hClose )
import System.IO.Error as IO.Error
( isDoesNotExistError, isAlreadyExistsError
, ioeSetFileName, ioeGetFileName, ioeGetErrorString )
import System.IO.Error
( ioeSetLocation, ioeGetLocation )
import System.IO.Unsafe
( unsafeInterleaveIO )
import qualified Control.Exception as Exception
import Control.Exception (IOException, evaluate, throwIO)
import Control.Concurrent (forkIO)
import qualified System.Process as Process
( CreateProcess(..), StdStream(..), proc)
import System.Process
( ProcessHandle, createProcess, rawSystem, runInteractiveProcess
, showCommandForUser, waitForProcess)
-- We only get our own version number when we're building with ourselves
cabalVersion :: Version
#if defined(BOOTSTRAPPED_CABAL)
cabalVersion = Paths_Cabal.version
#elif defined(CABAL_VERSION)
cabalVersion = Version [CABAL_VERSION] []
#else
cabalVersion = Version [1,9999] [] --used when bootstrapping
#endif
-- ----------------------------------------------------------------------------
-- Exception and logging utils
dieWithLocation :: FilePath -> Maybe Int -> String -> IO a
dieWithLocation filename lineno msg =
ioError . setLocation lineno
. flip ioeSetFileName (normalise filename)
$ userError msg
where
setLocation Nothing err = err
setLocation (Just n) err = ioeSetLocation err (show n)
die :: String -> IO a
die msg = ioError (userError msg)
topHandlerWith :: forall a. (Exception.SomeException -> IO a) -> IO a -> IO a
topHandlerWith cont prog =
Exception.catches prog [
Exception.Handler rethrowAsyncExceptions
, Exception.Handler rethrowExitStatus
, Exception.Handler handle
]
where
-- Let async exceptions rise to the top for the default top-handler
rethrowAsyncExceptions :: Exception.AsyncException -> IO a
rethrowAsyncExceptions = throwIO
-- ExitCode gets thrown asynchronously too, and we don't want to print it
rethrowExitStatus :: ExitCode -> IO a
rethrowExitStatus = throwIO
-- Print all other exceptions
handle :: Exception.SomeException -> IO a
handle se = do
hFlush stdout
pname <- getProgName
hPutStr stderr (wrapText (message pname se))
cont se
message :: String -> Exception.SomeException -> String
message pname (Exception.SomeException se) =
case cast se :: Maybe Exception.IOException of
Just ioe ->
let file = case ioeGetFileName ioe of
Nothing -> ""
Just path -> path ++ location ++ ": "
location = case ioeGetLocation ioe of
l@(n:_) | Char.isDigit n -> ':' : l
_ -> ""
detail = ioeGetErrorString ioe
in pname ++ ": " ++ file ++ detail
Nothing ->
#if __GLASGOW_HASKELL__ < 710
show se
#else
Exception.displayException se
#endif
topHandler :: IO a -> IO a
topHandler prog = topHandlerWith (const $ exitWith (ExitFailure 1)) prog
-- | Non-fatal conditions that may be indicative of an error or problem.
--
-- We display these at the 'normal' verbosity level.
--
warn :: Verbosity -> String -> IO ()
warn verbosity msg =
when (verbosity >= normal) $ do
hFlush stdout
hPutStr stderr (wrapText ("Warning: " ++ msg))
-- | Useful status messages.
--
-- We display these at the 'normal' verbosity level.
--
-- This is for the ordinary helpful status messages that users see. Just
-- enough information to know that things are working but not floods of detail.
--
notice :: Verbosity -> String -> IO ()
notice verbosity msg =
when (verbosity >= normal) $
putStr (wrapText msg)
setupMessage :: Verbosity -> String -> PackageIdentifier -> IO ()
setupMessage verbosity msg pkgid =
notice verbosity (msg ++ ' ': display pkgid ++ "...")
-- | More detail on the operation of some action.
--
-- We display these messages when the verbosity level is 'verbose'
--
info :: Verbosity -> String -> IO ()
info verbosity msg =
when (verbosity >= verbose) $
putStr (wrapText msg)
-- | Detailed internal debugging information
--
-- We display these messages when the verbosity level is 'deafening'
--
debug :: Verbosity -> String -> IO ()
debug verbosity msg =
when (verbosity >= deafening) $ do
putStr (wrapText msg)
hFlush stdout
-- | A variant of 'debug' that doesn't perform the automatic line
-- wrapping. Produces better output in some cases.
debugNoWrap :: Verbosity -> String -> IO ()
debugNoWrap verbosity msg =
when (verbosity >= deafening) $ do
putStrLn msg
hFlush stdout
-- | Perform an IO action, catching any IO exceptions and printing an error
-- if one occurs.
chattyTry :: String -- ^ a description of the action we were attempting
-> IO () -- ^ the action itself
-> IO ()
chattyTry desc action =
catchIO action $ \exception ->
putStrLn $ "Error while " ++ desc ++ ": " ++ show exception
-- | Run an IO computation, returning @e@ if it raises a "file
-- does not exist" error.
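--
-- A usage sketch (illustrative only; the file name is an example value):
--
-- > readConfigOrDefault :: IO String
-- > readConfigOrDefault = handleDoesNotExist "" (readFile "cabal.config")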
handleDoesNotExist :: a -> IO a -> IO a
handleDoesNotExist e =
Exception.handleJust
(\ioe -> if isDoesNotExistError ioe then Just ioe else Nothing)
(\_ -> return e)
-- -----------------------------------------------------------------------------
-- Helper functions
-- | Wraps text to the default line width. Existing newlines are preserved.
wrapText :: String -> String
wrapText = unlines
. map (intercalate "\n"
. map unwords
. wrapLine 79
. words)
. lines
-- | Wraps a list of words to a list of lines of words of a particular width.
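--
-- For example (illustrative):
--
-- > wrapLine 11 ["foo", "bar", "baz", "quux"]
-- >   -- == [["foo","bar"],["baz","quux"]]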
wrapLine :: Int -> [String] -> [[String]]
wrapLine width = wrap 0 []
where wrap :: Int -> [String] -> [String] -> [[String]]
wrap 0 [] (w:ws)
| length w + 1 > width
= wrap (length w) [w] ws
wrap col line (w:ws)
| col + length w + 1 > width
= reverse line : wrap 0 [] (w:ws)
wrap col line (w:ws)
= let col' = col + length w + 1
in wrap col' (w:line) ws
wrap _ [] [] = []
wrap _ line [] = [reverse line]
-- -----------------------------------------------------------------------------
-- rawSystem variants
maybeExit :: IO ExitCode -> IO ()
maybeExit cmd = do
res <- cmd
unless (res == ExitSuccess) $ exitWith res
printRawCommandAndArgs :: Verbosity -> FilePath -> [String] -> IO ()
printRawCommandAndArgs verbosity path args =
printRawCommandAndArgsAndEnv verbosity path args Nothing
printRawCommandAndArgsAndEnv :: Verbosity
-> FilePath
-> [String]
-> Maybe [(String, String)]
-> IO ()
printRawCommandAndArgsAndEnv verbosity path args menv
| verbosity >= deafening = do
traverse_ (putStrLn . ("Environment: " ++) . show) menv
print (path, args)
| verbosity >= verbose = putStrLn $ showCommandForUser path args
| otherwise = return ()
-- Exit with the same exit code if the subcommand fails
rawSystemExit :: Verbosity -> FilePath -> [String] -> IO ()
rawSystemExit verbosity path args = do
printRawCommandAndArgs verbosity path args
hFlush stdout
exitcode <- rawSystem path args
unless (exitcode == ExitSuccess) $ do
debug verbosity $ path ++ " returned " ++ show exitcode
exitWith exitcode
rawSystemExitCode :: Verbosity -> FilePath -> [String] -> IO ExitCode
rawSystemExitCode verbosity path args = do
printRawCommandAndArgs verbosity path args
hFlush stdout
exitcode <- rawSystem path args
unless (exitcode == ExitSuccess) $ do
debug verbosity $ path ++ " returned " ++ show exitcode
return exitcode
rawSystemExitWithEnv :: Verbosity
-> FilePath
-> [String]
-> [(String, String)]
-> IO ()
rawSystemExitWithEnv verbosity path args env = do
printRawCommandAndArgsAndEnv verbosity path args (Just env)
hFlush stdout
(_,_,_,ph) <- createProcess $
(Process.proc path args) { Process.env = (Just env)
#ifdef MIN_VERSION_process
#if MIN_VERSION_process(1,2,0)
-- delegate_ctlc has been added in process 1.2, and we still want to be able to
-- bootstrap GHC on systems not having that version
, Process.delegate_ctlc = True
#endif
#endif
}
exitcode <- waitForProcess ph
unless (exitcode == ExitSuccess) $ do
debug verbosity $ path ++ " returned " ++ show exitcode
exitWith exitcode
-- Closes the passed in handles before returning.
rawSystemIOWithEnv :: Verbosity
-> FilePath
-> [String]
-> Maybe FilePath -- ^ New working dir or inherit
-> Maybe [(String, String)] -- ^ New environment or inherit
-> Maybe Handle -- ^ stdin
-> Maybe Handle -- ^ stdout
-> Maybe Handle -- ^ stderr
-> IO ExitCode
rawSystemIOWithEnv verbosity path args mcwd menv inp out err = do
(_,_,_,ph) <- createProcessWithEnv verbosity path args mcwd menv
(mbToStd inp) (mbToStd out) (mbToStd err)
exitcode <- waitForProcess ph
unless (exitcode == ExitSuccess) $ do
debug verbosity $ path ++ " returned " ++ show exitcode
return exitcode
where
mbToStd :: Maybe Handle -> Process.StdStream
mbToStd = maybe Process.Inherit Process.UseHandle
createProcessWithEnv ::
Verbosity
-> FilePath
-> [String]
-> Maybe FilePath -- ^ New working dir or inherit
-> Maybe [(String, String)] -- ^ New environment or inherit
-> Process.StdStream -- ^ stdin
-> Process.StdStream -- ^ stdout
-> Process.StdStream -- ^ stderr
-> IO (Maybe Handle, Maybe Handle, Maybe Handle,ProcessHandle)
-- ^ Any handles created for stdin, stdout, or stderr
-- with 'CreateProcess', and a handle to the process.
createProcessWithEnv verbosity path args mcwd menv inp out err = do
printRawCommandAndArgsAndEnv verbosity path args menv
hFlush stdout
(inp', out', err', ph) <- createProcess $
(Process.proc path args) {
Process.cwd = mcwd
, Process.env = menv
, Process.std_in = inp
, Process.std_out = out
, Process.std_err = err
#ifdef MIN_VERSION_process
#if MIN_VERSION_process(1,2,0)
-- delegate_ctlc has been added in process 1.2, and we still want to be able to
-- bootstrap GHC on systems not having that version
, Process.delegate_ctlc = True
#endif
#endif
}
return (inp', out', err', ph)
-- | Run a command and return its output.
--
-- The output is assumed to be text in the locale encoding.
--
rawSystemStdout :: Verbosity -> FilePath -> [String] -> IO String
rawSystemStdout verbosity path args = do
(output, errors, exitCode) <- rawSystemStdInOut verbosity path args
Nothing Nothing
Nothing False
when (exitCode /= ExitSuccess) $
die errors
return output
-- | Run a command and return its output, errors and exit status. Optionally
-- also supply some input. Also provides control over the binary/text
-- mode of the input and output.
--
rawSystemStdInOut :: Verbosity
-> FilePath -- ^ Program location
-> [String] -- ^ Arguments
-> Maybe FilePath -- ^ New working dir or inherit
-> Maybe [(String, String)] -- ^ New environment or inherit
-> Maybe (String, Bool) -- ^ input text and binary mode
-> Bool -- ^ output in binary mode
-> IO (String, String, ExitCode) -- ^ output, errors, exit
rawSystemStdInOut verbosity path args mcwd menv input outputBinary = do
printRawCommandAndArgs verbosity path args
Exception.bracket
(runInteractiveProcess path args mcwd menv)
(\(inh,outh,errh,_) -> hClose inh >> hClose outh >> hClose errh)
$ \(inh,outh,errh,pid) -> do
-- output mode depends on what the caller wants
hSetBinaryMode outh outputBinary
-- but the errors are always assumed to be text (in the current locale)
hSetBinaryMode errh False
-- fork off a couple threads to pull on the stderr and stdout
-- so if the process writes to stderr we do not block.
err <- hGetContents errh
out <- hGetContents outh
mv <- newEmptyMVar
let force str = (evaluate (length str) >> return ())
`Exception.finally` putMVar mv ()
--TODO: handle exceptions like text decoding.
_ <- forkIO $ force out
_ <- forkIO $ force err
-- push all the input, if any
case input of
Nothing -> return ()
Just (inputStr, inputBinary) -> do
-- input mode depends on what the caller wants
hSetBinaryMode inh inputBinary
hPutStr inh inputStr
hClose inh
--TODO: this probably fails if the process refuses to consume
-- or if it closes stdin (eg if it exits)
-- wait for both to finish, in either order
takeMVar mv
takeMVar mv
-- wait for the program to terminate
exitcode <- waitForProcess pid
unless (exitcode == ExitSuccess) $
debug verbosity $ path ++ " returned " ++ show exitcode
++ if null err then "" else
" with error message:\n" ++ err
++ case input of
Nothing -> ""
Just ("", _) -> ""
Just (inp, _) -> "\nstdin input:\n" ++ inp
return (out, err, exitcode)
{-# DEPRECATED findProgramLocation
"No longer used within Cabal, try findProgramOnSearchPath" #-}
-- | Look for a program on the path.
findProgramLocation :: Verbosity -> FilePath -> IO (Maybe FilePath)
findProgramLocation verbosity prog = do
debug verbosity $ "searching for " ++ prog ++ " in path."
res <- findExecutable prog
case res of
Nothing -> debug verbosity ("Cannot find " ++ prog ++ " on the path")
Just path -> debug verbosity ("found " ++ prog ++ " at "++ path)
return res
-- | Look for a program and try to find its version number. It can accept
-- either an absolute path or the name of a program binary, in which case we
-- will look for the program on the path.
--
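-- For example (illustrative; assumes a @ghc@ binary on the search path):
--
-- > findProgramVersion "--numeric-version" id normal "ghc"
--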
findProgramVersion :: String -- ^ version args
-> (String -> String) -- ^ function to select version
-- number from program output
-> Verbosity
-> FilePath -- ^ location
-> IO (Maybe Version)
findProgramVersion versionArg selectVersion verbosity path = do
str <- rawSystemStdout verbosity path [versionArg]
`catchIO` (\_ -> return "")
`catchExit` (\_ -> return "")
let version :: Maybe Version
version = simpleParse (selectVersion str)
case version of
Nothing -> warn verbosity $ "cannot determine version of " ++ path
++ " :\n" ++ show str
Just v -> debug verbosity $ path ++ " is version " ++ display v
return version
-- | Like the Unix xargs program. Useful for when we've got very long command
-- lines that might overflow an OS limit on command line length and so you
-- need to invoke a command multiple times to get all the args in.
--
-- Use it with either of the rawSystem variants above. For example:
--
-- > xargs (32*1024) (rawSystemExit verbosity) prog fixedArgs bigArgs
--
xargs :: Int -> ([String] -> IO ())
-> [String] -> [String] -> IO ()
xargs maxSize rawSystemFun fixedArgs bigArgs =
let fixedArgSize = sum (map length fixedArgs) + length fixedArgs
chunkSize = maxSize - fixedArgSize
in mapM_ (rawSystemFun . (fixedArgs ++)) (chunks chunkSize bigArgs)
where chunks len = unfoldr $ \s ->
if null s then Nothing
else Just (chunk [] len s)
chunk acc _ [] = (reverse acc,[])
chunk acc len (s:ss)
| len' < len = chunk (s:acc) (len-len'-1) ss
| otherwise = (reverse acc, s:ss)
where len' = length s
-- ------------------------------------------------------------
-- * File Utilities
-- ------------------------------------------------------------
----------------
-- Finding files
-- | Find a file by looking in a search path. The file path must match exactly.
--
findFile :: [FilePath] -- ^search locations
-> FilePath -- ^File Name
-> IO FilePath
findFile searchPath fileName =
findFirstFile id
[ path </> fileName
| path <- nub searchPath]
>>= maybe (die $ fileName ++ " doesn't exist") return
-- | Find a file by looking in a search path with one of a list of possible
-- file extensions. The file base name should be given and it will be tried
-- with each of the extensions in each element of the search path.
--
findFileWithExtension :: [String]
-> [FilePath]
-> FilePath
-> IO (Maybe FilePath)
findFileWithExtension extensions searchPath baseName =
findFirstFile id
[ path </> baseName <.> ext
| path <- nub searchPath
, ext <- nub extensions ]
findAllFilesWithExtension :: [String]
-> [FilePath]
-> FilePath
-> IO [FilePath]
findAllFilesWithExtension extensions searchPath basename =
findAllFiles id
[ path </> basename <.> ext
| path <- nub searchPath
, ext <- nub extensions ]
-- | Like 'findFileWithExtension' but returns which element of the search path
-- the file was found in, and the file path relative to that base directory.
--
findFileWithExtension' :: [String]
-> [FilePath]
-> FilePath
-> IO (Maybe (FilePath, FilePath))
findFileWithExtension' extensions searchPath baseName =
findFirstFile (uncurry (</>))
[ (path, baseName <.> ext)
| path <- nub searchPath
, ext <- nub extensions ]
findFirstFile :: (a -> FilePath) -> [a] -> IO (Maybe a)
findFirstFile file = findFirst
where findFirst [] = return Nothing
findFirst (x:xs) = do exists <- doesFileExist (file x)
if exists
then return (Just x)
else findFirst xs
findAllFiles :: (a -> FilePath) -> [a] -> IO [a]
findAllFiles file = filterM (doesFileExist . file)
-- | Finds the files corresponding to a list of Haskell module names.
--
-- As 'findModuleFile' but for a list of module names.
--
findModuleFiles :: [FilePath] -- ^ build prefix (location of objects)
-> [String] -- ^ search suffixes
-> [ModuleName] -- ^ modules
-> IO [(FilePath, FilePath)]
findModuleFiles searchPath extensions moduleNames =
mapM (findModuleFile searchPath extensions) moduleNames
-- | Find the file corresponding to a Haskell module name.
--
-- This is similar to 'findFileWithExtension'' but specialised to a module
-- name. The function fails if the file corresponding to the module is missing.
--
findModuleFile :: [FilePath] -- ^ build prefix (location of objects)
-> [String] -- ^ search suffixes
-> ModuleName -- ^ module
-> IO (FilePath, FilePath)
findModuleFile searchPath extensions mod_name =
maybe notFound return
=<< findFileWithExtension' extensions searchPath
(ModuleName.toFilePath mod_name)
where
notFound = die $ "Error: Could not find module: " ++ display mod_name
++ " with any suffix: " ++ show extensions
++ " in the search path: " ++ show searchPath
-- | List all the files in a directory and all subdirectories.
--
-- The order places files in sub-directories after all the files in their
-- parent directories. The list is generated lazily so is not well defined if
-- the source directory structure changes before the list is used.
--
getDirectoryContentsRecursive :: FilePath -> IO [FilePath]
getDirectoryContentsRecursive topdir = recurseDirectories [""]
where
recurseDirectories :: [FilePath] -> IO [FilePath]
recurseDirectories [] = return []
recurseDirectories (dir:dirs) = unsafeInterleaveIO $ do
(files, dirs') <- collect [] [] =<< getDirectoryContents (topdir </> dir)
files' <- recurseDirectories (dirs' ++ dirs)
return (files ++ files')
where
collect files dirs' [] = return (reverse files
,reverse dirs')
collect files dirs' (entry:entries) | ignore entry
= collect files dirs' entries
collect files dirs' (entry:entries) = do
let dirEntry = dir </> entry
isDirectory <- doesDirectoryExist (topdir </> dirEntry)
if isDirectory
then collect files (dirEntry:dirs') entries
else collect (dirEntry:files) dirs' entries
ignore ['.'] = True
ignore ['.', '.'] = True
ignore _ = False
------------------------
-- Environment variables
-- | Is this directory in the system search path?
isInSearchPath :: FilePath -> IO Bool
isInSearchPath path = fmap (elem path) getSearchPath
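-- | Add the given library directories to the dynamic-linker search-path
-- variable of an environment (@DYLD_LIBRARY_PATH@ on OSX, @LD_LIBRARY_PATH@
-- otherwise). A sketch of the behaviour (illustrative):
--
-- > addLibraryPath Linux ["/opt/lib"] [("PATH","/usr/bin")]
-- >   -- == [("PATH","/usr/bin"),("LD_LIBRARY_PATH","/opt/lib")]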
addLibraryPath :: OS
-> [FilePath]
-> [(String,String)]
-> [(String,String)]
addLibraryPath os paths = addEnv
where
pathsString = intercalate [searchPathSeparator] paths
ldPath = case os of
OSX -> "DYLD_LIBRARY_PATH"
_ -> "LD_LIBRARY_PATH"
addEnv [] = [(ldPath,pathsString)]
addEnv ((key,value):xs)
| key == ldPath =
if null value
then (key,pathsString):xs
else (key,value ++ (searchPathSeparator:pathsString)):xs
| otherwise = (key,value):addEnv xs
----------------
-- File globbing
data FileGlob
-- | No glob at all, just an ordinary file
= NoGlob FilePath
-- | dir prefix and extension, like @\"foo\/bar\/\*.baz\"@ corresponds to
-- @FileGlob \"foo\/bar\" \".baz\"@
| FileGlob FilePath String
parseFileGlob :: FilePath -> Maybe FileGlob
parseFileGlob filepath = case splitExtensions filepath of
(filepath', ext) -> case splitFileName filepath' of
(dir, "*") | '*' `elem` dir
|| '*' `elem` ext
|| null ext -> Nothing
| null dir -> Just (FileGlob "." ext)
| otherwise -> Just (FileGlob dir ext)
_ | '*' `elem` filepath -> Nothing
| otherwise -> Just (NoGlob filepath)
matchFileGlob :: FilePath -> IO [FilePath]
matchFileGlob = matchDirFileGlob "."
matchDirFileGlob :: FilePath -> FilePath -> IO [FilePath]
matchDirFileGlob dir filepath = case parseFileGlob filepath of
Nothing -> die $ "invalid file glob '" ++ filepath
++ "'. Wildcards '*' are only allowed in place of the file"
++ " name, not in the directory name or file extension."
++ " If a wildcard is used it must be with an file extension."
Just (NoGlob filepath') -> return [filepath']
Just (FileGlob dir' ext) -> do
files <- getDirectoryContents (dir </> dir')
case [ dir' </> file
| file <- files
, let (name, ext') = splitExtensions file
, not (null name) && ext' == ext ] of
[] -> die $ "filepath wildcard '" ++ filepath
++ "' does not match any files."
matches -> return matches
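-- A sketch of the intended use (the directory contents are hypothetical):
--
-- > matchFileGlob "licenses/*.txt"
--
-- lists every ".txt" file in "./licenses", and calls 'die' if the glob is
-- malformed or matches nothing.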
--------------------
-- Modification time
-- | Compare the modification times of two files to see if the first is newer
-- than the second. The first file must exist but the second need not.
-- The expected use case is when the second file is generated using the first.
-- In this use case, if the result is True then the second file is out of date.
--
moreRecentFile :: FilePath -> FilePath -> IO Bool
moreRecentFile a b = do
exists <- doesFileExist b
if not exists
then return True
else do tb <- getModificationTime b
ta <- getModificationTime a
return (ta > tb)
-- | Like 'moreRecentFile', but also checks that the first file exists.
existsAndIsMoreRecentThan :: FilePath -> FilePath -> IO Bool
existsAndIsMoreRecentThan a b = do
exists <- doesFileExist a
if not exists
then return False
else a `moreRecentFile` b
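-- Typical use when deciding whether a generated file must be rebuilt (the
-- file names below are made up):
--
-- > outOfDate <- "Foo.hs" `moreRecentFile` "Foo.o"
--
-- Here outOfDate is True when "Foo.o" is missing or older than "Foo.hs".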
----------------------------------------
-- Copying and installing files and dirs
-- | Same as 'createDirectoryIfMissing' but logs at higher verbosity levels.
--
createDirectoryIfMissingVerbose :: Verbosity
-> Bool -- ^ Create its parents too?
-> FilePath
-> IO ()
createDirectoryIfMissingVerbose verbosity create_parents path0
| create_parents = createDirs (parents path0)
| otherwise = createDirs (take 1 (parents path0))
where
parents = reverse . scanl1 (</>) . splitDirectories . normalise
createDirs [] = return ()
createDirs (dir:[]) = createDir dir throwIO
createDirs (dir:dirs) =
createDir dir $ \_ -> do
createDirs dirs
createDir dir throwIO
createDir :: FilePath -> (IOException -> IO ()) -> IO ()
createDir dir notExistHandler = do
r <- tryIO $ createDirectoryVerbose verbosity dir
case (r :: Either IOException ()) of
Right () -> return ()
Left e
| isDoesNotExistError e -> notExistHandler e
-- createDirectory (and indeed POSIX mkdir) does not distinguish
-- between a dir already existing and a file already existing. So we
-- check for it here. Unfortunately there is a slight race condition
-- here, but we think it is benign. It could report an exception in
-- the case that the dir did exist but another process deletes the
-- directory and creates a file in its place before we can check
-- that the directory did indeed exist.
| isAlreadyExistsError e -> (do
isDir <- doesDirectoryExist dir
if isDir then return ()
else throwIO e
) `catchIO` ((\_ -> return ()) :: IOException -> IO ())
| otherwise -> throwIO e
createDirectoryVerbose :: Verbosity -> FilePath -> IO ()
createDirectoryVerbose verbosity dir = do
info verbosity $ "creating " ++ dir
createDirectory dir
setDirOrdinary dir
-- | Copies a file without copying file permissions. The target file is created
-- with default permissions. Any existing target file is replaced.
--
-- At higher verbosity levels it logs an info message.
--
copyFileVerbose :: Verbosity -> FilePath -> FilePath -> IO ()
copyFileVerbose verbosity src dest = do
info verbosity ("copy " ++ src ++ " to " ++ dest)
copyFile src dest
-- | Install an ordinary file. This is like a file copy but the permissions
-- are set appropriately for an installed file. On Unix it is \"-rw-r--r--\"
-- while on Windows it uses the default permissions for the target directory.
--
installOrdinaryFile :: Verbosity -> FilePath -> FilePath -> IO ()
installOrdinaryFile verbosity src dest = do
info verbosity ("Installing " ++ src ++ " to " ++ dest)
copyOrdinaryFile src dest
-- | Install an executable file. This is like a file copy but the permissions
-- are set appropriately for an installed file. On Unix it is \"-rwxr-xr-x\"
-- while on Windows it uses the default permissions for the target directory.
--
installExecutableFile :: Verbosity -> FilePath -> FilePath -> IO ()
installExecutableFile verbosity src dest = do
info verbosity ("Installing executable " ++ src ++ " to " ++ dest)
copyExecutableFile src dest
-- | Install a file that may or may not be executable, preserving permissions.
installMaybeExecutableFile :: Verbosity -> FilePath -> FilePath -> IO ()
installMaybeExecutableFile verbosity src dest = do
perms <- getPermissions src
if (executable perms) --only checks user x bit
then installExecutableFile verbosity src dest
else installOrdinaryFile verbosity src dest
-- | Given a relative path to a file, copy it to the given directory, preserving
-- the relative path and creating the parent directories if needed.
copyFileTo :: Verbosity -> FilePath -> FilePath -> IO ()
copyFileTo verbosity dir file = do
let targetFile = dir </> file
createDirectoryIfMissingVerbose verbosity True (takeDirectory targetFile)
installOrdinaryFile verbosity file targetFile
-- | Common implementation of 'copyFiles', 'installOrdinaryFiles',
-- 'installExecutableFiles' and 'installMaybeExecutableFiles'.
copyFilesWith :: (Verbosity -> FilePath -> FilePath -> IO ())
-> Verbosity -> FilePath -> [(FilePath, FilePath)] -> IO ()
copyFilesWith doCopy verbosity targetDir srcFiles = do
-- Create parent directories for everything
let dirs = map (targetDir </>) . nub . map (takeDirectory . snd) $ srcFiles
mapM_ (createDirectoryIfMissingVerbose verbosity True) dirs
-- Copy all the files
sequence_ [ let src = srcBase </> srcFile
dest = targetDir </> srcFile
in doCopy verbosity src dest
| (srcBase, srcFile) <- srcFiles ]
-- | Copies a bunch of files to a target directory, preserving the directory
-- structure in the target location. The target directories are created if they
-- do not exist.
--
-- The files are identified by a pair of base directory and a path relative to
-- that base. It is only the relative part that is preserved in the
-- destination.
--
-- For example:
--
-- > copyFiles normal "dist/src"
-- > [("", "src/Foo.hs"), ("dist/build/", "src/Bar.hs")]
--
-- This would copy \"src\/Foo.hs\" to \"dist\/src\/src\/Foo.hs\" and
-- copy \"dist\/build\/src\/Bar.hs\" to \"dist\/src\/src\/Bar.hs\".
--
-- This operation is not atomic. Any IO failure during the copy (including any
-- missing source files) leaves the target in an unknown state so it is best to
-- use it with a freshly created directory so that it can be simply deleted if
-- anything goes wrong.
--
copyFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] -> IO ()
copyFiles = copyFilesWith copyFileVerbose
-- | This is like 'copyFiles' but uses 'installOrdinaryFile'.
--
installOrdinaryFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] -> IO ()
installOrdinaryFiles = copyFilesWith installOrdinaryFile
-- | This is like 'copyFiles' but uses 'installExecutableFile'.
--
installExecutableFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)]
-> IO ()
installExecutableFiles = copyFilesWith installExecutableFile
-- | This is like 'copyFiles' but uses 'installMaybeExecutableFile'.
--
installMaybeExecutableFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)]
-> IO ()
installMaybeExecutableFiles = copyFilesWith installMaybeExecutableFile
-- | This installs all the files in a directory to a target location,
-- preserving the directory layout. All the files are assumed to be ordinary
-- rather than executable files.
--
installDirectoryContents :: Verbosity -> FilePath -> FilePath -> IO ()
installDirectoryContents verbosity srcDir destDir = do
info verbosity ("copy directory '" ++ srcDir ++ "' to '" ++ destDir ++ "'.")
srcFiles <- getDirectoryContentsRecursive srcDir
installOrdinaryFiles verbosity destDir [ (srcDir, f) | f <- srcFiles ]
-- | Recursively copy the contents of one directory to another path.
copyDirectoryRecursive :: Verbosity -> FilePath -> FilePath -> IO ()
copyDirectoryRecursive verbosity srcDir destDir = do
info verbosity ("copy directory '" ++ srcDir ++ "' to '" ++ destDir ++ "'.")
srcFiles <- getDirectoryContentsRecursive srcDir
copyFilesWith (const copyFile) verbosity destDir [ (srcDir, f)
| f <- srcFiles ]
-------------------
-- File permissions
-- | Like 'doesFileExist', but also checks that the file is executable.
doesExecutableExist :: FilePath -> IO Bool
doesExecutableExist f = do
exists <- doesFileExist f
if exists
then do perms <- getPermissions f
return (executable perms)
else return False
---------------------------------
-- Deprecated file copy functions
{-# DEPRECATED smartCopySources
"Use findModuleFiles and copyFiles or installOrdinaryFiles" #-}
smartCopySources :: Verbosity -> [FilePath] -> FilePath
-> [ModuleName] -> [String] -> IO ()
smartCopySources verbosity searchPath targetDir moduleNames extensions =
findModuleFiles searchPath extensions moduleNames
>>= copyFiles verbosity targetDir
{-# DEPRECATED copyDirectoryRecursiveVerbose
"You probably want installDirectoryContents instead" #-}
copyDirectoryRecursiveVerbose :: Verbosity -> FilePath -> FilePath -> IO ()
copyDirectoryRecursiveVerbose verbosity srcDir destDir = do
info verbosity ("copy directory '" ++ srcDir ++ "' to '" ++ destDir ++ "'.")
srcFiles <- getDirectoryContentsRecursive srcDir
copyFiles verbosity destDir [ (srcDir, f) | f <- srcFiles ]
---------------------------
-- Temporary files and dirs
-- | Advanced options for 'withTempFile' and 'withTempDirectory'.
data TempFileOptions = TempFileOptions {
optKeepTempFiles :: Bool -- ^ Keep temporary files?
}
defaultTempFileOptions :: TempFileOptions
defaultTempFileOptions = TempFileOptions { optKeepTempFiles = False }
-- | Use a temporary filename that doesn't already exist.
--
withTempFile :: FilePath -- ^ Temp dir to create the file in
-> String -- ^ File name template. See 'openTempFile'.
-> (FilePath -> Handle -> IO a) -> IO a
withTempFile tmpDir template action =
withTempFileEx defaultTempFileOptions tmpDir template action
-- | A version of 'withTempFile' that additionally takes a 'TempFileOptions'
-- argument.
withTempFileEx :: TempFileOptions
-> FilePath -- ^ Temp dir to create the file in
-> String -- ^ File name template. See 'openTempFile'.
-> (FilePath -> Handle -> IO a) -> IO a
withTempFileEx opts tmpDir template action =
Exception.bracket
(openTempFile tmpDir template)
(\(name, handle) -> do hClose handle
unless (optKeepTempFiles opts) $
handleDoesNotExist () . removeFile $ name)
(uncurry action)
-- | Create and use a temporary directory.
--
-- Creates a new temporary directory inside the given directory, making use
-- of the template. The temp directory is deleted after use. For example:
--
-- > withTempDirectory verbosity "src" "sdist." $ \tmpDir -> do ...
--
-- The @tmpDir@ will be a new subdirectory of the given directory, e.g.
-- @src/sdist.342@.
--
withTempDirectory :: Verbosity
-> FilePath -> String -> (FilePath -> IO a) -> IO a
withTempDirectory verbosity targetDir template =
withTempDirectoryEx verbosity defaultTempFileOptions targetDir template
-- | A version of 'withTempDirectory' that additionally takes a
-- 'TempFileOptions' argument.
withTempDirectoryEx :: Verbosity
-> TempFileOptions
-> FilePath -> String -> (FilePath -> IO a) -> IO a
withTempDirectoryEx _verbosity opts targetDir template =
Exception.bracket
(createTempDirectory targetDir template)
(unless (optKeepTempFiles opts)
. handleDoesNotExist () . removeDirectoryRecursive)
-----------------------------------
-- Safely reading and writing files
-- | Gets the contents of a file, but guarantees that it gets closed.
--
-- The file is read lazily but if it is not fully consumed by the action then
-- the remaining input is truncated and the file is closed.
--
withFileContents :: FilePath -> (String -> IO a) -> IO a
withFileContents name action =
Exception.bracket (openFile name ReadMode) hClose
(\hnd -> hGetContents hnd >>= action)
-- | Writes a file atomically.
--
-- The file is either written successfully or an IO exception is raised and
-- the original file is left unchanged.
--
-- On Windows it is not possible to delete a file that is open by a process.
-- This case will give an IO exception but the atomic property is not affected.
--
writeFileAtomic :: FilePath -> BS.ByteString -> IO ()
writeFileAtomic targetPath content = do
let (targetDir, targetFile) = splitFileName targetPath
Exception.bracketOnError
(openBinaryTempFileWithDefaultPermissions targetDir $ targetFile <.> "tmp")
(\(tmpPath, handle) -> hClose handle >> removeFile tmpPath)
(\(tmpPath, handle) -> do
BS.hPut handle content
hClose handle
renameFile tmpPath targetPath)
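-- A minimal call (the path and contents are illustrative):
--
-- > writeFileAtomic "dist/setup-config" (BS.Char8.pack "contents")
--
-- The content is written to a fresh temporary file in the same directory and
-- then renamed over the target, so readers never see a partially written file.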
-- | Write a file but only if it would have new content. If we would be writing
-- the same as the existing content then leave the file as is so that we do not
-- update the file's modification time.
--
-- NB: the file is assumed to be ASCII-encoded.
rewriteFile :: FilePath -> String -> IO ()
rewriteFile path newContent =
flip catchIO mightNotExist $ do
existingContent <- readFile path
_ <- evaluate (length existingContent)
unless (existingContent == newContent) $
writeFileAtomic path (BS.Char8.pack newContent)
where
mightNotExist e | isDoesNotExistError e = writeFileAtomic path
(BS.Char8.pack newContent)
| otherwise = ioError e
-- | The path name that represents the current directory.
-- In Unix, it's @\".\"@, but this is system-specific.
-- (E.g. AmigaOS uses the empty string @\"\"@ for the current directory.)
currentDir :: FilePath
currentDir = "."
shortRelativePath :: FilePath -> FilePath -> FilePath
shortRelativePath from to =
case dropCommonPrefix (splitDirectories from) (splitDirectories to) of
(stuff, path) -> joinPath (map (const "..") stuff ++ path)
where
dropCommonPrefix :: Eq a => [a] -> [a] -> ([a],[a])
dropCommonPrefix (x:xs) (y:ys)
| x == y = dropCommonPrefix xs ys
dropCommonPrefix xs ys = (xs,ys)
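-- For example (the paths are hypothetical):
--
-- > shortRelativePath "/usr/local/lib" "/usr/share/doc"  ==  "../../share/doc"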
-- | Drop the extension if it's one of 'exeExtensions', or return the path
-- unchanged.
dropExeExtension :: FilePath -> FilePath
dropExeExtension filepath =
  -- Note: 'splitExtension' returns the extension with its leading dot
  -- (e.g. ".exe"), while 'exeExtensions' lists bare suffixes (e.g. "exe"),
  -- so the dot is stripped before the comparison.
  case splitExtension filepath of
    (filepath', extension)
      | dropWhile (== '.') extension `elem` exeExtensions -> filepath'
      | otherwise                                         -> filepath
-- | List of possible executable file extensions on the current platform.
exeExtensions :: [String]
exeExtensions = case buildOS of
-- Possible improvement: on Windows, read the list of extensions from the
-- PATHEXT environment variable. By default PATHEXT is ".com; .exe; .bat;
-- .cmd".
Windows -> ["", "exe"]
Ghcjs -> ["", "exe"]
_ -> [""]
-- ------------------------------------------------------------
-- * Finding the description file
-- ------------------------------------------------------------
-- |Package description file (/pkgname/@.cabal@)
defaultPackageDesc :: Verbosity -> IO FilePath
defaultPackageDesc _verbosity = tryFindPackageDesc currentDir
-- |Find a package description file in the given directory. Looks for
-- @.cabal@ files.
findPackageDesc :: FilePath -- ^Where to look
-> IO (Either String FilePath) -- ^<pkgname>.cabal
findPackageDesc dir
= do files <- getDirectoryContents dir
-- to make sure we do not mistake a ~/.cabal/ dir for a <pkgname>.cabal
-- file we filter to exclude dirs and null base file names:
cabalFiles <- filterM doesFileExist
[ dir </> file
| file <- files
, let (name, ext) = splitExtension file
, not (null name) && ext == ".cabal" ]
case cabalFiles of
[] -> return (Left noDesc)
[cabalFile] -> return (Right cabalFile)
multiple -> return (Left $ multiDesc multiple)
where
noDesc :: String
noDesc = "No cabal file found.\n"
++ "Please create a package description file <pkgname>.cabal"
multiDesc :: [String] -> String
multiDesc l = "Multiple cabal files found.\n"
++ "Please use only one of: "
++ intercalate ", " l
-- |Like 'findPackageDesc', but calls 'die' in case of error.
tryFindPackageDesc :: FilePath -> IO FilePath
tryFindPackageDesc dir = either die return =<< findPackageDesc dir
-- |Optional auxiliary package information file (/pkgname/@.buildinfo@)
defaultHookedPackageDesc :: IO (Maybe FilePath)
defaultHookedPackageDesc = findHookedPackageDesc currentDir
-- |Find auxiliary package information in the given directory.
-- Looks for @.buildinfo@ files.
findHookedPackageDesc
:: FilePath -- ^Directory to search
-> IO (Maybe FilePath) -- ^/dir/@\/@/pkgname/@.buildinfo@, if present
findHookedPackageDesc dir = do
files <- getDirectoryContents dir
buildInfoFiles <- filterM doesFileExist
[ dir </> file
| file <- files
, let (name, ext) = splitExtension file
, not (null name) && ext == buildInfoExt ]
case buildInfoFiles of
[] -> return Nothing
[f] -> return (Just f)
_ -> die ("Multiple files with extension " ++ buildInfoExt)
buildInfoExt :: String
buildInfoExt = ".buildinfo"
-- ------------------------------------------------------------
-- * Unicode stuff
-- ------------------------------------------------------------
-- This is a modification of the UTF8 code from gtk2hs and the
-- utf8-string package.
fromUTF8 :: String -> String
fromUTF8 [] = []
fromUTF8 (c:cs)
| c <= '\x7F' = c : fromUTF8 cs
| c <= '\xBF' = replacementChar : fromUTF8 cs
| c <= '\xDF' = twoBytes c cs
| c <= '\xEF' = moreBytes 3 0x800 cs (ord c .&. 0xF)
| c <= '\xF7' = moreBytes 4 0x10000 cs (ord c .&. 0x7)
| c <= '\xFB' = moreBytes 5 0x200000 cs (ord c .&. 0x3)
| c <= '\xFD' = moreBytes 6 0x4000000 cs (ord c .&. 0x1)
| otherwise = replacementChar : fromUTF8 cs
where
twoBytes c0 (c1:cs')
| ord c1 .&. 0xC0 == 0x80
= let d = ((ord c0 .&. 0x1F) `shiftL` 6)
.|. (ord c1 .&. 0x3F)
in if d >= 0x80
then chr d : fromUTF8 cs'
else replacementChar : fromUTF8 cs'
twoBytes _ cs' = replacementChar : fromUTF8 cs'
moreBytes :: Int -> Int -> [Char] -> Int -> [Char]
moreBytes 1 overlong cs' acc
| overlong <= acc && acc <= 0x10FFFF
&& (acc < 0xD800 || 0xDFFF < acc)
&& (acc < 0xFFFE || 0xFFFF < acc)
= chr acc : fromUTF8 cs'
| otherwise
= replacementChar : fromUTF8 cs'
moreBytes byteCount overlong (cn:cs') acc
| ord cn .&. 0xC0 == 0x80
= moreBytes (byteCount-1) overlong cs'
((acc `shiftL` 6) .|. ord cn .&. 0x3F)
moreBytes _ _ cs' _
= replacementChar : fromUTF8 cs'
replacementChar = '\xfffd'
toUTF8 :: String -> String
toUTF8 [] = []
toUTF8 (c:cs)
| c <= '\x07F' = c
: toUTF8 cs
| c <= '\x7FF' = chr (0xC0 .|. (w `shiftR` 6))
: chr (0x80 .|. (w .&. 0x3F))
: toUTF8 cs
| c <= '\xFFFF'= chr (0xE0 .|. (w `shiftR` 12))
: chr (0x80 .|. ((w `shiftR` 6) .&. 0x3F))
: chr (0x80 .|. (w .&. 0x3F))
: toUTF8 cs
| otherwise = chr (0xf0 .|. (w `shiftR` 18))
: chr (0x80 .|. ((w `shiftR` 12) .&. 0x3F))
: chr (0x80 .|. ((w `shiftR` 6) .&. 0x3F))
: chr (0x80 .|. (w .&. 0x3F))
: toUTF8 cs
where w = ord c
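-- Round-trip sketch (the code point U+00E9 is just an example):
--
-- > toUTF8 "\x00e9"             == "\xc3\xa9"
-- > fromUTF8 (toUTF8 "\x00e9")  == "\x00e9"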
-- | Whether a byte order mark (BOM) is at the beginning of the input
startsWithBOM :: String -> Bool
startsWithBOM ('\xFEFF':_) = True
startsWithBOM _ = False
-- | Check whether a file has a Unicode byte order mark (BOM).
fileHasBOM :: FilePath -> IO Bool
fileHasBOM f = fmap (startsWithBOM . fromUTF8)
. hGetContents =<< openBinaryFile f ReadMode
-- | Ignore a Unicode byte order mark (BOM) at the beginning of the input
--
ignoreBOM :: String -> String
ignoreBOM ('\xFEFF':string) = string
ignoreBOM string = string
-- | Reads a UTF8 encoded text file as a Unicode String
--
-- Reads lazily using ordinary 'readFile'.
--
readUTF8File :: FilePath -> IO String
readUTF8File f = fmap (ignoreBOM . fromUTF8)
. hGetContents =<< openBinaryFile f ReadMode
-- | Reads a UTF8 encoded text file as a Unicode String
--
-- Same behaviour as 'withFileContents'.
--
withUTF8FileContents :: FilePath -> (String -> IO a) -> IO a
withUTF8FileContents name action =
Exception.bracket
(openBinaryFile name ReadMode)
hClose
(\hnd -> hGetContents hnd >>= action . ignoreBOM . fromUTF8)
-- | Writes a Unicode String as a UTF8 encoded text file.
--
-- Uses 'writeFileAtomic', so provides the same guarantees.
--
writeUTF8File :: FilePath -> String -> IO ()
writeUTF8File path = writeFileAtomic path . BS.Char8.pack . toUTF8
-- | Fix different systems' silly line-ending conventions
normaliseLineEndings :: String -> String
normaliseLineEndings [] = []
normaliseLineEndings ('\r':'\n':s) = '\n' : normaliseLineEndings s -- windows
normaliseLineEndings ('\r':s) = '\n' : normaliseLineEndings s -- old OS X
normaliseLineEndings ( c :s) = c : normaliseLineEndings s
-- ------------------------------------------------------------
-- * Common utils
-- ------------------------------------------------------------
-- | @dropWhileEndLE p@ is equivalent to @reverse . dropWhile p . reverse@, but
-- quite a bit faster. The difference between "Data.List.dropWhileEnd" and this
-- version is that the one in "Data.List" is strict in elements, but spine-lazy,
-- while this one is spine-strict but lazy in elements. That's what @LE@ stands
-- for - "lazy in elements".
--
-- Example:
--
-- @
-- > tail $ Data.List.dropWhileEnd (<3) [undefined, 5, 4, 3, 2, 1]
-- *** Exception: Prelude.undefined
-- > tail $ dropWhileEndLE (<3) [undefined, 5, 4, 3, 2, 1]
-- [5,4,3]
-- > take 3 $ Data.List.dropWhileEnd (<3) [5, 4, 3, 2, 1, undefined]
-- [5,4,3]
-- > take 3 $ dropWhileEndLE (<3) [5, 4, 3, 2, 1, undefined]
-- *** Exception: Prelude.undefined
-- @
dropWhileEndLE :: (a -> Bool) -> [a] -> [a]
dropWhileEndLE p = foldr (\x r -> if null r && p x then [] else x:r) []
-- | @takeWhileEndLE p@ is equivalent to @reverse . takeWhile p . reverse@, but
-- is usually faster (as well as being easier to read).
takeWhileEndLE :: (a -> Bool) -> [a] -> [a]
takeWhileEndLE p = fst . foldr go ([], False)
where
go x (rest, done)
| not done && p x = (x:rest, False)
| otherwise = (rest, True)
-- | Like "Data.List.nub", but has @O(n log n)@ complexity instead of
-- @O(n^2)@. Code for 'ordNub' and 'listUnion' taken from Niklas Hambüchen's
-- <http://github.com/nh2/haskell-ordnub ordnub> package.
ordNub :: (Ord a) => [a] -> [a]
ordNub l = go Set.empty l
where
go _ [] = []
go s (x:xs) = if x `Set.member` s then go s xs
else x : go (Set.insert x s) xs
-- | Like "Data.List.union", but has @O(n log n)@ complexity instead of
-- @O(n^2)@.
listUnion :: (Ord a) => [a] -> [a] -> [a]
listUnion a b = a ++ ordNub (filter (`Set.notMember` aSet) b)
where
aSet = Set.fromList a
-- | A right-biased version of 'ordNub'.
--
-- Example:
--
-- @
-- > ordNub [1,2,1]
-- [1,2]
-- > ordNubRight [1,2,1]
-- [2,1]
-- @
ordNubRight :: (Ord a) => [a] -> [a]
ordNubRight = fst . foldr go ([], Set.empty)
where
go x p@(l, s) = if x `Set.member` s then p
else (x:l, Set.insert x s)
-- | A right-biased version of 'listUnion'.
--
-- Example:
--
-- @
-- > listUnion [1,2,3,4,3] [2,1,1]
-- [1,2,3,4,3]
-- > listUnionRight [1,2,3,4,3] [2,1,1]
-- [4,3,2,1,1]
-- @
listUnionRight :: (Ord a) => [a] -> [a] -> [a]
listUnionRight a b = ordNubRight (filter (`Set.notMember` bSet) a) ++ b
where
bSet = Set.fromList b
-- | A total variant of 'tail'.
safeTail :: [a] -> [a]
safeTail [] = []
safeTail (_:xs) = xs
equating :: Eq a => (b -> a) -> b -> b -> Bool
equating p x y = p x == p y
lowercase :: String -> String
lowercase = map Char.toLower
| gbaz/cabal | Cabal/Distribution/Simple/Utils.hs | bsd-3-clause | 54,720 | 0 | 21 | 14,775 | 11,599 | 6,126 | 5,473 | 903 | 7 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveGeneric, DeriveAnyClass #-}
module Network.Data.OpenFlow.Statistics (
StatsRequest (..)
, TableQuery (..)
, PortQuery (..)
, QueueQuery (..)
, StatsReply (..)
, MoreToFollowFlag
, FlowStats (..)
, AggregateFlowStats (..)
, TableStats (..)
, PortStats (..)
, nullPortStats
, zeroPortStats
, liftIntoPortStats1
, liftIntoPortStats2
, Description (..)
, QueueStats (..)
) where
import Network.Data.OpenFlow.Port
import Network.Data.OpenFlow.Match
import Network.Data.OpenFlow.Action
import Network.Data.OpenFlow.FlowTable
import Control.Monad (liftM,liftM2)
import Data.Aeson.TH
import Data.Int
import Control.DeepSeq (NFData)
import GHC.Generics (Generic)
data StatsRequest
= FlowStatsRequest {
statsRequestMatch :: Match, -- ^fields to match
statsRequestTableID :: TableQuery, -- ^ID of table to read
statsRequestPort :: Maybe PseudoPort -- ^if present, require matching entries to include this as an output port
}
| AggregateFlowStatsRequest {
statsRequestMatch :: Match, -- ^fields to match
statsRequestTableID :: TableQuery, -- ^ID of table to read
statsRequestPort :: Maybe PseudoPort -- ^if present, require matching entries to include this as an output port
}
| TableStatsRequest
| DescriptionRequest
| PortStatsRequest {
portStatsQuery :: PortQuery
}
  | QueueStatsRequest { queueStatsPort :: PortQuery, queueStatsQuery :: QueueQuery }
deriving (Show,Eq,Generic,NFData)
data PortQuery = AllPorts | SinglePort PortID deriving (Show,Eq,Ord,Generic,NFData)
data QueueQuery = AllQueues | SingleQueue QueueID deriving (Show,Eq,Ord,Generic,NFData)
data TableQuery = AllTables
| EmergencyTable
| Table FlowTableID
deriving (Show,Eq,Generic,NFData)
data StatsReply
= DescriptionReply Description
| FlowStatsReply !MoreToFollowFlag [FlowStats]
| AggregateFlowStatsReply AggregateFlowStats
| TableStatsReply !MoreToFollowFlag [TableStats]
| PortStatsReply !MoreToFollowFlag [(PortID,PortStats)]
| QueueStatsReply !MoreToFollowFlag [QueueStats]
deriving (Show,Eq,Generic,NFData)
type MoreToFollowFlag = Bool
data Description = Description { manufacturerDesc :: String
, hardwareDesc :: String
, softwareDesc :: String
, serialNumber :: String
, datapathDesc :: String
} deriving (Show,Eq,Generic,NFData)
data AggregateFlowStats =
AggregateFlowStats { aggregateFlowStatsPacketCount :: Integer,
aggregateFlowStatsByteCount :: Integer,
aggregateFlowStatsFlowCount :: Integer
} deriving (Show, Eq,Generic,NFData)
data FlowStats = FlowStats {
flowStatsTableID :: !FlowTableID, -- ^ Table ID of the flow
flowStatsMatch :: Match, -- ^ Match condition of the flow
flowStatsActions :: [Action], -- ^ Actions for the flow
flowStatsPriority :: !Priority, -- ^ Priority of the flow entry (meaningful when the match is not exact).
flowStatsCookie :: !Cookie, -- ^ Cookie associated with the flow.
flowStatsDurationSeconds :: !Int,
flowStatsDurationNanoseconds :: !Int,
flowStatsIdleTimeout :: !Int,
flowStatsHardTimeout :: !Int,
flowStatsPacketCount :: !Int64,
flowStatsByteCount :: !Int64
}
deriving (Show,Eq,Generic,NFData)
data TableStats =
TableStats {
tableStatsTableID :: FlowTableID,
tableStatsTableName :: String,
tableStatsMaxEntries :: Integer,
tableStatsActiveCount :: Integer,
tableStatsLookupCount :: Integer,
tableStatsMatchedCount :: Integer } deriving (Show,Eq,Generic,NFData)
data PortStats
= PortStats {
portStatsReceivedPackets :: Maybe Double,
portStatsSentPackets :: Maybe Double,
portStatsReceivedBytes :: Maybe Double,
portStatsSentBytes :: Maybe Double,
portStatsReceiverDropped :: Maybe Double,
portStatsSenderDropped :: Maybe Double,
portStatsReceiveErrors :: Maybe Double,
portStatsTransmitError :: Maybe Double,
portStatsReceivedFrameErrors :: Maybe Double,
portStatsReceiverOverrunError :: Maybe Double,
portStatsReceiverCRCError :: Maybe Double,
portStatsCollisions :: Maybe Double
} deriving (Show,Eq,Generic,NFData)
-- | A port stats value with all fields missing.
nullPortStats :: PortStats
nullPortStats = PortStats {
portStatsReceivedPackets = Nothing,
portStatsSentPackets = Nothing,
portStatsReceivedBytes = Nothing,
portStatsSentBytes = Nothing,
portStatsReceiverDropped = Nothing,
portStatsSenderDropped = Nothing,
portStatsReceiveErrors = Nothing,
portStatsTransmitError = Nothing,
portStatsReceivedFrameErrors = Nothing,
portStatsReceiverOverrunError = Nothing,
portStatsReceiverCRCError = Nothing,
portStatsCollisions = Nothing
}
-- | A port stats value with all fields present, but set to 0.
zeroPortStats :: PortStats
zeroPortStats =
PortStats {
portStatsReceivedPackets = Just 0,
portStatsSentPackets = Just 0,
portStatsReceivedBytes = Just 0,
portStatsSentBytes = Just 0,
portStatsReceiverDropped = Just 0,
portStatsSenderDropped = Just 0,
portStatsReceiveErrors = Just 0,
portStatsTransmitError = Just 0,
portStatsReceivedFrameErrors = Just 0,
portStatsReceiverOverrunError = Just 0,
portStatsReceiverCRCError = Just 0,
portStatsCollisions = Just 0
}
-- | Lift a unary function and apply to every member of a PortStats record.
liftIntoPortStats1 :: (Double -> Double) -> PortStats -> PortStats
liftIntoPortStats1 f pr1 =
PortStats { portStatsReceivedPackets = liftM f (portStatsReceivedPackets pr1),
portStatsSentPackets = liftM f (portStatsSentPackets pr1),
portStatsReceivedBytes = liftM f (portStatsReceivedBytes pr1),
portStatsSentBytes = liftM f (portStatsSentBytes pr1),
portStatsReceiverDropped = liftM f (portStatsReceiverDropped pr1),
portStatsSenderDropped = liftM f (portStatsSenderDropped pr1),
portStatsReceiveErrors = liftM f (portStatsReceiveErrors pr1),
portStatsTransmitError = liftM f (portStatsTransmitError pr1),
portStatsReceivedFrameErrors = liftM f (portStatsReceivedFrameErrors pr1),
portStatsReceiverOverrunError = liftM f (portStatsReceiverOverrunError pr1),
portStatsReceiverCRCError = liftM f (portStatsReceiverCRCError pr1),
portStatsCollisions = liftM f (portStatsCollisions pr1)
}
-- | Lift a binary function and apply to every member of a PortStats record.
liftIntoPortStats2 :: (Double -> Double -> Double) -> PortStats -> PortStats -> PortStats
liftIntoPortStats2 f pr1 pr2 =
PortStats { portStatsReceivedPackets = liftM2 f (portStatsReceivedPackets pr1) (portStatsReceivedPackets pr2),
portStatsSentPackets = liftM2 f (portStatsSentPackets pr1) (portStatsSentPackets pr2),
portStatsReceivedBytes = liftM2 f (portStatsReceivedBytes pr1) (portStatsReceivedBytes pr2),
portStatsSentBytes = liftM2 f (portStatsSentBytes pr1) (portStatsSentBytes pr2),
portStatsReceiverDropped = liftM2 f (portStatsReceiverDropped pr1) (portStatsReceiverDropped pr2),
portStatsSenderDropped = liftM2 f (portStatsSenderDropped pr1) (portStatsSenderDropped pr2),
portStatsReceiveErrors = liftM2 f (portStatsReceiveErrors pr1) (portStatsReceiveErrors pr2),
portStatsTransmitError = liftM2 f (portStatsTransmitError pr1) (portStatsTransmitError pr2),
portStatsReceivedFrameErrors = liftM2 f (portStatsReceivedFrameErrors pr1) (portStatsReceivedFrameErrors pr2),
portStatsReceiverOverrunError = liftM2 f (portStatsReceiverOverrunError pr1) (portStatsReceiverOverrunError pr2),
portStatsReceiverCRCError = liftM2 f (portStatsReceiverCRCError pr1) (portStatsReceiverCRCError pr2),
portStatsCollisions = liftM2 f (portStatsCollisions pr1) (portStatsCollisions pr2)
}
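-- Usage sketches (the statistics values below are hypothetical):
--
-- > bitCounters = liftIntoPortStats1 (* 8) byteCounters      -- bytes to bits
-- > delta       = liftIntoPortStats2 (-) newSample oldSample -- per-interval change
--
-- Fields that are 'Nothing' in either input remain 'Nothing' in the result.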
data QueueStats = QueueStats { queueStatsPortID :: PortID,
queueStatsQueueID :: QueueID,
queueStatsTransmittedBytes :: Integer,
queueStatsTransmittedPackets :: Integer,
queueStatsTransmittedErrors :: Integer } deriving (Show,Eq,Generic,NFData)
$(deriveJSON defaultOptions ''PortStats)
| AndreasVoellmy/nettle-openflow | src/Network/Data/OpenFlow/Statistics.hs | bsd-3-clause | 9,491 | 0 | 9 | 2,784 | 1,757 | 1,012 | 745 | 194 | 1 |
{-# LANGUAGE ViewPatterns, FlexibleContexts #-}
{-| Ganeti lock structure
-}
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Locking.Locks
( GanetiLocks(..)
, lockName
, ClientType(..)
, ClientId(..)
, GanetiLockWaiting
, LockLevel(..)
, lockLevel
) where
import Prelude ()
import Ganeti.Prelude
import Control.Monad ((>=>), liftM)
import Data.List (stripPrefix)
import System.Posix.Types (ProcessID)
import qualified Text.JSON as J
import Ganeti.JSON (readEitherString)
import Ganeti.Locking.Types
import Ganeti.Locking.Waiting
import Ganeti.Types
-- | The type of Locks available in Ganeti. The order of this type
-- is the lock order.
data GanetiLocks = ClusterLockSet
| BGL
| InstanceLockSet
| Instance String
| NodeGroupLockSet
| NodeGroup String
| NodeLockSet
| Node String
| NodeResLockSet
| NodeRes String
| NetworkLockSet
| Network String
-- | A lock used for a transitional period when WConfd
-- keeps the state of the configuration, but all the
-- operations are still performed on the Python side.
| ConfigLock
deriving (Ord, Eq, Show)
-- | Provide the String representation of a lock
lockName :: GanetiLocks -> String
lockName BGL = "cluster/BGL"
lockName ClusterLockSet = "cluster/[lockset]"
lockName InstanceLockSet = "instance/[lockset]"
lockName (Instance uuid) = "instance/" ++ uuid
lockName NodeGroupLockSet = "nodegroup/[lockset]"
lockName (NodeGroup uuid) = "nodegroup/" ++ uuid
lockName NodeLockSet = "node/[lockset]"
lockName (Node uuid) = "node/" ++ uuid
lockName NodeResLockSet = "node-res/[lockset]"
lockName (NodeRes uuid) = "node-res/" ++ uuid
lockName NetworkLockSet = "network/[lockset]"
lockName (Network uuid) = "network/" ++ uuid
lockName ConfigLock = "cluster/config"
-- | Obtain a lock from its name.
lockFromName :: String -> J.Result GanetiLocks
lockFromName "cluster/BGL" = return BGL
lockFromName "cluster/[lockset]" = return ClusterLockSet
lockFromName "instance/[lockset]" = return InstanceLockSet
lockFromName (stripPrefix "instance/" -> Just uuid) = return $ Instance uuid
lockFromName "nodegroup/[lockset]" = return NodeGroupLockSet
lockFromName (stripPrefix "nodegroup/" -> Just uuid) = return $ NodeGroup uuid
lockFromName "node-res/[lockset]" = return NodeResLockSet
lockFromName (stripPrefix "node-res/" -> Just uuid) = return $ NodeRes uuid
lockFromName "node/[lockset]" = return NodeLockSet
lockFromName (stripPrefix "node/" -> Just uuid) = return $ Node uuid
lockFromName "network/[lockset]" = return NetworkLockSet
lockFromName (stripPrefix "network/" -> Just uuid) = return $ Network uuid
lockFromName "cluster/config" = return ConfigLock
lockFromName n = fail $ "Unknown lock name '" ++ n ++ "'"
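-- Round-trip sketch (the UUID is made up):
--
-- > lockName (Instance "1122-aabb")   == "instance/1122-aabb"
-- > lockFromName "instance/1122-aabb" == J.Ok (Instance "1122-aabb")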
instance J.JSON GanetiLocks where
showJSON = J.JSString . J.toJSString . lockName
readJSON = readEitherString >=> lockFromName
-- | The levels the locks belong to.
data LockLevel = LevelCluster
| LevelInstance
| LevelNodeGroup
| LevelNode
| LevelNodeRes
| LevelNetwork
-- | A transitional level for internal configuration locks
| LevelConfig
deriving (Eq, Show, Enum)
-- | Provide the names of the lock levels.
lockLevelName :: LockLevel -> String
lockLevelName LevelCluster = "cluster"
lockLevelName LevelInstance = "instance"
lockLevelName LevelNodeGroup = "nodegroup"
lockLevelName LevelNode = "node"
lockLevelName LevelNodeRes = "node-res"
lockLevelName LevelNetwork = "network"
lockLevelName LevelConfig = "config"
-- | Obtain a lock level from its name.
lockLevelFromName :: String -> J.Result LockLevel
lockLevelFromName "cluster" = return LevelCluster
lockLevelFromName "instance" = return LevelInstance
lockLevelFromName "nodegroup" = return LevelNodeGroup
lockLevelFromName "node" = return LevelNode
lockLevelFromName "node-res" = return LevelNodeRes
lockLevelFromName "network" = return LevelNetwork
lockLevelFromName "config" = return LevelConfig
lockLevelFromName n = fail $ "Unknown lock-level name '" ++ n ++ "'"
instance J.JSON LockLevel where
showJSON = J.JSString . J.toJSString . lockLevelName
readJSON = readEitherString >=> lockLevelFromName
-- | For a lock, provide its level.
lockLevel :: GanetiLocks -> LockLevel
lockLevel BGL = LevelCluster
lockLevel ClusterLockSet = LevelCluster
lockLevel InstanceLockSet = LevelInstance
lockLevel (Instance _) = LevelInstance
lockLevel NodeGroupLockSet = LevelNodeGroup
lockLevel (NodeGroup _) = LevelNodeGroup
lockLevel NodeLockSet = LevelNode
lockLevel (Node _) = LevelNode
lockLevel NodeResLockSet = LevelNodeRes
lockLevel (NodeRes _) = LevelNodeRes
lockLevel NetworkLockSet = LevelNetwork
lockLevel (Network _) = LevelNetwork
lockLevel ConfigLock = LevelConfig
instance Lock GanetiLocks where
lockImplications BGL = [ClusterLockSet]
lockImplications (Instance _) = [InstanceLockSet]
lockImplications (NodeGroup _) = [NodeGroupLockSet]
lockImplications (NodeRes _) = [NodeResLockSet]
lockImplications (Node _) = [NodeLockSet]
lockImplications (Network _) = [NetworkLockSet]
  -- the ConfigLock is independent of everything; it only synchronizes
  -- access to the configuration
lockImplications ConfigLock = []
lockImplications _ = []
-- | Type of entities capable of owning locks. Usually, locks are owned
-- by jobs. However, occasionally other tasks need locks (currently, e.g.,
-- to lock the configuration). These are identified by a unique name,
-- reported to WConfD as a string.
data ClientType = ClientOther String
| ClientJob JobId
deriving (Ord, Eq, Show)
instance J.JSON ClientType where
showJSON (ClientOther s) = J.showJSON s
showJSON (ClientJob jid) = J.showJSON jid
readJSON (J.JSString s) = J.Ok . ClientOther $ J.fromJSString s
readJSON jids = J.readJSON jids >>= \jid -> J.Ok (ClientJob jid)
-- | A client is identified by its client type (usually a job id), the path
-- to its lock file, and its process id.
--
-- The JobId isn't enough to identify a client, as the master daemon
-- also handles client calls that aren't jobs but still use the configuration.
-- These tasks are identified by a unique name, reported to WConfD as a string.
data ClientId = ClientId
{ ciIdentifier :: ClientType
, ciLockFile :: FilePath
, ciPid :: ProcessID
}
deriving (Ord, Eq, Show)
-- | Obtain the ClientID from its JSON representation.
clientIdFromJSON :: J.JSValue -> J.Result ClientId
clientIdFromJSON (J.JSArray [clienttp, J.JSString lf, pid]) =
ClientId <$> J.readJSON clienttp <*> pure (J.fromJSString lf)
<*> liftM fromIntegral (J.readJSON pid :: J.Result Integer)
clientIdFromJSON x = J.Error $ "malformed client id: " ++ show x
instance J.JSON ClientId where
showJSON (ClientId client lf pid)
= J.showJSON (client, lf, fromIntegral pid :: Integer)
readJSON = clientIdFromJSON
-- | The type of lock Allocations in Ganeti. In Ganeti, the owner of
-- locks are jobs.
type GanetiLockWaiting = LockWaiting GanetiLocks ClientId Integer
| leshchevds/ganeti | src/Ganeti/Locking/Locks.hs | bsd-2-clause | 8,541 | 0 | 10 | 1,651 | 1,571 | 831 | 740 | 139 | 1 |
#!/usr/local/bin/env runghc
module Main where
{-
Uninstall.hs - a Haskell uninstaller for Mac OS X
This program is really far too big to be in a single file. However, I
wanted it to be easily distributable and runnable, and so have kept it all
together.
- Mark Lentczner
-}
import Prelude hiding ((.), id)
import Control.Arrow
import Control.Category
import Control.Exception (catch, IOException)
import Control.Monad ((>=>), msum, when)
import Data.Char (isDigit)
import Data.List (foldl', intercalate, isInfixOf, isPrefixOf, nub, sort)
import qualified Data.Map as Map
import Data.Maybe (catMaybes, isJust, mapMaybe)
import System.Console.GetOpt
import System.Directory (doesDirectoryExist, doesFileExist,
getDirectoryContents)
import System.Environment (getArgs, getEnvironment, getProgName)
import System.Exit (exitFailure, exitSuccess)
import System.FilePath ((</>), joinPath, splitDirectories, takeDirectory,
takeFileName)
import System.IO (hPutStrLn, stderr)
import System.Posix.Directory (removeDirectory)
import System.Posix.Files (createSymbolicLink, getSymbolicLinkStatus,
isSymbolicLink, isDirectory, readSymbolicLink, removeLink, rename)
import System.Process (readProcess)
--
-- Utilities
--
-- | Break a list apart into sections separated by a delimiter element
parts :: Eq a => a -> [a] -> [[a]]
parts d s = case break (== d) s of
([], []) -> []
(a, []) -> [a]
(a, (_:b)) -> a : parts d b
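-- For example:
--
-- > parts ':' "a:b::c" == ["a", "b", "", "c"]
-- > parts ':' ""       == []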
-- | Contents of a directory. Like getDirectoryContents, only a) safe, returning
-- [] if there is a problem, and b) excludes "." and ".."
contents :: FilePath -> IO [FilePath]
contents fp =
filter notSpecial `fmap` (getDirectoryContents fp `catchIO` (\_ -> return []))
where
notSpecial :: String -> Bool
notSpecial n = not $ n `elem` [".", ".."]
-- | Entries under a directory. Like contents, but with the dir path prepended.
entries :: FilePath -> IO [FilePath]
entries fp = map (fp </>) `fmap` contents fp
-- | FilePath doesn't start with a dot
notDot :: FilePath -> Bool
notDot = not . ("." `isPrefixOf`) . takeFileName
-- | simplifyPath path, eliminating . and .. components (if possible)
simplifyPath :: FilePath -> FilePath
simplifyPath = joinPath . simp [] . splitDirectories
where
simp ys [] = reverse ys
simp ys ( ".":xs) = simp ys xs
simp (y:ys) ("..":xs)
| y /= ".." = simp ys xs
simp ys ( x:xs) = simp (x:ys) xs
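-- For example:
--
-- > simplifyPath "/usr/local/../bin" == "/usr/bin"
-- > simplifyPath "a/./b/../c"        == "a/c"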
-- | A version of `catch` that catches `IOException`, and hence any exception
-- at all.
catchIO :: IO a -> (IOException -> IO a) -> IO a
catchIO = catch
--
-- Version Numbers
--
data Version = Version [Integer] String
deriving (Eq, Ord)
instance Show Version where
show (Version ns x) = intercalate "." $
map show ns ++ if null x then [] else [x]
version :: String -> Maybe Version
version s = case vparts s of
([], _) -> Nothing
([m], x) | m >= 600 && m < 800 -> Just $ let (a, b) = m `divMod` 100 in
Version [a, b] x
| otherwise -> Nothing
-- some old versions were installed in directories named "610" and "612"
(ns, x) -> Just $ Version ns x
where
vparts s' = case span isDigit s' of
("", x) -> ([], x)
(n, ('.':r)) -> let (m, x) = vparts r in (read n:m, x)
(n, x) -> ([read n], x)
ghcVersion :: String -> Maybe Version
ghcVersion s = case parts '-' s of
("ghc":v:_) -> version v
_ -> Nothing
partVersion :: String -> Maybe Version
partVersion = msum . map version . parts '-'
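-- Illustrative parses:
--
-- > version "7.8.3" == Just (Version [7,8,3] "")
-- > version "612"   == Just (Version [6,12] "")  -- legacy "612"-style dirs
-- > ghcVersion "ghc-7.8.3" == Just (Version [7,8,3] "")
-- > partVersion "haskell-platform-2013.2.0.0" == Just (Version [2013,2,0,0] "")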
data VersionTest = VersionAll | VersionOnly Version
| VersionUpto Version | VersionThru Version
deriving (Eq)
versionTest :: VersionTest -> Version -> Bool
versionTest rt = case rt of
VersionAll -> const True
(VersionOnly v) -> (v ==)
(VersionUpto v) -> (v >)
(VersionThru v) -> (v >=)
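-- For example, a "thru" test selects the given version and everything older:
--
-- > versionTest (VersionThru (Version [7,8] "")) (Version [7,6] "") == True
-- > versionTest (VersionUpto (Version [7,8] "")) (Version [7,8] "") == False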
--
-- Find Arrow: Finding things in the file system
--
-- | A Find takes an annotated FilePath to a list of annotated FilePaths
-- The annotations in and out can differ.
data Find a b = Find { unFind :: (a, FilePath) -> IO [(b, FilePath)] }
instance Category Find where
id = Find $ return . return
fbc . fab = Find $ unFind fab >=> fmap concat . mapM (unFind fbc)
instance Arrow Find where
arr f = Find $ \(a, fp) -> return [(f a, fp)]
first fab = Find $ \((a, x), fp) ->
unFind fab (a, fp) >>= return . map (\(b, fp') -> ((b, x), fp'))
runFind :: Find () a -> IO [(a, FilePath)]
runFind fua = unFind fua ((), "/")
runFinds :: [Find () a] -> IO [(a, FilePath)]
runFinds = fmap concat . mapM runFind
path :: FilePath -> Find a a
path p = Find $ \(a, f) -> return [(a, f </> p)]
star :: Find a a
star = Find $ \(a, fp) -> entries fp >>= return . map (\gp -> (a, gp))
fileTest :: (FilePath -> IO Bool) -> Find a a
fileTest p =
Find $ \(a, fp) -> p fp >>= return . (\b -> if b then [(a, fp)] else [])
fileExtract :: (a -> FilePath -> IO (Maybe b)) -> Find a b
fileExtract p =
Find $ \(a, fp) -> p a fp >>= return . maybe [] (\b -> [(b, fp)])
exists :: Find a a
exists = fileTest $ \fp -> do
dde <- doesDirectoryExist fp
dfe <- doesFileExist fp
return $ dde || dfe
fileExists :: Find a a
fileExists = fileTest doesFileExist
dirExists :: Find a a
dirExists = fileTest doesDirectoryExist
findFilter :: (a -> FilePath -> Maybe b) -> Find a b
findFilter p = Find $ \(a, fp) -> return $ maybe [] (\b -> [(b, fp)]) $ p a fp
test :: (a -> Bool) -> Find a a
test p = findFilter $ \a _fp -> if p a then Just a else Nothing
match :: (FilePath -> Bool) -> Find a a
match p = findFilter $ \a fp -> if p fp then Just a else Nothing
extract :: (FilePath -> Maybe b) -> Find a b
extract p = findFilter $ const p
matches :: (FilePath -> Bool) -> Find a a
matches p = star >>> match (p . takeFileName)
extracts :: (FilePath -> Maybe b) -> Find a b
extracts p = star >>> extract (p . takeFileName)
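-- A small composed query in the spirit of the finders below (the path is only
-- an illustration):
--
-- > runFind (path "/Library/Haskell" >>> extracts ghcVersion)
--
-- yields one (Version, FilePath) pair for each "ghc-<version>" entry found in
-- that directory.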
--
-- Finds for various places where Haskell bits are stored
--
ghcName :: FilePath -> Bool
ghcName = isJust . ghcVersion
-- | Find all the per-version installation directories.
findVersions :: IO (Map.Map Version [FilePath])
findVersions = makeMap `fmap` runFinds
[ path "/Library/Frameworks/GHC.framework/Versions" >>> extracts partVersion
, path "/Library/Frameworks/HaskellPlatform.framework/lib" >>> star >>> extracts ghcVersion
, path "/Library/Haskell" >>> extracts ghcVersion
, path "/Users" >>> star >>> path ".cabal/lib" >>> star >>> extracts ghcVersion
, path "/Users" >>> star >>> path ".ghc" >>> extracts partVersion
, path "/Users" >>> star >>> path "Library/Haskell" >>> extracts ghcVersion
, path "/usr/local/lib" >>> extracts ghcVersion
, path "/usr/local/lib" >>> matches (not . ghcName) >>> extracts ghcVersion
]
where
makeMap :: Ord a => [(a, b)] -> Map.Map a [b]
makeMap = Map.fromListWith (++) . map (\(a, b) -> (a, [b]))
-- | Find all the top level installation directories. Includes some per-version
-- directories where things were stored in common system lib directories.
findAll :: IO [FilePath]
findAll = map snd `fmap` runFinds
[ path "/Library/Frameworks/GHC.framework" >>> exists
, path "/Library/Frameworks/HaskellPlatform.framework" >>> exists
, path "/Library/Haskell" >>> exists
, path "/Users" >>> star >>> path ".cabal" >>> matches (excludePrefix "config")
, path "/Users" >>> star >>> path ".ghc" >>> matches (excludePrefix "ghci")
, path "/Users" >>> star >>> path "Library/Haskell" >>> exists
, path "/usr/local/lib" >>> matches ghcName
, path "/usr/local/lib" >>> matches (not . ghcName) >>> matches ghcName
]
where
excludePrefix :: String -> FilePath -> Bool
excludePrefix p fp = not $ p `isPrefixOf` fp
-- | Find symlinks on the PATH that point into directories that are going to be
-- removed.
findOrphanSymlinks :: [FilePath] -> IO [FilePath]
findOrphanSymlinks removed = do
pathDirs <- (maybe [] (parts ':') . lookup "PATH") `fmap` getEnvironment
let placesToLook =
map path (pathDirs ++ [ "/usr/bin", "/usr/local/bin" ])
++ [ path "/Users" >>> star >>> path "Library/Haskell/bin" ]
(nub . map snd) `fmap` runFinds
(map (\p -> p >>> star >>> sym >>> test orphan) placesToLook)
where
sym :: Find a FilePath
sym = fileExtract $ const $ \fp -> do
st <- getSymbolicLinkStatus fp
if isSymbolicLink st
then (Just . simplifyPath . (takeDirectory fp </>))
`fmap` readSymbolicLink fp
else return Nothing
orphan fp = any (`isPrefixOf` fp) removed
-- | Find all package directories where removing the per-version directory
-- might indicate that the whole package can be removed.
findEmptyPackages :: VersionTest -> IO [(Bool, FilePath)]
findEmptyPackages rt = libVersions >>= fmap catMaybes . mapM willEmpty
where
libVersions = map snd `fmap` runFinds packageFind
packageFind = case rt of
VersionAll -> packagesToAlwaysCheck
_ -> packagesToAlwaysCheck ++ packagesCoveredByAll
packagesToAlwaysCheck =
[ path "/usr/local/lib" >>> matches (not . ghcName) ]
packagesCoveredByAll =
[ path "/Library/Frameworks/HaskellPlatform.framework/lib" >>> star
, path "/Users" >>> star >>> path ".cabal/lib" >>> star
]
willEmpty :: FilePath -> IO (Maybe (Bool, FilePath))
willEmpty fp = do
names <- filter notDot `fmap` contents fp
let ghcVersions = catMaybes $ map ghcVersion names
let removingAll = all (versionTest rt) ghcVersions
let namesLeft = filter (not . willRemove) names
return $ if not (null ghcVersions) && removingAll
then Just (null namesLeft, fp)
else Nothing
willRemove = maybe False (versionTest rt) . ghcVersion
--
-- Program Options
--
data OptRemove = OptDryRun | OptScript | OptRemove
deriving (Eq, Ord)
data Options = Options { optVerbose, optHelp :: Bool,
optRemove :: OptRemove }
optReportRemove :: Options -> Bool
optReportRemove opts = case optRemove opts of
OptDryRun -> True
OptScript -> False
OptRemove -> optVerbose opts
optionsDescr :: [OptDescr (Options -> Options)]
optionsDescr =
[ Option ['v'] ["verbose"] (NoArg setVerbose) "report each path"
, Option ['n'] ["dry-run"] (NoArg setDryRun) "only report what would be removed"
, Option ['s'] ["sh", "script"] (NoArg setScript) "generate a shell script to remove files"
, Option ['r'] ["rm", "remove"] (NoArg setRemove) "actually remove files"
, Option ['?'] ["help"] (NoArg setHelp) "help (this message)"
]
where
setVerbose opts = opts { optVerbose = True }
setDryRun opts = opts { optRemove = OptDryRun }
setScript opts = opts { optRemove = OptScript }
setRemove opts = opts { optRemove = OptRemove }
setHelp opts = opts { optHelp = True }
parseOptions :: [String] -> IO (Options, [String])
parseOptions argv =
case getOpt Permute optionsDescr argv of
(o,n,[] ) -> return (foldl' (flip ($)) defaultOpts o,n)
(_,_,errs) -> usageFailure (concat errs)
where
defaultOpts =
Options { optVerbose = False, optHelp = False,
optRemove = OptDryRun }
progMessage :: String -> IO ()
progMessage msg = do
prog <- getProgName
putStr $ intercalate prog $ parts '$' msg
usage :: IO ()
usage = do
progMessage header
putStr $ usageInfo "Options (can appear anywhere):" optionsDescr
where
header =
"Usage: $ -- find versions on system\n\
\ $ thru VERSION -- remove VERSION and earlier\n\
\ $ only VERSION -- remove only VERSION\n\
\ $ all -- remove all\n\
\NOTE: Commands are 'dry run' by default and don't actually delete.\n"
usageFailure :: String -> IO a
usageFailure msg = do
mapM_ (putStrLn . ("*** " ++)) $ lines msg
usage
exitFailure
message :: Options -> String -> IO ()
message opts str = putStrLn $ messagePrefix ++ str
where
messagePrefix = if (optRemove opts == OptScript) then "echo " else ""
--
-- Primitive File Operations
--
safely :: FilePath -> IO () -> IO ()
safely fp = (`catchIO` (hPutStrLn stderr . fmt . show))
where
fmt msg = "** ERROR "
++ (if fp `isInfixOf` msg then "" else fp ++ ": ") ++ msg
-- | Recursively remove a directory. Like shell command "rm -rf".
-- Unlike System.Directory.removeDirectoryRecursive, doesn't follow symlinks.
removeDirectoryRecursive :: Options -> FilePath -> IO ()
removeDirectoryRecursive opts fp = do
when (optReportRemove opts) $ putStrLn fp
case (optRemove opts) of
OptDryRun -> return ()
OptScript -> putStrLn ("rm -rf " ++ fp)
OptRemove -> rmrf fp
where
rmrf f = do
st <- getSymbolicLinkStatus f
if isDirectory st
then do
entries f >>= mapM_ rmrf
safely f $ removeDirectory f
else
safely f $ removeLink f
-- | Remove a file. Like shell command "rm -f".
-- If file is a symlinks, removes the symlink, not what it points to.
removeFile :: Options -> FilePath -> IO ()
removeFile opts fp = do
when (optReportRemove opts) $ do
st <- getSymbolicLinkStatus fp
if isSymbolicLink st
then readSymbolicLink fp >>= putStrLn . ((fp ++ "@ -> ") ++)
else putStrLn fp
case (optRemove opts) of
OptDryRun -> return ()
OptScript -> putStrLn ("rm -f " ++ fp)
OptRemove -> safely fp $ removeLink fp
-- | Symlink a file. Like shell command "ln -sf".
-- If file is a symlinks, removes the symlink, not what it points to.
symlinkFile :: Options -> FilePath -> FilePath -> IO ()
symlinkFile opts dest fp = do
when (optReportRemove opts) $
putStrLn (fp ++ "@ update to -> " ++ dest)
case (optRemove opts) of
OptDryRun -> return ()
OptScript -> putStrLn ("ln -sf " ++ dest ++ " " ++ fp)
OptRemove -> safely fp $ removeLink fp >> createSymbolicLink dest fp
-- | Archive a file, by giving it a suffix with a unique integer attached
archiveFile :: Options -> String -> FilePath -> IO ()
archiveFile opts suffix fp = do
dest <- findFreeArchive 0
when (optReportRemove opts) $
putStrLn (fp ++ " rename to -> " ++ dest)
case (optRemove opts) of
OptDryRun -> return ()
OptScript -> putStrLn ("mv " ++ fp ++ " " ++ dest)
OptRemove -> safely fp $ rename fp dest
where
findFreeArchive :: Int -> IO FilePath
findFreeArchive n = do
let dest = fp ++ suffix ++ "." ++ show n
dfe <- doesFileExist dest
if dfe
then findFreeArchive (n + 1)
else return dest
-- | For each framework, update the Current symlink if the version it points
-- to will be removed, or remove the whole framework if nothing will be left.
updateFrameworks :: Options -> VersionTest -> IO ()
updateFrameworks opts rt = when (rt /= VersionAll) $
mapM_ updateFramework frameworks
where
frameworks =
[ ("/Library/Frameworks/GHC.framework", "Versions", "Current")
, ("/Library/Haskell", "", "current")
]
updateFramework (fp, vp, cp) = do
items <- contents $ fp </> vp
let remain = filter (willKeep cp) items
let remainVers = reverse . sort . mapMaybe andVersion $ remain
let curr = fp </> vp </> cp
currDest <- readSymbolicLink curr `catchIO` (\_ -> return "")
when (willRemove currDest) $ case (remain, remainVers) of
([], _) -> -- nothing will remain, remove whole framework
removeDirectoryRecursive opts fp
(_, []) -> do -- no versions will remain, but something will
removeFile opts curr
message opts $ "** " ++ fp ++
" is not empty, but has no more versions. Consider removing."
(_, ((_,newDest):_)) -> -- update to maximal remaining version
symlinkFile opts newDest curr
willRemove = maybe False (versionTest rt) . partVersion
willKeep cp fp = notDot fp && (fp /= cp) && (not $ willRemove fp)
andVersion fp = (\v -> (v, fp)) `fmap` partVersion fp
--
-- Main Operations
--
-- | Display versions found
showVersions :: Options -> Map.Map Version [FilePath] -> IO ()
showVersions opts m = do
whenVer blank
mapM_ disp (Map.toAscList m)
progMessage hints
where
whenVer = when (optVerbose opts)
blank = putStrLn ""
disp (v, fp) = do
putStrLn $ show v
whenVer $ do
mapM_ (putStrLn . (" " ++)) $ sort fp
blank
hints =
"-- To remove a version and all earlier: $ thru VERSION\n\
\-- To remove only a single version: $ only VERSION\n\n"
alertOlderVersions :: String -> Map.Map Version [FilePath] -> IO ()
alertOlderVersions appl m = when (not $ Map.null m) $ do
_ <- readProcess "osascript" [] alert
return ()
where
alert = "tell application \"" ++ appl ++ "\"\n\
\\tactivate\n\
\\tdisplay alert \"Older Versions\" message \"" ++ msg ++ "\"\n\
\end tell\n"
msg = "There are older versions of GHC and/or \
\Haskell Platform on this system.\r\
\\r\
\Run the command line tool uninstall-hs to \
\find out which and how to remove them."
-- | Remove file paths and other associated files.
-- Must be supplied the predicate used to select versions to remove so that the
-- associated files can be correctly identified.
remove :: Options -> VersionTest -> [FilePath] -> IO ()
remove opts rt fps = do
case sort fps of
[] -> message opts "** Nothing to remove"
sfps -> do
mapM_ (removeDirectoryRecursive opts) sfps
findOrphanSymlinks fps >>= mapM_ (removeFile opts)
findEmptyPackages rt >>= mapM_ removePackage
updateFrameworks opts rt
removeHints
where
removePackage (empty, fp) = do
if empty
then removeDirectoryRecursive opts fp
else message opts
("** " ++ fp ++
" is not empty, but has no more GHC libs. Consider removing.")
removeHints = when (optRemove opts == OptDryRun) $
putStrLn
"-- To actually remove these files, \
\sudo run the command again with --remove\n\
\-- To generate a script to remove these files, \
\run the command again with --script\n"
-- | Remove all Haskell versions, and the top level directories.
removeAll :: Options -> IO ()
removeAll opts = do
runFind cabalConfigs >>= mapM_ (archiveFile opts ".orig" . snd)
findAll >>= remove opts VersionAll
where
cabalConfigs = path "/Users" >>> star >>> path ".cabal/config" >>> exists
main :: IO ()
main = getArgs >>= parseOptions >>= uncurry main'
main' :: Options -> [String] -> IO ()
main' opts args = do
when (optHelp opts) $ usage >> exitSuccess
case args of
[] -> do
putStrLn "-- Versions found on this system"
findVersionsThat VersionAll >>= showVersions opts
["all"] -> do
removePlan "all Haskell directories"
removeAll opts
["test"] -> do
main' testOpts []
vers <- Map.keys `fmap` findVersions
mapM_ (\v -> main' testOpts ["only", show v]) vers
mapM_ (\v -> main' testOpts ["thru", show v]) vers
main' testOpts ["all"]
["thru", v] -> withVersion v $ \ver -> do
removePlan $ "version " ++ show ver ++ " and earlier"
removeVersionsThat (VersionThru ver)
["only", v] -> withVersion v $ \ver -> do
removePlan $ "just version " ++ show ver
removeVersionsThat (VersionOnly ver)
["install-check", v, a] -> withVersion v $ \ver -> do
findVersionsThat (VersionUpto ver) >>= alertOlderVersions a
        _ -> usageFailure "unrecognized args"
where
removePlan s = message opts $ removePrefix ++ s
removePrefix = case optRemove opts of
OptDryRun -> "-- Would remove "
_ -> "-- Removing "
withVersion v a =
maybe (usageFailure "couldn't parse version") a $ version v
findVersionsThat rt =
Map.filterWithKey (const . versionTest rt) `fmap` findVersions
removeVersionsThat rt =
findVersionsThat rt >>= remove opts rt . concat . Map.elems
testOpts = opts { optVerbose = True, optRemove = OptDryRun }
| erantapaa/haskell-platform | hptool/os-extras/osx/bin/uninstall-hs.hs | bsd-3-clause | 20,950 | 7 | 18 | 5,788 | 6,292 | 3,269 | 3,023 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module Yesod.Routes.TH.ParseRoute
( -- ** ParseRoute
mkParseRouteInstance
) where
import Yesod.Routes.TH.Types
import Language.Haskell.TH.Syntax
import Data.Text (Text)
import Yesod.Routes.Class
import Yesod.Routes.TH.Dispatch
mkParseRouteInstance :: Cxt -> Type -> [ResourceTree a] -> Q Dec
mkParseRouteInstance cxt typ ress = do
cls <- mkDispatchClause
MkDispatchSettings
{ mdsRunHandler = [|\_ _ x _ -> x|]
, mds404 = [|error "mds404"|]
, mds405 = [|error "mds405"|]
, mdsGetPathInfo = [|fst|]
, mdsMethod = [|error "mdsMethod"|]
, mdsGetHandler = \_ _ -> [|error "mdsGetHandler"|]
, mdsSetPathInfo = [|\p (_, q) -> (p, q)|]
, mdsSubDispatcher = [|\_runHandler _getSub toMaster _env -> fmap toMaster . parseRoute|]
, mdsUnwrapper = return
}
(map removeMethods ress)
helper <- newName "helper"
fixer <- [|(\f x -> f () x) :: (() -> ([Text], [(Text, Text)]) -> Maybe (Route a)) -> ([Text], [(Text, Text)]) -> Maybe (Route a)|]
return $ instanceD cxt (ConT ''ParseRoute `AppT` typ)
[ FunD 'parseRoute $ return $ Clause
[]
(NormalB $ fixer `AppE` VarE helper)
[FunD helper [cls]]
]
where
    -- We do this in order to skip the unnecessary method parsing
removeMethods (ResourceLeaf res) = ResourceLeaf $ removeMethodsLeaf res
removeMethods (ResourceParent w x y z) = ResourceParent w x y $ map removeMethods z
removeMethodsLeaf res = res { resourceDispatch = fixDispatch $ resourceDispatch res }
fixDispatch (Methods x _) = Methods x []
fixDispatch x = x
instanceD :: Cxt -> Type -> [Dec] -> Dec
instanceD = InstanceD Nothing
| geraldus/yesod | yesod-core/src/Yesod/Routes/TH/ParseRoute.hs | mit | 1,802 | 0 | 14 | 488 | 438 | 251 | 187 | 37 | 3 |
{-# LANGUAGE OverloadedStrings #-}
-- | Low level XMLHttpRequest support. IE6 and older are not supported.
module Haste.Ajax (Method (..), URL, ajaxRequest, noParams) where
import Haste.Foreign
import Haste.Prim
import Haste.Prim.JSType
import Control.Monad.IO.Class
import Control.Monad (join)
ajaxReq :: Method -- method (GET/POST)
-> JSString -- URL
-> Bool -- async?
-> JSString -- POST data
-> (Maybe JSString -> IO ()) -- callback
-> IO ()
ajaxReq = ffi "(function(method, url, async, postdata, cb) {\
\var xhr = new XMLHttpRequest();\
\xhr.open(method, url, async);\
\if(method == 'POST') {\
\xhr.setRequestHeader('Content-type',\
\'application/x-www-form-urlencoded');\
\}\
\xhr.onreadystatechange = function() {\
\if(xhr.readyState == 4) {\
\cb(xhr.status == 200 ? xhr.responseText : null);\
\}\
\};\
\xhr.send(postdata);})"
data Method = GET | POST deriving Show
instance ToAny Method where
toAny GET = toAny ("GET" :: JSString)
toAny POST = toAny ("POST" :: JSString)
-- | Pass to 'ajaxRequest' instead of @[]@ when no parameters are needed, to
-- avoid type ambiguity errors.
noParams :: [((), ())]
noParams = []
-- | Perform an AJAX request.
ajaxRequest :: (MonadIO m, JSType a, JSType b, JSType c)
=> Method -- ^ GET or POST. For GET, pass all params in URL.
-- For POST, pass all params as post data.
-> URL -- ^ URL to make AJAX request to.
-> [(a, b)] -- ^ A list of (key, value) parameters.
-> (Maybe c -> IO ()) -- ^ Callback to invoke on completion.
-> m ()
ajaxRequest m url kv cb = liftIO $ do
_ <- ajaxReq m url' True pd (cb . join . fmap fromJSString)
return ()
where
url' = case m of
GET
| null kv -> toJSString url
| otherwise -> catJSStr "?" [toJSString url, toQueryString kv]
POST -> toJSString url
pd = case m of
GET -> ""
POST
| null kv -> ""
| otherwise -> toQueryString kv
toQueryString :: (JSType a, JSType b) =>[(a, b)] -> JSString
toQueryString = catJSStr "&" . map f
where f (k, v) = catJSStr "=" [toJSString k,toJSString v]
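-- A small usage sketch (hypothetical endpoint and parameter names, shown only
-- to illustrate the argument shapes; pass 'noParams' when there are no
-- key/value pairs):
--
-- > ajaxRequest GET "/api/search" [("q", "haskell") :: (String, String)] $
-- >   maybe (putStrLn "request failed") putStrLn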
| beni55/haste-compiler | libraries/haste-lib/src/Haste/Ajax.hs | bsd-3-clause | 2,326 | 0 | 13 | 696 | 527 | 281 | 246 | 42 | 3 |
module HAD.Y2014.M04.D11.Solution where
{- | thirdOfFive
return the third of five arguments
No other interest than pointFree
prop> \(x1, x2, x3, x4, x5) -> thirdOfFive x1 x2 x3 x4 x5 == (x3 :: Int)
thirdOfFive a b c d e = c
thirdOfFive a b c d = const c
thirdOfFive a b c = const $ const c
thirdOfFive a b c = const . const $ c
thirdOfFive a b = const . const
thirdOfFive a = const (const . const)
thirdOfFive = const (const (const . const))
thirdOfFive = const $ const (const . const)
thirdOfFive = const $ const $ const . const
thirdOfFive = const . const $ const . const
-}
thirdOfFive :: a -> b -> c -> d -> e -> c
thirdOfFive = const . const $ const . const . id
| weima/1HAD | exercises/HAD/Y2014/M04/D11/Solution.hs | mit | 715 | 0 | 9 | 188 | 56 | 32 | 24 | 3 | 1 |
module ListClone () where
import Language.Haskell.Liquid.Prelude
make2d :: a -> Int -> Int -> [[a]]
make2d x n m = clone (clone x n) m
clone :: a -> Int -> [a]
clone x n
| n == 0
= []
| otherwise
= x : (clone x (n-1))
-- check [] = [liquidAssertB True]
-- check (xs:xss) = let n = length xs in map (\xs' -> liquidAssertB (length xs' == n)) xss
chk :: [[a]] -> Bool
chk [] = liquidAssertB True
chk (xs:xss) =
case xss of
(xs1:xss1) -> let n = length xs in liquidAssertB (length xs1 == n) && chk xss
[] -> liquidAssertB True
fooL = [[1, 1, 3], [2, 2, 5]]
fooL1 = let f = make2d n0 n1 n2 in f
where n0 = 0
n1 = 2
n2 = 3
propL = chk fooL1
prop = chk fooL
| mightymoose/liquidhaskell | tests/pos/ListLen.hs | bsd-3-clause | 749 | 0 | 14 | 245 | 320 | 170 | 150 | 23 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
module Build
( getDeps
, touchDeps
, touch
, recompDeps
, isNewerThan
, safeReadFile
) where
import Control.Applicative ((<|>), many, (<$>))
import qualified Data.Attoparsec.Text as A
import Data.Char (isSpace, isUpper)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
import Data.ByteString (ByteString)
import qualified Data.ByteString as S
import Control.Exception (SomeException, try, IOException)
import Control.Exception.Lifted (handle)
import Control.Monad (when, filterM, forM, forM_, (>=>))
import Control.Monad.Trans.State (StateT, get, put, execStateT)
import Control.Monad.Trans.Writer (WriterT, tell, execWriterT)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Trans.Class (lift)
import Data.Monoid (Monoid (mappend, mempty))
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified System.Posix.Types
import System.Directory
import System.FilePath (takeExtension, replaceExtension, (</>), takeDirectory,
splitPath, joinPath)
import System.PosixCompat.Files (getFileStatus, setFileTimes,
accessTime, modificationTime)
import Text.Shakespeare (Deref)
import Text.Julius (juliusUsedIdentifiers)
import Text.Cassius (cassiusUsedIdentifiers)
import Text.Lucius (luciusUsedIdentifiers)
safeReadFile :: MonadIO m => FilePath -> m (Either IOException ByteString)
safeReadFile = liftIO . try . S.readFile
touch :: IO ()
touch = do
m <- handle (\(_ :: SomeException) -> return Map.empty) $ readFile touchCache >>= readIO
x <- fmap snd (getDeps [])
m' <- execStateT (execWriterT $ touchDeps id updateFileTime x) m
createDirectoryIfMissing True $ takeDirectory touchCache
writeFile touchCache $ show m'
where
touchCache = "dist/touchCache.txt"
-- | Returns True if any files were touched, otherwise False
recompDeps :: [FilePath] -> StateT (Map.Map FilePath (Set.Set Deref)) IO Bool
recompDeps =
fmap toBool . execWriterT . (liftIO . getDeps >=> touchDeps hiFile removeHi . snd)
where
toBool NoFilesTouched = False
toBool SomeFilesTouched = True
type Deps = Map.Map FilePath ([FilePath], ComparisonType)
getDeps :: [FilePath] -> IO ([FilePath], Deps)
getDeps hsSourceDirs = do
let defSrcDirs = case hsSourceDirs of
[] -> ["."]
ds -> ds
hss <- fmap concat $ mapM findHaskellFiles defSrcDirs
deps' <- mapM determineDeps hss
return $ (hss, fixDeps $ zip hss deps')
data AnyFilesTouched = NoFilesTouched | SomeFilesTouched
instance Monoid AnyFilesTouched where
mempty = NoFilesTouched
mappend NoFilesTouched NoFilesTouched = mempty
mappend _ _ = SomeFilesTouched
touchDeps :: (FilePath -> FilePath) ->
(FilePath -> FilePath -> IO ()) ->
Deps -> WriterT AnyFilesTouched (StateT (Map.Map FilePath (Set.Set Deref)) IO) ()
touchDeps f action deps = (mapM_ go . Map.toList) deps
where
go (x, (ys, ct)) = do
isChanged <- handle (\(_ :: SomeException) -> return True) $ lift $
case ct of
AlwaysOutdated -> return True
CompareUsedIdentifiers getDerefs -> do
derefMap <- get
ebs <- safeReadFile x
let newDerefs =
case ebs of
Left _ -> Set.empty
Right bs -> Set.fromList $ getDerefs $ T.unpack $ decodeUtf8With lenientDecode bs
put $ Map.insert x newDerefs derefMap
case Map.lookup x derefMap of
Just oldDerefs | oldDerefs == newDerefs -> return False
_ -> return True
when isChanged $ forM_ ys $ \y -> do
n <- liftIO $ x `isNewerThan` f y
when n $ do
liftIO $ putStrLn ("Forcing recompile for " ++ y ++ " because of " ++ x)
liftIO $ action x y
tell SomeFilesTouched
-- | remove the .hi files for a .hs file, thereby forcing a recompile
removeHi :: FilePath -> FilePath -> IO ()
removeHi _ hs = mapM_ removeFile' hiFiles
where
removeFile' file = try' (removeFile file) >> return ()
hiFiles = map (\e -> "dist/build" </> removeSrc (replaceExtension hs e))
["hi", "p_hi"]
-- | change file mtime of .hs file to that of the dependency
updateFileTime :: FilePath -> FilePath -> IO ()
updateFileTime x hs = do
(_ , modx) <- getFileStatus' x
(access, _ ) <- getFileStatus' hs
_ <- try' (setFileTimes hs access modx)
return ()
hiFile :: FilePath -> FilePath
hiFile hs = "dist/build" </> removeSrc (replaceExtension hs "hi")
removeSrc :: FilePath -> FilePath
removeSrc f = case splitPath f of
("src/" : xs) -> joinPath xs
_ -> f
try' :: IO x -> IO (Either SomeException x)
try' = try
isNewerThan :: FilePath -> FilePath -> IO Bool
isNewerThan f1 f2 = do
(_, mod1) <- getFileStatus' f1
(_, mod2) <- getFileStatus' f2
return (mod1 > mod2)
getFileStatus' :: FilePath ->
IO (System.Posix.Types.EpochTime, System.Posix.Types.EpochTime)
getFileStatus' fp = do
efs <- try' $ getFileStatus fp
case efs of
Left _ -> return (0, 0)
Right fs -> return (accessTime fs, modificationTime fs)
fixDeps :: [(FilePath, [(ComparisonType, FilePath)])] -> Deps
fixDeps =
Map.unionsWith combine . map go
where
go :: (FilePath, [(ComparisonType, FilePath)]) -> Deps
go (x, ys) = Map.fromList $ map (\(ct, y) -> (y, ([x], ct))) ys
combine (ys1, ct) (ys2, _) = (ys1 `mappend` ys2, ct)
findHaskellFiles :: FilePath -> IO [FilePath]
findHaskellFiles path = do
contents <- getDirectoryContents path
fmap concat $ mapM go contents
where
go ('.':_) = return []
go filename = do
d <- doesDirectoryExist full
if not d
then if isHaskellFile
then return [full]
else return []
else if isHaskellDir
then findHaskellFiles full
else return []
where
-- this could fail on unicode
isHaskellDir = isUpper (head filename)
isHaskellFile = takeExtension filename `elem` watch_files
full = path </> filename
watch_files = [".hs", ".lhs"]
data TempType = StaticFiles FilePath
| Verbatim | Messages FilePath | Hamlet | Widget | Julius | Cassius | Lucius
deriving Show
-- | How to tell if a file is outdated.
data ComparisonType = AlwaysOutdated
| CompareUsedIdentifiers (String -> [Deref])
determineDeps :: FilePath -> IO [(ComparisonType, FilePath)]
determineDeps x = do
y <- safeReadFile x
case y of
Left _ -> return []
Right bs -> do
let z = A.parseOnly (many $ (parser <|> (A.anyChar >> return Nothing)))
$ decodeUtf8With lenientDecode bs
case z of
Left _ -> return []
Right r -> mapM go r >>= filterM (doesFileExist . snd) . concat
where
go (Just (StaticFiles fp, _)) = map ((,) AlwaysOutdated) <$> getFolderContents fp
go (Just (Hamlet, f)) = return [(AlwaysOutdated, f)]
go (Just (Widget, f)) = return
[ (AlwaysOutdated, "templates/" ++ f ++ ".hamlet")
, (CompareUsedIdentifiers $ map fst . juliusUsedIdentifiers, "templates/" ++ f ++ ".julius")
, (CompareUsedIdentifiers $ map fst . luciusUsedIdentifiers, "templates/" ++ f ++ ".lucius")
, (CompareUsedIdentifiers $ map fst . cassiusUsedIdentifiers, "templates/" ++ f ++ ".cassius")
]
go (Just (Julius, f)) = return [(CompareUsedIdentifiers $ map fst . juliusUsedIdentifiers, f)]
go (Just (Cassius, f)) = return [(CompareUsedIdentifiers $ map fst . cassiusUsedIdentifiers, f)]
go (Just (Lucius, f)) = return [(CompareUsedIdentifiers $ map fst . luciusUsedIdentifiers, f)]
go (Just (Verbatim, f)) = return [(AlwaysOutdated, f)]
go (Just (Messages f, _)) = map ((,) AlwaysOutdated) <$> getFolderContents f
go Nothing = return []
parser = do
ty <- (do _ <- A.string "\nstaticFiles \""
x' <- A.many1 $ A.satisfy (/= '"')
return $ StaticFiles x')
<|> (A.string "$(parseRoutesFile " >> return Verbatim)
<|> (A.string "$(hamletFile " >> return Hamlet)
<|> (A.string "$(ihamletFile " >> return Hamlet)
<|> (A.string "$(whamletFile " >> return Hamlet)
<|> (A.string "$(html " >> return Hamlet)
<|> (A.string "$(widgetFile " >> return Widget)
<|> (A.string "$(Settings.hamletFile " >> return Hamlet)
<|> (A.string "$(Settings.widgetFile " >> return Widget)
<|> (A.string "$(juliusFile " >> return Julius)
<|> (A.string "$(cassiusFile " >> return Cassius)
<|> (A.string "$(luciusFile " >> return Lucius)
<|> (A.string "$(persistFile " >> return Verbatim)
<|> (
A.string "$(persistFileWith " >>
A.many1 (A.satisfy (/= '"')) >>
return Verbatim)
<|> (do
_ <- A.string "\nmkMessage \""
A.skipWhile (/= '"')
_ <- A.string "\" \""
x' <- A.many1 $ A.satisfy (/= '"')
_ <- A.string "\" \""
_y <- A.many1 $ A.satisfy (/= '"')
_ <- A.string "\""
return $ Messages x')
case ty of
Messages{} -> return $ Just (ty, "")
StaticFiles{} -> return $ Just (ty, "")
_ -> do
A.skipWhile isSpace
_ <- A.char '"'
y <- A.many1 $ A.satisfy (/= '"')
_ <- A.char '"'
A.skipWhile isSpace
_ <- A.char ')'
return $ Just (ty, y)
getFolderContents :: FilePath -> IO [FilePath]
getFolderContents fp = do
cs <- getDirectoryContents fp
let notHidden ('.':_) = False
notHidden ('t':"mp") = False
notHidden ('f':"ay") = False
notHidden _ = True
fmap concat $ forM (filter notHidden cs) $ \c -> do
let f = fp ++ '/' : c
isFile <- doesFileExist f
if isFile then return [f] else getFolderContents f
| ygale/yesod | yesod-bin/Build.hs | mit | 10,871 | 1 | 30 | 3,426 | 3,442 | 1,783 | 1,659 | -1 | -1 |
main = print . sum . map ((read :: String -> Int) . (:[])) . show $ factorial 100
factorial 1 = 1
factorial n = n * factorial (n - 1)
| nickspinale/euler | complete/020.hs | mit | 135 | 0 | 12 | 33 | 81 | 42 | 39 | 3 | 1 |
{-
We compare the JSON to the version we parsed from the original file, to ensure
the pretty printer doesn't accidentally change syntax.
-}
module Main where
import Data.Aeson (eitherDecodeFileStrict)
import Data.Text.Lazy (pack)
import Data.Text.Lazy.Encoding (encodeUtf8)
import Data.Text.Prettyprint.Doc (Pretty (pretty))
import Language.Scala (Source)
import Language.Scala.Parser.External (runNodeScalametaParsers)
import System.FilePath (dropExtension, takeBaseName, (</>))
import Test.Tasty (TestTree, defaultMain, testGroup)
import Test.Tasty.Golden (findByExtension, goldenVsStringDiff)
main :: IO ()
main =
tests >>= defaultMain
scalametaParserTest :: FilePath -> TestTree
scalametaParserTest f =
goldenVsStringDiff (dropExtension (takeBaseName f)) (\ref new -> ["diff", "-u", ref, new]) f $ do
parsed <- eitherDecodeFileStrict f :: IO (Either String Source)
either error ((encodeUtf8 . pack <$>) . runNodeScalametaParsers . show . pretty) parsed
tests :: IO TestTree
tests =
testGroup "scalameta-parsers" . map scalametaParserTest <$> findByExtension [".json"] ("test" </> "fixtures")
| puffnfresh/language-scala | test/scalameta-parsers/Main.hs | mit | 1,115 | 0 | 14 | 149 | 304 | 174 | 130 | 21 | 1 |
module ProjectEuler.Problem95
( problem
) where
import Data.List
import Data.Ord
import Data.Word
import Control.Monad
import Control.Monad.ST
import qualified Data.IntSet as IS
import qualified Data.IntMap.Strict as IM
import qualified Data.Vector.Unboxed as V
import qualified Data.Vector.Unboxed.Mutable as VM
import ProjectEuler.Types
problem :: Problem
problem = pureProblem 95 Solved result
{-
Some ideas:
- https://en.wikipedia.org/wiki/Sociable_number
- https://en.wikipedia.org/wiki/Aliquot_sequence
Since we want to find chains of values formed by Aliquot sequence,
we can first compute (x, sumOfProperDivisors x) for all values in the search space,
then we restrict keys by collecting all values of `sumOfProperDivisors` (cutting),
and doing this repeatedly will eventually get us to a point
where all remaining values of this map are in some cycle and no more cutting
can be made.
Fortunately, within 20 seconds only 117 pairs remain,
which is far more manageable to search through.
Update: since we are working with a large number of values,
whose sums of proper divisors are all needed, we can avoid
dealing with each individual one and just work with a vector to
sort it out - it's actually way faster.
TODO: we don't really need multiple rounds of reduction to get to the
"cycle nodes only" point - the vector should be fast enough to deal with.
-}
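-- A minimal, self-contained illustration of the Aliquot step that the sieve
-- below computes for every value at once (plain trial division; this helper
-- is only here as an example and is not used by the solution). For instance,
-- aliquotStep 220 == 284 and aliquotStep 284 == 220 (an amicable pair, i.e.
-- a cycle of length 2), while aliquotStep 28 == 28 (a perfect number, a
-- cycle of length 1).
aliquotStep :: Int -> Int
aliquotStep n = sum [d | d <- [1 .. n `div` 2], n `mod` d == 0]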
maxN :: Int
maxN = 1000000
sumOfProperDivisorsVec :: V.Vector Word32
sumOfProperDivisorsVec = runST $ do
vec <- VM.replicate (maxN+1) 1
VM.write vec 0 0
VM.write vec 1 0
forM_ [2..fromIntegral maxN] $ \i ->
forM_ [i+i,i+i+i..maxN] $ \j ->
VM.modify vec (+ fromIntegral i) j
V.unsafeFreeze vec
loopMapInit :: IM.IntMap Int
loopMapInit = cutClear $ IM.fromList pairs
where
pairs = V.foldl' go [] sumOfProperDivisorsVec
where
go :: [(Int,Int)] -> Word32 -> [(Int,Int)]
go xs val
| val < 2 || val > fromIntegral maxN = xs
| otherwise =
let v = fromIntegral val
in (v, fromIntegral $ V.unsafeIndex sumOfProperDivisorsVec v) : xs
cut :: IM.IntMap Int -> IM.IntMap Int
cut m = IM.restrictKeys m vals
where
vals = IS.fromList $ IM.elems m
cutClear :: IM.IntMap Int -> IM.IntMap Int
cutClear m =
if IM.size m == IM.size m'
then m
else cutClear m'
where
m' = cut m
extractLoop :: IM.IntMap Int -> (IS.IntSet, IM.IntMap Int)
extractLoop m = case IM.minViewWithKey m of
Nothing -> (IS.empty, m)
Just ((k,_), _) ->
let findLoop start cur acc =
let next = m IM.! cur
in if next == start
then acc
else findLoop start next (IS.insert cur acc)
loop = findLoop k k (IS.singleton k)
in (loop, cutClear (IM.withoutKeys m loop))
result :: Int
result = IS.findMin $ head sortedLoopGroups
where
-- sort by descending group size to find the maximum
sortedLoopGroups = sortOn (Down . IS.size) loopGroups
loopGroups :: [IS.IntSet]
loopGroups = unfoldr go loopMapInit
go loopMap =
if IS.null grp
then Nothing
else Just (grp, loopMap')
where
(grp, loopMap') = extractLoop loopMap
| Javran/Project-Euler | src/ProjectEuler/Problem95.hs | mit | 3,234 | 0 | 18 | 786 | 803 | 421 | 382 | 64 | 3 |
{-# LANGUAGE GADTs #-}
module Data.TBinaryTree where
import Data.Interface.TSequence
data TBinTree c x y where
Empty :: TBinTree c x x
Leaf :: c x y -> TBinTree c x y
Node :: TBinTree c x y -> TBinTree c y z -> TBinTree c x z
instance TSequence TBinTree where
tempty = Empty
tsingleton c = Leaf c
(><) = Node
tviewl Empty = TEmptyL
tviewl (Leaf c) = c :| Empty
tviewl (Node (Node l m) r) = tviewl (Node l (Node m r))
tviewl (Node (Leaf c) r) = c :| r
tviewl (Node Empty r) = tviewl r
| atzeus/reflectionwithoutremorse | Data/TBinaryTree.hs | mit | 546 | 0 | 10 | 164 | 232 | 120 | 112 | 16 | 0 |
{- |
Module : Numeric.Information.Model.IT
Description : Information quantities on models
Copyright : (c) Malte Harder
License : MIT
Maintainer : [email protected]
Stability : experimental
Portability : portable
-}
module Numeric.Information.Model.IT
(
-- * Entropy & Mutual Information
nodeEntropy
, nodeCondEntropy
, nodeMutualInfo
, nodeCondMutualInfo
-- * Specific Information
, nodeSpecificInformation
, nodeMinimalInformation
-- * Partial Information Decomposition
-- ** Data Types
, PINode (..)
, PILattice
-- ** Functions
, piDecomp
-- ** Helper Functions
, piLattice
, piSet
, (-<<)
, (-<=)
, subset
-- * Information Flow
, nodeInformationFlow
) where
import Numeric.Information.Model
import Numeric.Information.Distribution
import Numeric.Information.IT
import Data.Graph.Inductive
import Data.List
--import Statistics.Math hiding(log2)
import Numeric.SpecFunctions hiding(log2)
nodeEntropy :: (Floating prob, Ord prob, Ord a)
=> [String]
-> ModelDistribution prob a
-> prob
nodeEntropy v m = (entropy . extract) (marginalize v m)
nodeCondEntropy :: (Floating prob, Ord prob, Ord a)
=> [String]
-> [String]
-> ModelDistribution prob a
-> prob
nodeCondEntropy yv xv m =
let p_ygx = extractC $ conditionalize yv xv m
p_x = extract $ marginalize xv m
in condEntropy p_ygx p_x
nodeMutualInfo :: (Floating prob, Ord prob, Ord a)
=> [String]
-> [String]
-> ModelDistribution prob a
-> prob
nodeMutualInfo yv xv m =
let p_ygx = extractC $ conditionalize yv xv m
p_x = extract $ marginalize xv m
in mutualInfo p_ygx p_x
nodeCondMutualInfo :: (Floating prob, Ord prob, Ord a)
=> [String]
-> [String]
-> [String]
-> ModelDistribution prob a
-> prob
nodeCondMutualInfo yv xv zv m =
let p_xgz = extractC $ conditionalize xv zv m
p_xgyz = \(y,z) -> (extractC $ conditionalize xv (yv++zv) m) $ y ++ z
p_z = extract $ marginalize zv m
p_ygz = extractC $ conditionalize yv zv m
in condMutualInfo p_xgz p_xgyz p_z (p_ygz -|- p_z)
nodeSpecificInformation :: (Floating prob, Ord prob, Ord a)
=> [String]
-> [a]
-> [String]
-> ModelDistribution prob a
-> prob
nodeSpecificInformation sv s av m =
let p_s = (extract $ marginalize sv m) ?= s
p_ags = (extractC $ conditionalize av sv m) s
p_sga = (extractC $ conditionalize sv av m)
in expected p_ags (\a-> log2 ((p_sga a) ?= s / p_s))
nodeMinimalInformation :: (Floating prob, Ord prob, Ord a)
=> [String]
-> [[String]]
-> ModelDistribution prob a
-> prob
nodeMinimalInformation sv avs m =
let p_s = (extract $ marginalize sv m)
in expected p_s (\s -> minimum $
map (\av -> nodeSpecificInformation sv s av m) avs)
nodeMaxMinimalInformation :: (Floating prob, Ord prob, Ord a)
=> [String]
-> [[[String]]]
-> ModelDistribution prob a
-> prob
nodeMaxMinimalInformation sv avss m =
let p_s = (extract $ marginalize sv m)
in expected p_s (\s -> maximum' $
map (\avs -> minimum $
map (\av -> nodeSpecificInformation sv s av m)
avs) avss)
nodeInformationFlow :: (Floating prob, Ord prob, Ord a)
=> [String]
-> [String]
-> Model prob a
-> prob
nodeInformationFlow as bs m = undefined
maximum' [] = 0
maximum' xs = maximum xs
data PINode prob a = PINode { partial :: [[a]]
, value :: prob
} deriving (Eq)
instance (Show a, Show prob) => Show (PINode prob a) where
show n = (intercalate "," $
map (\xs -> (intercalate " & " (map (trim . show . show) xs))) $ partial n)
++ " = "++ (show $ value n)
trim = reverse . tail . reverse . tail
type PILattice prob a = Gr (PINode prob a) ()
piDecomp :: (Floating prob, Ord prob, Ord a)
=> [String]
-> [String]
-> ModelDistribution prob a
-> PILattice prob String
piDecomp s as m =
let g = piLattice as
g' = nmap minimalInformation g
in gmap (subtractLower g') g'
where minimalInformation piN =
piN { value = (nodeMinimalInformation s (partial piN) m)}
subtractLower g' (inA, n, piN, outA) =
(inA,
n,
piN { value = (value piN) -
(nodeMaxMinimalInformation s
(map (partial . snd) $ filter
(\(n',piN') -> (n',n) `elem` (edges g') ) $ labNodes g')
m )} ,
outA)
piLattice :: (Eq a, Fractional prob) => [a] -> PILattice prob a
piLattice modelNodes =
let nodes = autoLabel $ map (\n -> PINode { partial = n, value = 0 })
$ piSet modelNodes
size = (length nodes)-1
edges = [ (i,j,()) | i <- [0..size], j <- [0..size],
i /= j && -- Reflexive reduction
(partial $ snd $ nodes !! i) -<= (partial $ snd $ nodes !! j) ]
compositionEdges = [ (i,k,()) | (i,j,()) <- edges, k <- [0..size],
(j,k,()) `elem` edges]
reducedEdges = edges \\ compositionEdges
in mkGraph nodes reducedEdges
piSet :: (Eq a) => [a] -> [[[a]]]
piSet r = [ a | a <- (powerset' . powerset') r,
(null [ a | x <- a, y <- a,
x /= y &&
(length x <= length y) && (subset x y)])]
muSet :: (Eq a) => Int -> [a] -> [([[a]], Double)]
muSet k ls =
let r = piSet ls
p = powerset' ls
n = length ls
in map (\beta -> (beta, (fromIntegral n)/(fromIntegral k) * (esoohc n k) * (fromIntegral $ length $
filter (\a -> length a == k && beta -<= [a]) p) - 1 ) ) r
muSet' :: (Eq a) => [a] -> [([[a]], Double)]
muSet' ls =
let r = piSet ls
p = powerset' ls
n = length ls
in map (\beta ->
(beta,
(sum [ (esoohc n k) * ( fromIntegral
$ (length $ filter
(\a -> length a == k && beta -<= [a]) p)) - (fromIntegral k)/(fromIntegral n)
| k <- [1 .. (n-1)] ] ) ) ) r
esoohc :: Int -> Int -> Double
esoohc n k = 1.0 / (n `choose` k)
(-<<) :: (Eq a) => [[a]] -> [[a]] -> Bool
(-<<) a b = (a -<= b) && (a /= b)
(-<=) :: (Eq a) => [[a]] -> [[a]] -> Bool
(-<=) a b = foldr q True b
where q bel cur = cur && (not . null $ filter (\ael -> subset ael bel) a )
subset :: (Eq a) => [a] -> [a] -> Bool
subset a b = foldr q True a
where q el cur = cur && (elem el b)
powerset' :: [a] -> [[a]]
powerset' = tail . powerset
powerset :: [a] -> [[a]]
powerset [] = [[]]
powerset (x:xs) = xss /\/ map (x:) xss
where xss = powerset xs
(/\/) :: [a] -> [a] -> [a]
[] /\/ ys = ys
(x:xs) /\/ ys = x : (ys /\/ xs) | mahrz/hit | src/Numeric/Information/Model/IT.hs | mit | 7,597 | 0 | 27 | 2,891 | 2,755 | 1,486 | 1,269 | 180 | 1 |
{-# LANGUAGE TupleSections #-}
module SparseMatrix (
pageRank
) where
import Data.Graph (Graph, Edge, graphFromEdges, edges, outdegree)
import Data.Array ((!), range, bounds)
import qualified Numeric.LinearAlgebra as M
import Numeric.LinearAlgebra ((!#>), tr, mkSparse, cmap, size, konst)
import Numeric.Natural (Natural)
import qualified Data.Map.Strict as Map
import Data.Map.Strict (keys, mapWithKey)
type RealNum = Double
type Vector = M.Vector RealNum
type Matrix = M.GMatrix
type Map a b = Map.Map a b
pageRank :: (Ord a) => RealNum -> Natural -> Map a [a] -> Map a RealNum
pageRank damp its as = Map.fromList . zip (keys as) . M.toList . pageRank' damp its $ as
pageRank' :: (Ord a) => RealNum -> Natural -> Map a [a] -> Vector
pageRank' damp its m =
(!! fromIntegral its)
. flip iterate (uniform . length $ m) . (.) (dampen damp) . (!#>)
. pageRankMatrix $ m
uniform :: Int -> Vector
uniform n = konst (recip . fromIntegral $ n) n
dampen :: RealNum -> Vector -> Vector
dampen factor v = cmap ((+) ((1-factor)/(fromIntegral . size $ v)) . (*) factor) v
pageRankMatrix :: (Ord a) => Map a [a] -> Matrix
pageRankMatrix = adjacency . normalize . unstick . connectionGraph
normalize :: Graph -> Map Edge RealNum
normalize g = mapWithKey (\ (f,_) x -> x / out f) . Map.fromAscList . fmap (,1) . edges $ g
where
out = fromIntegral . (outdegree g !)
adjacency :: Map Edge RealNum -> Matrix
adjacency = tr . mkSparse . Map.toList
connectionGraph :: (Ord a) => Map a [a] -> Graph
connectionGraph = (\ (x,_,_) -> x) . graphFromEdges . fmap (\ (x,xs) -> (x,x,xs)) . Map.toList
unstick :: Graph -> Graph
unstick g = fmap (\ l -> if null l then (range . bounds) g else l) g
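-- A small usage sketch (hypothetical three-page link graph, damping 0.85,
-- 20 iterations; shown only to illustrate the expected input shape):
--
-- > pageRank 0.85 20 (Map.fromList [("a", ["b"]), ("b", ["a", "c"]), ("c", ["a"])])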
| Magnap/pagerank-hs | src/SparseMatrix.hs | mit | 1,716 | 0 | 13 | 345 | 750 | 418 | 332 | 36 | 2 |
{-# htermination digitToInt :: Char -> Int #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_digitToInt_1.hs | mit | 47 | 0 | 2 | 8 | 3 | 2 | 1 | 1 | 0 |
{-# LANGUAGE OverloadedStrings, QuasiQuotes #-}
module Y2018.M01.D08.Solution where
{--
Okay, one more, then a thought on generalization.
Friday, we looked at sections classifying newspaper articles as a graph, but
now we want to store unique sections in a table and relate those sections back
to the articles they classify.
Like before. So.
--}
import Control.Monad (void)
import Control.Monad.State
import qualified Data.Map as Map
import qualified Data.Set as Set
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.FromRow
import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToField
import Database.PostgreSQL.Simple.ToRow
-- below imports available via 1HaskellADay git repository
import qualified Data.MemoizingTable as MT
import Store.SQL.Connection (withConnection)
import Store.SQL.Util.Indexed
import Store.SQL.Util.Pivots
import Y2017.M12.D20.Solution -- for Block
import Y2017.M12.D27.Solution hiding (pa) -- for DatedArticle
import Y2017.M12.D29.Solution hiding (etl) -- for filtering out AP articles
import Y2018.M01.D02.Solution hiding (storeAncilliary) -- for keywords and etl
import Y2018.M01.D04.Solution hiding (storeAncilliary) -- for Author
-- Okay, sections are like subjects are like keywords, but not quite like
-- authors (not quite). In Prolog we call a simple value with typing information
-- a tagged-type. We don't have those here, but maybe we should?
-- first thought on generalization
data Section = Sect { section :: String }
deriving (Eq, Ord, Show)
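-- (If we did want an explicit tagged-type in the Prolog sense, one possible
-- Haskell shape is a phantom-tagged newtype. This is purely an illustration;
-- the exercises here just use a dedicated wrapper per table, like Sect above:
--
-- > newtype Tagged tag a = Tagged { unTag :: a }
-- > type Section' = Tagged "section" String   -- needs DataKinds
-- )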
fetchSectionStmt :: Query
fetchSectionStmt = [sql|SELECT * FROM section|]
fetchSections :: Connection -> IO [IxValue Section]
fetchSections conn = query_ conn fetchSectionStmt
instance FromRow Section where
fromRow = Sect <$> field
-- okay, we can get sections previously stored. Fetch them and populate a
-- memoizing table with them.
-- Now, from an article, extract its sections. That's easy enough.
-- Now we put that all together and populate the section table with new
-- section information
storeSections :: Connection -> [IxValue (DatedArticle Authors)] -> IO ()
storeSections conn =
-- we write our storeSections using the memoizeStore from the bonus:
memoizeStore conn fetchSections insertSections insertArtSectPivotStmt
(map Sect . sections)
{--
fetchSections conn >>= \presects ->
let memtable = MT.start (map ix2tup presects)
(ids,arts) = unzip (map ix2tup ixarts)
stat = execState (zipWithM_ MT.triageM ids (map sectionf arts))
(memtable,Map.empty)
substate = Set.toList (MT.newValues (fst stat))
in insertSections conn substate >>= \ixsec ->
let table = MT.update (zip (map idx ixsec) substate) (fst stat)
in void (executeMany conn insertArtSectPivotStmt
(evalState buildPivots (table, snd stat)))
where sectionf = map Sect . sections
--}
insertSectionStmt :: Query
insertSectionStmt = [sql|INSERT INTO section (section) VALUES (?) returning id|]
insertSections :: Connection -> [Section] -> IO [Index]
insertSections conn = returning conn insertSectionStmt
insertArtSectPivotStmt :: Query
insertArtSectPivotStmt =
[sql|INSERT INTO article_section (article_id,section_id) VALUES (?,?)|]
-- hint: use Pivots module for pivot inserts
instance ToRow Section where
toRow (Sect sect) = [toField sect]
-- with storeSections defined, define a new storeAncilliary to extract and
-- store values for keywords, authors and sections.
storeAncilliary :: Connection -> [IxValue (DatedArticle Authors)] -> IO ()
storeAncilliary conn arts =
storeSubjects conn arts >>
storeAuthors conn arts >>
storeSections conn arts
-- and apply this storeAncilliary to the etl function, and you've captured
-- the information we're interested in with the articles sampled.
-- Hint: use pa to process the articles
{--
>>> withConnection (flip (etl pa storeAncilliary) sample)
There are 72 sections:
$ select * from section LIMIT 10;
id section
1 business/banking
2 business/field-notes
3 business/stocks
4 entertainment/arts
5 entertainment/music
6 entertainment/tv/larry-bonko
7 inside-business/calendar
8 inside-business/news/columns
9 inside-business/news/economic-development
10 inside-business/news/first-person-features
There are 125 article-section pivots
$ select * from article_section LIMIT 10;
id article_id section_10
1 1 68
2 2 30
3 3 38
4 4 56
5 5 51
6 6 57
7 7 58
8 8 38
9 9 33
10 9 34
--}
{-- BONUS -----------------------------------------------------------------
Keywords, Authors, and sections. All three follow the same pattern:
* fetch previously stored values
* memoize
* materials values from article set, see which values are new
* store new values, relate all values.
If we are following the same pattern, is there a generalization of the
functions for all of this, so we can have one function that does the work
for any memoize-y type? What would this function look like?
memoizeStore :: ToRow a => Ord a =>
Connection
-> (Connection -> IO [IxValue a])
-> (Connection -> [a] -> IO [Index])
-> Query
-> (DatedArticle Authors -> [a])
-> [IxValue (DatedArticle Authors)]
-> IO ()
memoizeStore conn fetcher storer pivotQuery getter ixarts =
-- so, 1. we need to fetch currently-stored values and start the MemoizingTable
fetcher conn >>= \ixvals ->
let memtable = MT.start (map ix2tup ixvals)
(ids,arts) = unzip (map ix2tup ixarts)
stat = execState (zipWithM_ MT.triageM ids (map getter arts))
(memtable,Map.empty)
substate = Set.toList (MT.newValues (fst stat))
in storer conn substate >>= \ixnewvals ->
let table = MT.update (zip (map idx ixnewvals) substate) (fst stat)
in void (executeMany conn pivotQuery
(evalState buildPivots (table, snd stat)))
-- AHA! IT WORKED! Moving this to the Pivots module after this exercise.
--}
| geophf/1HaskellADay | exercises/HAD/Y2018/M01/D08/Solution.hs | mit | 6,048 | 0 | 10 | 1,189 | 498 | 305 | 193 | 46 | 1 |
-- | Perform an actual build, generate a binary package database and a
-- documentation directory in the process.
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Stackage.PerformBuild
( performBuild
, PerformBuild (..)
, BuildException (..)
, pbDocDir
) where
import Control.Concurrent.Async (async)
import Control.Concurrent.STM.TSem
import Control.Monad.Writer.Strict (execWriter, tell)
import qualified Data.Map as Map
import Data.NonNull (fromNullable)
import Filesystem (canonicalizePath, createTree,
getWorkingDirectory, isDirectory,
removeTree, rename)
import Filesystem.Path (parent)
import qualified Filesystem.Path as F
import Stackage.BuildConstraints
import Stackage.BuildPlan
import Stackage.PackageDescription
import Stackage.Prelude hiding (pi)
import System.Directory (findExecutable)
import System.Environment (getEnvironment)
import System.IO (IOMode (WriteMode),
withBinaryFile)
import System.IO.Temp (withSystemTempDirectory)
data BuildException = BuildException (Map PackageName BuildFailure) [Text]
deriving Typeable
instance Exception BuildException
instance Show BuildException where
show (BuildException m warnings) =
unlines $ map go (mapToList m) ++ map unpack warnings
where
go (PackageName name, bf) = concat
[ name
, ": "
, show bf
]
data BuildFailure = DependencyFailed PackageName
| DependencyMissing PackageName
| ToolMissing ExeName
| NotImplemented
| BuildFailureException SomeException
deriving (Show, Typeable)
instance Exception BuildFailure
data PerformBuild = PerformBuild
{ pbPlan :: BuildPlan
, pbInstallDest :: FilePath
, pbLog :: ByteString -> IO ()
, pbLogDir :: FilePath
, pbJobs :: Int
, pbGlobalInstall :: Bool
-- ^ Register packages in the global database
, pbEnableTests :: Bool
, pbEnableLibProfiling :: Bool
, pbVerbose :: Bool
, pbAllowNewer :: Bool
-- ^ Pass --allow-newer to cabal configure
}
data PackageInfo = PackageInfo
{ piPlan :: PackagePlan
, piName :: PackageName
, piResult :: TMVar Bool
}
waitForDeps :: Map ExeName (Set PackageName)
-> Map PackageName PackageInfo
-> Set Component
-> BuildPlan
-> PackageInfo
-> IO a
-> IO a
waitForDeps toolMap packageMap activeComps bp pi action = do
atomically $ do
mapM_ checkPackage $ Map.keys $ filterUnused $ sdPackages $ ppDesc $ piPlan pi
forM_ (Map.keys $ filterUnused $ sdTools $ ppDesc $ piPlan pi) $ \exe -> do
case lookup exe toolMap >>= fromNullable . map checkPackage . setToList of
Nothing
| isCoreExe exe -> return ()
-- https://github.com/jgm/zip-archive/issues/23
-- | otherwise -> throwSTM $ ToolMissing exe
| otherwise -> return ()
Just packages -> ofoldl1' (<|>) packages
action
where
filterUnused :: Ord key => Map key DepInfo -> Map key DepInfo
filterUnused =
mapFromList . filter (go . snd) . mapToList
where
go = not . null . intersection activeComps . diComponents
checkPackage package | package == piName pi = return ()
checkPackage package =
case lookup package packageMap of
Nothing
| isCore package -> return ()
| otherwise -> throwSTM $ DependencyMissing package
Just dep -> do
res <- readTMVar $ piResult dep
unless res $ throwSTM $ DependencyFailed package
isCore = (`member` siCorePackages (bpSystemInfo bp))
isCoreExe = (`member` siCoreExecutables (bpSystemInfo bp))
withCounter :: TVar Int -> IO a -> IO a
withCounter counter = bracket_
(atomically $ modifyTVar counter (+ 1))
(atomically $ modifyTVar counter (subtract 1))
withTSem :: TSem -> IO a -> IO a
withTSem sem = bracket_ (atomically $ waitTSem sem) (atomically $ signalTSem sem)
-- | Returns @Nothing@ if installing to a global database
pbDatabase :: PerformBuild -> Maybe FilePath
pbDatabase pb
| pbGlobalInstall pb = Nothing
| otherwise = Just $ pbInstallDest pb </> "pkgdb"
pbBinDir, pbLibDir, pbDataDir, pbDocDir :: PerformBuild -> FilePath
pbBinDir pb = pbInstallDest pb </> "bin"
pbLibDir pb = pbInstallDest pb </> "lib"
pbDataDir pb = pbInstallDest pb </> "share"
pbDocDir pb = pbInstallDest pb </> "doc"
performBuild :: PerformBuild -> IO [Text]
performBuild pb = do
cwd <- getWorkingDirectory
performBuild' pb
{ pbInstallDest = cwd </> pbInstallDest pb
, pbLogDir = cwd </> pbLogDir pb
}
performBuild' :: PerformBuild -> IO [Text]
performBuild' pb@PerformBuild {..} = withBuildDir $ \builddir -> do
-- First make sure to fetch all of the dependencies... just in case Hackage
-- has an outage. Don't feel like wasting hours of CPU time.
pbLog $ encodeUtf8 "Pre-fetching all packages\n"
let toDownload = flip map (mapToList $ bpPackages pbPlan)
$ \(name, plan) -> unpack $ concat
[ display name
, "-"
, display $ ppVersion plan
]
withCheckedProcess
(proc "cabal"
$ "fetch"
: "--no-dependencies"
: toDownload)
$ \ClosedStream Inherited Inherited -> return ()
let removeTree' fp = whenM (isDirectory fp) (removeTree fp)
mapM_ removeTree' [pbInstallDest, pbLogDir]
forM_ (pbDatabase pb) $ \db -> do
createTree $ parent db
withCheckedProcess (proc "ghc-pkg" ["init", fpToString db])
$ \ClosedStream Inherited Inherited -> return ()
pbLog $ encodeUtf8 "Copying built-in Haddocks\n"
copyBuiltInHaddocks (pbDocDir pb)
sem <- atomically $ newTSem pbJobs
active <- newTVarIO (0 :: Int)
let toolMap = makeToolMap $ bpPackages pbPlan
packageMap <- fmap fold $ forM (mapToList $ bpPackages pbPlan)
$ \(name, plan) -> do
let piPlan = plan
piName = name
piResult <- newEmptyTMVarIO
return $ singletonMap name PackageInfo {..}
errsVar <- newTVarIO mempty
warningsVar <- newTVarIO id
mutex <- newMVar ()
env <- getEnvironment
haddockFiles <- newTVarIO mempty
forM_ packageMap $ \pi -> void $ async $ singleBuild pb SingleBuild
{ sbSem = sem
, sbErrsVar = errsVar
, sbWarningsVar = warningsVar
, sbActive = active
, sbToolMap = toolMap
, sbPackageMap = packageMap
, sbBuildDir = builddir
, sbPackageInfo = pi
, sbRegisterMutex = mutex
, sbModifiedEnv = maybe
id
(\db -> (("HASKELL_PACKAGE_SANDBOX", fpToString db):))
(pbDatabase pb)
(filter allowedEnv $ map fixEnv env)
, sbHaddockFiles = haddockFiles
}
void $ tryAny $ atomically $ readTVar active >>= checkSTM . (== 0)
warnings <- ($ []) <$> readTVarIO warningsVar
errs <- readTVarIO errsVar
when (not $ null errs) $ throwM $ BuildException errs warnings
return warnings
where
withBuildDir f = withSystemTempDirectory "stackage-build" (f . fpFromString)
fixEnv (p, x)
-- Thank you Windows having case-insensitive environment variables...
| toUpper p == "PATH" = (p, fpToString (pbBinDir pb) ++ pathSep : x)
| otherwise = (p, x)
allowedEnv (k, _) = k `notMember` bannedEnvs
-- | Separate for the PATH environment variable
pathSep :: Char
#ifdef mingw32_HOST_OS
pathSep = ';'
#else
pathSep = ':'
#endif
-- | Environment variables we don't allow to be passed on to child processes.
bannedEnvs :: Set String
bannedEnvs = setFromList
[ "STACKAGE_AUTH_TOKEN"
]
data SingleBuild = SingleBuild
{ sbSem :: TSem
, sbErrsVar :: TVar (Map PackageName BuildFailure)
, sbWarningsVar :: TVar ([Text] -> [Text])
, sbActive :: TVar Int
, sbToolMap :: Map ExeName (Set PackageName)
, sbPackageMap :: Map PackageName PackageInfo
, sbBuildDir :: FilePath
, sbPackageInfo :: PackageInfo
, sbRegisterMutex :: MVar ()
, sbModifiedEnv :: [(String, String)]
, sbHaddockFiles :: TVar (Map Text FilePath) -- ^ package-version, .haddock file
}
singleBuild :: PerformBuild -> SingleBuild -> IO ()
singleBuild pb@PerformBuild {..} SingleBuild {..} =
withCounter sbActive
$ handle updateErrs
$ (`finally` void (atomically $ tryPutTMVar (piResult sbPackageInfo) False))
$ inner
where
libComps = setFromList [CompLibrary, CompExecutable]
testComps = insertSet CompTestSuite libComps
inner = do
let wfd comps =
waitForDeps sbToolMap sbPackageMap comps pbPlan sbPackageInfo
. withTSem sbSem
wfd libComps buildLibrary
wfd testComps runTests
name = display $ piName sbPackageInfo
namever = concat
[ name
, "-"
, display $ ppVersion $ piPlan sbPackageInfo
]
runIn wdir outH cmd args =
withCheckedProcess cp $ \ClosedStream UseProvidedHandle UseProvidedHandle ->
(return () :: IO ())
where
cp = (proc (unpack $ asText cmd) (map (unpack . asText) args))
{ cwd = Just $ fpToString wdir
, std_out = UseHandle outH
, std_err = UseHandle outH
, env = Just sbModifiedEnv
}
runParent = runIn sbBuildDir
runChild = runIn childDir
childDir = sbBuildDir </> fpFromText namever
log' t = do
i <- readTVarIO sbActive
errs <- readTVarIO sbErrsVar
pbLog $ encodeUtf8 $ concat
[ t
, " (pending: "
, tshow i
, ", failures: "
, tshow $ length errs
, ")\n"
]
libOut = pbLogDir </> fpFromText namever </> "build.out"
testOut = pbLogDir </> fpFromText namever </> "test.out"
testRunOut = pbLogDir </> fpFromText namever </> "test-run.out"
wf fp inner' = do
createTree $ parent fp
withBinaryFile (fpToString fp) WriteMode inner'
configArgs = ($ []) $ execWriter $ do
when pbAllowNewer $ tell' "--allow-newer"
tell' "--package-db=clear"
tell' "--package-db=global"
forM_ (pbDatabase pb) $ \db -> tell' $ "--package-db=" ++ fpToText db
tell' $ "--libdir=" ++ fpToText (pbLibDir pb)
tell' $ "--bindir=" ++ fpToText (pbBinDir pb)
tell' $ "--datadir=" ++ fpToText (pbDataDir pb)
tell' $ "--docdir=" ++ fpToText (pbDocDir pb)
tell' $ "--flags=" ++ flags
when (pbEnableLibProfiling && pcEnableLibProfile) $
tell' "--enable-library-profiling"
where
tell' x = tell (x:)
flags :: Text
flags = unwords $ map go $ mapToList pcFlagOverrides
where
go (name', isOn) = concat
[ if isOn then "" else "-"
, unFlagName name'
]
PackageConstraints {..} = ppConstraints $ piPlan sbPackageInfo
buildLibrary = wf libOut $ \outH -> do
let run a b = do when pbVerbose $ log' (unwords (a : b))
runChild outH a b
log' $ "Unpacking " ++ namever
runParent outH "cabal" ["unpack", namever]
log' $ "Configuring " ++ namever
run "cabal" $ "configure" : configArgs
log' $ "Building " ++ namever
run "cabal" ["build"]
log' $ "Copying/registering " ++ namever
run "cabal" ["copy"]
withMVar sbRegisterMutex $ const $
run "cabal" ["register"]
-- Even if the tests later fail, we can allow other libraries to build
-- on top of our successful results
--
-- FIXME do we need to wait to do this until after Haddocks build?
-- otherwise, we could have a race condition and try to build a
-- dependency's haddocks before this finishes
atomically $ putTMVar (piResult sbPackageInfo) True
when (pcHaddocks /= Don'tBuild && not (null $ sdModules $ ppDesc $ piPlan sbPackageInfo)) $ do
log' $ "Haddocks " ++ namever
hfs <- readTVarIO sbHaddockFiles
let hfsOpts = flip map (mapToList hfs) $ \(pkgVer, hf) -> concat
[ "--haddock-options=--read-interface="
, "../"
, pkgVer
, "/,"
, fpToText hf
]
args = "haddock"
: "--hyperlink-source"
: "--html"
: "--hoogle"
: "--html-location=../$pkg-$version/"
: hfsOpts
eres <- tryAny $ run "cabal" args
forM_ eres $ \() -> do
renameOrCopy
(childDir </> "dist" </> "doc" </> "html" </> fpFromText name)
(pbDocDir pb </> fpFromText namever)
enewPath <- tryIO
$ canonicalizePath
$ pbDocDir pb
</> fpFromText namever
</> fpFromText name <.> "haddock"
case enewPath of
Left e -> warn $ tshow e
Right newPath -> atomically
$ modifyTVar sbHaddockFiles
$ insertMap namever newPath
case (eres, pcHaddocks) of
(Left e, ExpectSuccess) -> throwM e
(Right (), ExpectFailure) -> warn $ namever ++ ": unexpected Haddock success"
_ -> return ()
runTests = wf testOut $ \outH -> do
let run = runChild outH
when (pbEnableTests && pcTests /= Don'tBuild) $ do
log' $ "Test configure " ++ namever
run "cabal" $ "configure" : "--enable-tests" : configArgs
eres <- tryAny $ do
log' $ "Test build " ++ namever
run "cabal" ["build"]
log' $ "Test run " ++ namever
run "cabal" ["test", "--log=" ++ fpToText testRunOut]
case (eres, pcTests) of
(Left e, ExpectSuccess) -> throwM e
(Right (), ExpectFailure) -> warn $ namever ++ ": unexpected test success"
_ -> return ()
warn t = atomically $ modifyTVar sbWarningsVar (. (t:))
updateErrs exc = do
log' $ concat
[ display (piName sbPackageInfo)
, ": "
, tshow exc
]
atomically $ modifyTVar sbErrsVar $ insertMap (piName sbPackageInfo) exc'
where
exc' =
case fromException exc of
Just bf -> bf
Nothing -> BuildFailureException exc
renameOrCopy :: FilePath -> FilePath -> IO ()
renameOrCopy src dest = rename src dest `catchIO` \_ -> copyDir src dest
copyDir :: FilePath -> FilePath -> IO ()
copyDir src dest =
runResourceT $ sourceDirectoryDeep False src $$ mapM_C go
where
src' = src </> ""
go fp = forM_ (F.stripPrefix src' fp) $ \suffix -> do
let dest' = dest </> suffix
liftIO $ createTree $ parent dest'
sourceFile fp $$ (sinkFile dest' :: Sink ByteString (ResourceT IO) ())
copyBuiltInHaddocks :: FilePath -> IO ()
copyBuiltInHaddocks docdir = do
mghc <- findExecutable "ghc"
case mghc of
Nothing -> error "GHC not found on PATH"
Just ghc -> do
src <- canonicalizePath
(parent (fpFromString ghc) </> "../share/doc/ghc/html/libraries")
copyDir src docdir
| jeffreyrosenbluth/stackage | Stackage/PerformBuild.hs | mit | 16,404 | 0 | 23 | 5,611 | 4,242 | 2,137 | 2,105 | 350 | 8 |
module GHCJS.DOM.WindowBase64 (
) where
| manyoo/ghcjs-dom | ghcjs-dom-webkit/src/GHCJS/DOM/WindowBase64.hs | mit | 42 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
{-# OPTIONS_GHC -F -pgmF htfpp #-}
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances #-}
module Test.GenServer (htf_thisModulesTests) where
import Test.Framework
import Control.Monad.State
import Control.Concurrent.MVar (
MVar,
newEmptyMVar,
putMVar,
takeMVar,
isEmptyMVar
)
import Data.Unique (Unique)
import Control.Concurrent (threadDelay, forkIO)
import Concurrency.OTP.GenServer
import Concurrency.OTP.Process
ms n = threadDelay $ n * 1000
data CounterState = Counter { counter :: Int }
data Command = Get | Inc
instance GenServerState Command Int CounterState where
handle_call Get _ = gets $ reply . counter
handle_cast Inc = do
modify $ \st -> st { counter = counter st + 1 }
return noreply
test_successStart = do
cell <- newEmptyMVar
Ok serv <- start $ do
liftIO $ putMVar cell ()
return $ Counter 0
isEmptyMVar cell >>= assertBool . not
test_failureStart = do
result <- start $ do
error "Bad params"
return $ Counter 0
assertBool $ isFail result
where isFail Fail = True
isFail _ = False
test_call = do
Ok serv <- start $ return $ Counter 0
call serv Get >>= assertEqual 0
test_callWithTimeout = do
Ok serv <- start $ return $ Counter 0
callWithTimeout serv (Just 500) Get >>= assertEqual (Just 0)
data SlowCallState = SlowCallState
instance GenServerState () () SlowCallState where
handle_call _ _ = liftIO (ms 10) >> return (reply ())
handle_cast _ = return noreply
test_callWithFailByTimeout = do
Ok serv <- start $ return SlowCallState
callWithTimeout serv (Just 10) () >>= assertEqual Nothing
callWithTimeout serv (Just 20) () >>= assertEqual (Just ())
data StopOnCallState = StopOnCallState
instance GenServerState (MVar ()) () StopOnCallState where
handle_call _ _ = return $ stop "stop"
handle_cast r = do
liftIO $ putMVar r ()
return $ stop "stop"
test_stopOnCall = do
req <- newEmptyMVar
Ok serv <- start $ return StopOnCallState
assertThrowsIO (call serv req) isServerDead
test_stopOnCast = do
req <- newEmptyMVar
Ok serv <- start $ return StopOnCallState
cast serv req
takeMVar req
isAlive serv >>= assertBool . not
data ReplyAndStopState = ReplyAndStopState
instance GenServerState () () ReplyAndStopState where
handle_call () _ = return $ replyAndStop () "stop"
handle_cast () = return noreply
test_replyAndStop = do
Ok serv <- start $ return ReplyAndStopState
call serv ()
threadDelay 1000
isAlive serv >>= assertBool . not
data DelayedReplyState = DelayedReplyState (Maybe Unique)
instance GenServerState () () DelayedReplyState where
handle_call () reqId = do
put $ DelayedReplyState $ Just reqId
return noreply
handle_cast () = do
DelayedReplyState (Just reqId) <- get
replyWith reqId ()
return noreply
test_delayedReply = do
Ok server <- start $ return $ DelayedReplyState Nothing
forkIO $ do
threadDelay $ 10*1000
cast server ()
call server ()
assertBool True
test_cast = do
Ok serv <- start $ return $ Counter 1
call serv Get >>= assertEqual 1
cast serv Inc
call serv Get >>= assertEqual 2
data TerminatedState = TS { termCell :: MVar () }
instance GenServerState Int () TerminatedState where
handle_call 1 _ = return $ reply () -- for fail
handle_cast 1 = return noreply
onTerminate (TS cell) = putMVar cell ()
test_terminate = do
cell <- newEmptyMVar
Ok serv <- start $ return $ TS cell
call serv 1 -- ok
isEmptyMVar cell >>= assertBool
cast serv 2 -- fail
takeMVar cell >>= assertEqual ()
isServerDead ServerIsDead = True
test_failOnCall = do
cell <- newEmptyMVar
Ok serv <- start $ return $ TS cell
call serv 1 -- ok
isEmptyMVar cell >>= assertBool
assertThrowsIO (call serv 2) isServerDead
takeMVar cell >>= assertEqual ()
| SPY/haskell-otp | tests/Test/GenServer.hs | mit | 3,807 | 0 | 13 | 809 | 1,345 | 631 | 714 | 116 | 2 |
{-|
Module : EU4.IdeaGroups
Description : Feature handler for Europa Universalis IV idea groups
-}
module EU4.IdeaGroups (
IdeaGroup (..)
, Idea (..)
, parseEU4IdeaGroups
, writeEU4IdeaGroups
) where
import Control.Arrow (first)
import Control.Monad (forM, forM_, foldM)
import Control.Monad.Trans (MonadTrans (..), MonadIO (..))
import Control.Monad.Except (ExceptT (..), runExceptT, MonadError (..))
import Control.Monad.State (MonadState (..), gets)
import Data.Array ((!))
import Data.Either (partitionEithers)
import Data.Monoid ((<>))
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HM
import Data.ByteString (ByteString)
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Text.PrettyPrint.Leijen.Text (Doc)
import qualified Text.PrettyPrint.Leijen.Text as PP
import Text.Regex.TDFA (Regex)
import qualified Text.Regex.TDFA as RE
import Debug.Trace (traceM)
import Abstract -- everything
import qualified Doc
import EU4.Common -- everything
import FileIO (Feature (..), writeFeatures)
import Messages -- everything
import QQ (pdx)
import SettingsTypes ( PPT, Settings (..), Game (..)
, IsGame (..), IsGameData (..), IsGameState (..)
, getGameL10n
, setCurrentFile, withCurrentFile)
-- | Empty idea group. Starts off Nothing everywhere, except id and name
-- (should get filled in immediately).
newIdeaGroup :: IdeaGroup
newIdeaGroup = IdeaGroup undefined undefined Nothing Nothing Nothing Nothing False [] Nothing Nothing
-- | Take the idea group scripts from game data and parse them into idea group
-- data structures.
parseEU4IdeaGroups :: (IsGameData (GameData g),
IsGameState (GameState g),
Monad m) =>
HashMap String GenericScript -> PPT g m IdeaTable
parseEU4IdeaGroups ideaGroupScripts = do
groupFiles <- sequenceA $ flip HM.mapWithKey ideaGroupScripts $ \path ideaGroupScript -> do
-- For each file, parse the groups
groupsWithErrors <- setCurrentFile path (mapM (runExceptT . parseIdeaGroup) ideaGroupScript)
let (errs, groups) = partitionEithers groupsWithErrors
forM_ errs $ \err -> traceM $ "Warning while parsing idea groups: " ++ err
return . HM.fromList . map (\ig -> (ig_name ig, ig)) $ groups
-- groupFiles :: HashMap String (HashMap Text IdeaGroup)
-- This maps a filename to a map of id -> group.
return (HM.unions (HM.elems groupFiles))
-- | Interpret a single idea group script.
parseIdeaGroup :: (IsGameData (GameData g), IsGameState (GameState g), Monad m) =>
GenericStatement -> ExceptT String (PPT g m) IdeaGroup
parseIdeaGroup (StatementBare _) = throwError "bare statement at top level"
parseIdeaGroup [pdx| %left = %right |] = case right of
CompoundRhs parts -> case left of
CustomLhs _ -> throwError "internal error: custom lhs"
IntLhs _ -> throwError "int lhs at top level"
AtLhs _ -> throwError "statement starting with @ in idea group file"
GenericLhs name _ -> do
name_loc <- lift $ getGameL10n name
ig <- foldM (curry (lift . uncurry ideaGroupAddSection))
(newIdeaGroup { ig_name = name
, ig_name_loc = name_loc }) parts
lift . withCurrentFile $ \file -> return (ig { ig_path = Just file })
_ -> throwError "warning: unknown statement in idea group file"
parseIdeaGroup _ = error "idea group defined via operator other than ="
-- | Interpret one section of an idea group script.
ideaGroupAddSection :: (IsGameData (GameData g), Monad m) =>
IdeaGroup -> GenericStatement -> PPT g m IdeaGroup
ideaGroupAddSection ig [pdx| $label = %rhs |] =
case label of
"category" -> case T.toLower <$> textRhs rhs of
Just "adm" -> return ig { ig_category = Just Administrative }
Just "dip" -> return ig { ig_category = Just Diplomatic }
Just "mil" -> return ig { ig_category = Just Military }
_ -> return ig
"start" -> case rhs of
CompoundRhs scr -> return ig { ig_start = Just scr }
_ -> return ig
"bonus" -> case rhs of
CompoundRhs scr -> return ig { ig_bonus = Just scr }
_ -> return ig
"trigger" -> case rhs of
CompoundRhs scr -> return ig { ig_trigger = Just scr }
_ -> return ig
"ai_will_do" -> case rhs of
CompoundRhs scr -> return ig { ig_ai_will_do = Just (aiWillDo scr) }
_ -> return ig
"free" -> case T.toLower <$> textRhs rhs of
Just "yes" -> return ig { ig_free = True }
_ -> return ig
_ -> case rhs of
CompoundRhs scr -> do
ideaname_loc <- getGameL10n label
return ig { ig_ideas = ig_ideas ig ++ [Idea label ideaname_loc scr] }
_ -> return ig
ideaGroupAddSection ig _ = return ig
-- | Pick an icon for the idea, based on the first of its effects.
iconForIdea' :: Idea -> Maybe Text
iconForIdea' idea = case idea_effects idea of
([pdx| $eff = %_ |]:_) -> iconKey eff
_ -> Nothing
iconForIdea :: Idea -> Doc
iconForIdea idea = case iconForIdea' idea of
Nothing -> mempty
Just icon -> Doc.strictText icon
-- | Do some text substitutions to add 'ideaNicon' args to the idea group
-- template, and remove/comment out undesirable icon templates.
--
-- TODO: convert icon keys to icon file names.
--
-- XXX: do this properly in the first place.
fixup :: Doc -> Doc
fixup = Doc.strictText . T.unlines . map (TE.decodeUtf8
-- . mungIdeaIcons multiIdeaIcons
. mungIdeaIcons singleIdeaIcons
. killIcons
. TE.encodeUtf8) . T.lines . Doc.doc2text where
badIcons, singleIdeaIcons, multiIdeaIcons{-, multiIdeaStartIcons -} :: Regex
badIcons = RE.makeRegex ("((tradition.|bonus) = |\\* )({{icon[^}]*}}) "::ByteString)
singleIdeaIcons = RE.makeRegex ("idea(.)effect = {{icon\\|([a-z ]*)\\|28px}} "::ByteString)
multiIdeaIcons = RE.makeRegex ("(:)({{icon[^}]*}}) "::ByteString)
-- multiIdeaStartIcons = RE.makeRegex ("idea(.)effect = {{plainlist\\|\\* {{icon\\|([a-z ]*)\\|28px}} "::ByteString)
killIcons :: ByteString -> ByteString
killIcons s = case RE.matchOnceText badIcons s of
Just (pre, matcharr, post) -> mconcat
[pre, fst (matcharr ! 1)
,"<!-- ", fst (matcharr ! 3), " -->"
,post]
Nothing -> case RE.matchOnceText multiIdeaIcons s of
Just (pre, matcharr, post) -> mconcat
[pre, fst (matcharr ! 1)
,"<!-- ", fst (matcharr ! 2), " -->"
,post]
Nothing -> s
mungIdeaIcons :: Regex -> ByteString -> ByteString
mungIdeaIcons re s = case RE.matchOnceText re s of
Nothing -> s
Just (pre, matcharr, post) -> let nth = fst (matcharr ! 1) in mconcat
[pre, "idea", nth, "icon = ", iconFileB (fst (matcharr ! 2))
,"\n| idea", nth, "effect = ", post]
-- | Present the parsed idea groups as wiki text and write them to the
-- appropriate files.
writeEU4IdeaGroups :: (EU4Info g, MonadIO m) => PPT g m ()
writeEU4IdeaGroups = do
groups <- getIdeaGroups
let pathedGroups :: [Feature IdeaGroup]
pathedGroups = map (\ig -> Feature {
featurePath = ig_path ig
, featureId = Just (ig_name ig)
, theFeature = Right ig })
(HM.elems groups)
writeFeatures "idea groups"
pathedGroups
ppIdeaGroup {- need IdeaGroup -> PPT g IO (FilePath, Doc) -}
-- | Present a single idea group.
ppIdeaGroup :: (EU4Info g, Monad m) => IdeaGroup -> PPT g (ExceptT Text m) Doc
ppIdeaGroup ig = fixup <$> do
version <- gets (gameVersion . getSettings)
let name = ig_name_loc ig
case (ig_bonus ig, length (ig_ideas ig)) of
(Just bonus, 7) -> do
let rawideas = ig_ideas ig
unindent = map (first (const 0))
ideas <- forM rawideas $ \idea -> do
effmsgs <- ppMany (idea_effects idea)
case effmsgs of
-- Remove the bullets from a single effect
[_] -> imsg2doc (unindent effmsgs)
{- This doesn't work, due to the template's abuse of
deflist markup and misbehaviour of MediaWiki.
-- Wrap multiple effects in a plainlist
_ -> do
effsdoc <- imsg2doc effmsgs
return $ templateDoc "plainlist" [effsdoc]
-}
-- Instead, replace bullets with colons.
[] -> return mempty
(msg:msgs) -> do
firstmsg <- imsg2doc (unindent [msg])
rest <- mapM (\m -> (":" <>) <$> imsg2doc [m]) (unindent msgs)
return (firstmsg <> PP.line <> PP.vsep rest)
bonus_pp'd <- imsg2doc . unindent =<< ppMany bonus
mtrigger_pp'd <- case ig_trigger ig of
Nothing -> return Nothing
Just trigger -> Just <$> (imsg2doc =<< ppMany trigger)
let name_loc = Doc.strictText . T.replace " Ideas" "" $ name
ig_id_t = ig_name ig
ig_id = Doc.strictText ig_id_t
trads <- case ig_start ig of
Just [trad1s, trad2s] -> do
trad1 <- imsg2doc . map (first (const 0)) =<< ppOne trad1s
trad2 <- imsg2doc . map (first (const 0)) =<< ppOne trad2s
return $ Right (trad1, trad2)
Just trads -> return . Left . Just . length $ trads
Nothing -> return (Left Nothing)
return . mconcat $
["<section begin=", ig_id, "/>", PP.line
,"{{Idea group", PP.line
,"| name = ", name_loc, PP.line
,"| version = ", Doc.strictText version, PP.line
,case ig_category ig of
Nothing -> case trads of
Right (trad1, trad2) -> mconcat -- assume groups with no category are country ideas
["| country = yes", PP.line
,"| tradition1 = ", trad1, PP.line
,"| tradition2 = ", trad2, PP.line
]
Left (Just ntrads) -> mconcat
["<!-- Looks like a country idea group, but has non-standard number of traditions ("
,Doc.strictText (T.pack (show ntrads))
,") -->", PP.line]
Left Nothing -> mconcat
["<!-- Looks like a country idea group, but has no traditions -->", PP.line]
Just cat -> mconcat
["<!-- Category: ", Doc.pp_string (show cat), " -->", PP.line
,"| events = ", name_loc, " idea group events", PP.line]
,"| idea1 = ", Doc.strictText (idea_name_loc (rawideas !! 0)), PP.line
,iconForIdea (rawideas !! 0)
,"| idea1effect = ", ideas !! 0, PP.line
,"| idea2 = ", Doc.strictText (idea_name_loc (rawideas !! 1)), PP.line
,iconForIdea (rawideas !! 1)
,"| idea2effect = ", ideas !! 1, PP.line
,"| idea3 = ", Doc.strictText (idea_name_loc (rawideas !! 2)), PP.line
,iconForIdea (rawideas !! 2)
,"| idea3effect = ", ideas !! 2, PP.line
,"| idea4 = ", Doc.strictText (idea_name_loc (rawideas !! 3)), PP.line
,iconForIdea (rawideas !! 3)
,"| idea4effect = ", ideas !! 3, PP.line
,"| idea5 = ", Doc.strictText (idea_name_loc (rawideas !! 4)), PP.line
,iconForIdea (rawideas !! 4)
,"| idea5effect = ", ideas !! 4, PP.line
,"| idea6 = ", Doc.strictText (idea_name_loc (rawideas !! 5)), PP.line
,iconForIdea (rawideas !! 5)
,"| idea6effect = ", ideas !! 5, PP.line
,"| idea7 = ", Doc.strictText (idea_name_loc (rawideas !! 6)), PP.line
,iconForIdea (rawideas !! 6)
,"| idea7effect = ", ideas !! 6, PP.line
,"| bonus = ", bonus_pp'd, PP.line
] ++ (case mtrigger_pp'd of
Just trigger_pp'd ->
["| notes = Can be selected only if the following are true:", PP.line
,trigger_pp'd
,PP.line]
Nothing -> [])
++ ["}}", PP.line
,"<section end=", ig_id, "/>"]
(Nothing, _) -> throwError $ "Idea group " <> name <> " has no bonus"
(_, n) -> throwError $ "Idea group " <> name <> " has non-standard number of ideas (" <> T.pack (show n) <> ")"
| HairyDude/pdxparse | src/EU4/IdeaGroups.hs | mit | 13,306 | 0 | 29 | 4,497 | 3,369 | 1,799 | 1,570 | -1 | -1 |
{-# LANGUAGE CPP, OverloadedStrings #-}
{- |
Module : System.JBI.Commands.Cabal
Description : cabal-install support
Copyright : (c) Ivan Lazar Miljenovic
License : MIT
Maintainer : [email protected]
-}
module System.JBI.Commands.Cabal
( Cabal
, CabalMode
, Sandbox
, Nix
) where
import System.JBI.Commands.BuildTool
import System.JBI.Commands.Nix
import System.JBI.Commands.Tool
import System.JBI.Environment
import System.JBI.Tagged
import Control.Applicative (liftA2, (<*>))
import Control.Monad (filterM)
import Data.Bool (bool)
import Data.Maybe (isJust, maybeToList)
import Data.Proxy (Proxy(Proxy))
import Data.Version (Version, makeVersion)
import System.Directory (doesFileExist, getCurrentDirectory, listDirectory,
removeFile)
import System.Exit (ExitCode, die, exitSuccess)
import System.FilePath (takeExtension, (</>))
import System.IO.Error (ioError, isDoesNotExistError, tryIOError)
import qualified Distribution.Package as CPkg
import Distribution.PackageDescription (GenericPackageDescription,
condBenchmarks,
condExecutables,
condLibrary,
condTestSuites)
import qualified Distribution.PackageDescription.Parse as CParse
import Distribution.Verbosity (silent)
#if MIN_VERSION_Cabal (2,0,0)
import Distribution.Types.UnqualComponentName (UnqualComponentName,
unUnqualComponentName)
#endif
--------------------------------------------------------------------------------
data Cabal mode
instance Tool (Cabal mode) where
commandName = "cabal"
instance (CabalMode mode) => BuildTool (Cabal mode) where
canUseCommand = canUseMode
commandProjectRoot = cabalProjectRoot
hasBuildArtifacts = hasModeArtifacts
commandPrepare = cabalPrepare
commandTargets = cabalTargets
commandBuild env cmd = cabalTry env cmd . cabalBuild env cmd
commandRepl env cmd rargs = cabalTry env cmd . cabalRepl env cmd rargs
commandClean = cabalClean
commandTest = liftA2 (<*>) cabalTry cabalTest
commandBench = liftA2 (<*>) cabalTry cabalBench
commandExec = cabalExec
commandRun env cmd = (cabalTry env cmd .) . cabalRun env cmd
commandUpdate = cabalUpdate
cabalTry :: (CabalMode mode) => Env -> Tagged (Cabal mode) CommandPath
-> IO ExitCode -> IO ExitCode
cabalTry env cmd = tryCommand "Command failed, trying to re-configure"
(cabalConfigure env cmd)
instance (CabalMode mode) => NamedTool (Cabal mode) where
prettyName p = "cabal+" ++ modeName (getMode p)
getMode :: proxy (Cabal mode) -> Proxy mode
getMode _ = Proxy
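-- For example (illustrative), the 'Sandbox' instance below renders as
-- "cabal+sandbox" and the 'Nix' one as "cabal+nix" when passed to 'prettyName'.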
class CabalMode mode where
modeName :: proxy mode -> String
-- | Optional minimal version of @cabal@ required. Used to provide
-- default instance of @canUseMode@.
--
-- @since 0.2.0.0
needsMinCabal :: Maybe (Tagged (Cabal mode) Version)
needsMinCabal = Nothing
-- | @since 0.2.0.0
canUseMode :: Env -> Tagged (Cabal mode) CommandPath -> IO Bool
canUseMode env cp = case needsMinCabal of
Nothing -> return hasGHC
Just mv -> maybe hasGHC (mv <=)
<$> commandVersion (envConfig env) cp
where
hasGHC = isJust (ghc (envTools env))
cabalProjectRoot :: Tagged (Cabal mode) CommandPath
-> IO (Maybe (Tagged (Cabal mode) ProjectRoot))
cabalProjectRoot = withTaggedF go
where
-- Type signature needed to make withTaggedF happy, though we
-- don't actually use the command itself for this.
go :: FilePath -> IO (Maybe FilePath)
go _ = recurseUpFindFile isCabalFile
hasModeArtifacts :: Tagged (Cabal mode) ProjectRoot -> IO Bool
cabalPrepare :: Env -> Tagged (Cabal mode) CommandPath -> IO ExitCode
cabalTargets :: Config -> Tagged (Cabal mode) CommandPath
-> IO [Tagged (Cabal mode) ProjectTarget]
cabalTargets _ = withTaggedF go
where
-- Make withTaggedF happy
go :: FilePath -> IO [String]
go _ = cabalFileComponents
  -- | This is an additional function beyond those found in 'BuildTool'. May
  --   include installing dependencies.
cabalConfigure :: Env -> Tagged (Cabal mode) CommandPath -> IO ExitCode
cabalBuild :: Env -> Tagged (Cabal mode) CommandPath
-> Maybe (Tagged (Cabal mode) ProjectTarget) -> IO ExitCode
cabalBuild = commandArgTarget "build"
cabalRepl :: Env -> Tagged (Cabal mode) CommandPath
-> Tagged (Cabal mode) Args
-> Maybe (Tagged (Cabal mode) ProjectTarget)
-> IO ExitCode
cabalRepl env cmd rargs = commandArgsTarget ("repl" : ghcArgs) env cmd
where
ghcArgs = ["--ghc-options", unwords (stripTag rargs :: Args)]
cabalClean :: Env -> Tagged (Cabal mode) CommandPath -> IO ExitCode
cabalTest :: Env -> Tagged (Cabal mode) CommandPath -> IO ExitCode
cabalTest = commandArg "test"
cabalBench :: Env -> Tagged (Cabal mode) CommandPath -> IO ExitCode
cabalBench = commandArg "bench"
cabalExec :: Env -> Tagged (Cabal mode) CommandPath -> String -> Args -> IO ExitCode
cabalExec env cmd prog progArgs = commandArgs args env cmd
where
args = "exec" : prog : "--" : progArgs
cabalRun :: Env -> Tagged (Cabal mode) CommandPath -> Tagged (Cabal mode) ProjectTarget
-> Args -> IO ExitCode
cabalRun env cmd prog progArgs = commandArgs args env cmd
where
args = "run" : componentName (stripTag prog) : "--" : progArgs
cabalUpdate :: Env -> Tagged (Cabal mode) CommandPath -> IO ExitCode
cabalUpdate = commandArg "update"
--------------------------------------------------------------------------------
data Sandbox
instance CabalMode Sandbox where
modeName _ = "sandbox"
needsMinCabal = Just (tag (makeVersion [1,18]))
hasModeArtifacts pr = doesFileExist (stripTag pr </> "cabal.sandbox.config")
cabalPrepare = commandArgs ["sandbox", "init"]
cabalConfigure env cmd = tryConfigure
where
install = commandArgs ["install", "--only-dependencies"
, "--enable-tests", "--enable-benchmarks"]
env cmd
tryInstall = tryCommand "Installation failed; updating index."
(cabalUpdate env cmd)
install
tryConfigure = tryCommand "Configuring failed; checking dependencies"
tryInstall
configure
configure = commandArgs ["configure", "--enable-tests", "--enable-benchmarks"]
env cmd
-- Note: we don't treat "dist" as part of the tool artifacts, but it
-- doesn't make sense without the sandbox so remove it as well.
cabalClean env cmd = commandArg "clean" env cmd
.&&. commandArgs ["sandbox", "delete"] env cmd
--------------------------------------------------------------------------------
data Nix
instance CabalMode Nix where
modeName _ = "nix"
-- We don't test for nix-instantiate here, as it's just used if it
-- can be used.
canUseMode env _ = return (has nixShell && has cabal2Nix)
where
has :: (NixSupport -> Maybe (Installed a)) -> Bool
has f = isJust (f (nix (envTools env)))
hasModeArtifacts pr = or <$> mapM (doesFileExist . (stripTag pr </>))
["shell.nix", "default.nix"]
-- Note that commandPrepare is meant to be run within ProjectRoot
cabalPrepare env _ = case path <$> cabal2Nix (nix (envTools env)) of
Nothing -> die "cabal2Nix required"
Just c2n -> tryRunToFile (envConfig env) "shell.nix" c2n ["--shell", "."]
-- It is tempting to want to run cabal2nix again here just in case,
-- but people might have customised variants (different
-- haskellPackages set, etc.).
--
-- Instead, people need to run @jbi prepare@ if the .cabal file
-- changes.
cabalConfigure env _ = case path <$> nixShell nixEnv of
Nothing -> die "nix-shell required"
Just ns -> do
-- We now evaluate canBench twice, which isn't ideal.
--
-- Should also warn if it's False.
args <- extraArgs
cArgs <- cabalArgs
tryRunErr
"Configuration failed; you may need to manually enable 'withBenchmarkDepends' or 'doBenchmark' in your shell.nix file."
(tryRun cfg ns (args ++ ["--run", cArgs]))
where
extraArgs = bool [] ["--arg", "doBenchmark", "true"] <$> canBench
nixEnv = nix (envTools env)
cfg = envConfig env
canBench =
case path <$> nixInstantiate nixEnv of
Nothing -> return False
Just ni -> do
res <- tryRunLine cfg (stripTag ni) ["--eval", "--expr", "with import <nixpkgs> {}; haskell.lib ? doBenchmark"]
return $ case res of
Just "true" -> maybe False (>= c2nBenchSupport) (cabal2Nix nixEnv >>= version)
_ -> False
c2nBenchSupport :: Tagged Cabal2Nix Version
c2nBenchSupport = tag (makeVersion [2,6])
cabalArgs = unwords . (["cabal", "configure", "--enable-tests"] ++) . bnchArgs <$> canBench
where
bnchArgs canB
| canB = ["--enable-benchmarks"]
| otherwise = []
cabalClean env cmd = commandArg "clean" env cmd
.&&. rmFile "shell.nix"
.&&. rmFile "default.nix"
where
rmFile file = do
rmStatus <- tryIOError (removeFile file)
case rmStatus of
-- We're guessing as to which file is the one being used
-- here, so an error because a file doesn't exist is OK;
-- anything else is serious and should be re-thrown.
Left err | not (isDoesNotExistError err) -> ioError err
_ -> exitSuccess
--------------------------------------------------------------------------------
isCabalFile :: FilePath -> Bool
isCabalFile = (== ".cabal") . takeExtension
--------------------------------------------------------------------------------
-- The Cabal library likes to really keep changing things...
cabalFileComponents :: IO [String]
cabalFileComponents = do
dir <- getCurrentDirectory
cntns <- map (dir </>) <$> listDirectory dir
files <- filterM doesFileExist cntns
let cabalFiles = filter isCabalFile files
case cabalFiles of
[] -> return []
(c:_) -> getComponents <$> parseCabalFile c
parseCabalFile :: FilePath -> IO GenericPackageDescription
parseCabalFile =
#if MIN_VERSION_Cabal(2,0,0)
CParse.readGenericPackageDescription
#else
CParse.readPackageDescription
#endif
silent
type ComponentName =
#if MIN_VERSION_Cabal (2,0,0)
UnqualComponentName
#else
String
#endif
rawComponentName :: ComponentName -> String
rawComponentName =
#if MIN_VERSION_Cabal (2,0,0)
unUnqualComponentName
#else
id
#endif
packageName :: GenericPackageDescription -> String
packageName =
#if MIN_VERSION_Cabal (2,0,0)
CPkg.unPackageName
#else
(\(CPkg.PackageName nm) -> nm)
#endif
. CPkg.packageName
getComponents :: GenericPackageDescription -> [String]
getComponents gpd = concat
[ getLib
, getType condExecutables "exe"
, getType condTestSuites "test"
, getType condBenchmarks "bench"
]
where
pkgName = packageName gpd
getLib
| isJust (condLibrary gpd) = ["lib:" ++ pkgName]
| otherwise = []
getType f typ = map (\cmp -> typ ++ ':' : rawComponentName (fst cmp)) (f gpd)
--------------------------------------------------------------------------------
commandArgsTarget :: Args -> Env -> Tagged (Cabal mode) CommandPath
-> Maybe (Tagged (Cabal mode) ProjectTarget) -> IO ExitCode
commandArgsTarget args env cmd mt = commandArgs args' env cmd
where
args' = args ++ maybeToList (fmap stripTag mt)
commandArgTarget :: String -> Env -> Tagged (Cabal mode) CommandPath
-> Maybe (Tagged (Cabal mode) ProjectTarget) -> IO ExitCode
commandArgTarget = commandArgsTarget . (:[])
commandArg :: String -> Env -> Tagged (Cabal mode) CommandPath
-> IO ExitCode
commandArg arg = commandArgs [arg]
commandArgs :: Args -> Env -> Tagged (Cabal mode) CommandPath
-> IO ExitCode
commandArgs args env cmd = tryRun (envConfig env) cmd args
| ivan-m/jbi | lib/System/JBI/Commands/Cabal.hs | mit | 13,031 | 0 | 20 | 3,741 | 2,950 | 1,536 | 1,414 | -1 | -1 |
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable #-}
module Algebraic.Config where
import qualified Autolib.TES.Binu as B
import Expression.Op
import Data.Typeable
import Autolib.ToDoc
import Autolib.Reader
import Autolib.Set
data Information = Formula | Value deriving Typeable
$(derives [makeReader, makeToDoc] [''Information])
data Ops a => Type c a =
Make { max_formula_size_for_instance :: Int
, operators_in_instance :: B.Binu ( Op a )
, operators_in_solution :: B.Binu ( Op a )
, restrictions :: [ c ]
, information :: Information
, max_formula_size_for_solution :: Int
}
deriving ( Typeable )
$(derives [makeReader, makeToDoc] [''Type])
-- Local Variables:
-- mode: haskell
-- End:
| Erdwolf/autotool-bonn | src/Algebraic/Config.hs | gpl-2.0 | 732 | 2 | 11 | 137 | 191 | 113 | 78 | 19 | 0 |
{- |
Module : $Header$
Description : Utility functions for writing object logic instances
Copyright : (c) Kristina Sojakova, DFKI Bremen 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : experimental
Portability : portable
-}
module Framework.WriteLogicUtils where
import Data.List
tab :: String
tab = " "
multiOpt :: String
multiOpt = "MultiParamTypeClasses"
synOpt :: String
synOpt = "TypeSynonymInstances"
prefixBy :: String -> [String] -> [String]
prefixBy s xs = map (\ x -> s ++ x) xs
sepHoriz :: [String] -> String
sepHoriz = concat . (prefixBy " ")
sepTabVert :: [String] -> String
sepTabVert = concat . (prefixBy $ "\n" ++ tab)
mkCompOpt :: [String] -> String
mkCompOpt opts = "{-# LANGUAGE " ++ intercalate ", " opts ++ " #-}"
mkModDecl :: String -> String
mkModDecl n = "module " ++ n ++ " where"
mkImports :: [String] -> String
mkImports imps =
intercalate "\n" $ prefixBy "import " imps
mkLid :: String -> String
mkLid lid = "data " ++ lid ++ " = " ++ lid ++ " deriving Show"
mkImpl :: String -> String -> String -> String
mkImpl f lid imp =
f ++ " " ++ lid ++ " = " ++ imp
inheritImpl :: String -> String -> String -> String
inheritImpl s l ml = mkImpl s l $ s ++ " " ++ ml
mkInst :: String -> String -> [String] -> [String] -> String
mkInst inst lid args impls =
let header = "instance " ++ inst ++ " " ++ lid
argL = length args > 1
impE = not $ null impls
in header ++
if argL && impE then
sepTabVert $ args ++ ["where"] ++ impls else
if argL then
sepTabVert args else
if impE then
sepHoriz args ++ " where" ++ sepTabVert impls else
sepHoriz args
mkDecl :: String -> String -> String -> String
mkDecl n t v = n ++ " :: " ++ t ++ "\n" ++ n ++ " = " ++ v
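-- Illustrative outputs (not part of the original module):
--   mkCompOpt [multiOpt, synOpt]
--     ==> "{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances #-}"
--   mkDecl "x" "Int" "5" ==> "x :: Int\nx = 5"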
| nevrenato/Hets_Fork | Framework/WriteLogicUtils.hs | gpl-2.0 | 1,888 | 0 | 13 | 501 | 569 | 300 | 269 | 43 | 4 |
module T where
import Tests.KesterelBasis
-- A local signal, not emitted.
e = signalE $ \(s::Signal) -> loopE pauseE
c = unitA >>> runE e
prop_correct = property (\xs -> simulate c xs == zip (repeat false) xs)
test_constructive = isJust (isConstructive c)
| peteg/ADHOC | Tests/08_Kesterel/021_signal_not_emitted.hs | gpl-2.0 | 260 | 0 | 11 | 47 | 96 | 51 | 45 | -1 | -1 |
-- Copyright (C) 2002-2004 David Roundy
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2, or (at your option)
-- any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; see the file COPYING. If not, write to
-- the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
-- Boston, MA 02110-1301, USA.
module Darcs.Repository.Motd
( getMotd
, showMotd
) where
import Control.Monad ( unless )
import qualified Data.ByteString as B (null, hPut, empty, ByteString)
import System.IO ( stdout )
import Darcs.Repository.External ( fetchFilePS, Cachable(..) )
import Darcs.Util.Global ( darcsdir )
import Darcs.Util.Exception ( catchall )
-- | Fetch and return the message of the day for a given repository.
getMotd :: String -> IO B.ByteString
getMotd repo = fetchFilePS motdPath (MaxAge 600) `catchall` return B.empty
where
motdPath = repo ++ "/" ++ darcsdir ++ "/prefs/motd"
-- | Display the message of the day for a given repository.
showMotd :: String -> IO ()
showMotd repo = do
motd <- getMotd repo
unless (B.null motd) $ do
B.hPut stdout motd
putStrLn $ replicate 22 '*'
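-- For example (illustrative), @showMotd "http://hub.darcs.net/user/repo"@ fetches
-- @<repo>/_darcs/prefs/motd@ (falling back to an empty motd on any error) and,
-- if it is non-empty, prints it followed by a line of 22 asterisks.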
| DavidAlphaFox/darcs | src/Darcs/Repository/Motd.hs | gpl-2.0 | 1,602 | 0 | 11 | 319 | 250 | 145 | 105 | 18 | 1 |
f =>= g = g . extend f | hmemcpy/milewski-ctfp-pdf | src/content/3.7/code/haskell/snippet13.hs | gpl-3.0 | 22 | 0 | 6 | 7 | 19 | 8 | 11 | 1 | 1 |
-- Trie data structure for counting words in a string built using "basic"
-- components and data structures.
-- The trie type has been made instance of both Monad and Traversable
-- (and thus also Functor, Applicative and Foldable) as well as Show.
-- A new custom type class Mapping is implemented for finite map behaviours.
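-- An illustrative word-counting sketch in GHCi (not part of the original module):
--
-- >>> let bump t w = update (Just . maybe 1 succ) t w
-- >>> let t = foldl bump (empty :: Trie Char Int) (words "to be or not to be")
-- >>> lookup t "to"
-- Just 2
-- >>> lookup t "xyzzy"
-- Nothing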
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
-- {-# LANGUAGE Strict #-} -- Enabled in GHC 8.x but no apparent impact on performance detected
module Trie where
import Control.Applicative (Applicative, pure, (<$>), (<*>))
import Control.Monad hiding (sequence)
import qualified Data.Foldable as F
import qualified Data.Map.Strict as M
import Data.Traversable
import Prelude hiding (lookup, sequence)
import Text.Printf
-- Will use existence of value to mark end of word
data Trie c v = Trie { value :: !(Maybe v), tails :: !(M.Map c (Trie c v)) }
class Ord c => Mapping m c v where
empty :: m c v
lookup :: m c v -> [c] -> Maybe v
update :: (Maybe v -> Maybe v) -> m c v -> [c] -> m c v
delete :: m c v -> [c] -> m c v
toList :: m c v -> [([c],v)]
instance Ord c => Mapping Trie c v where
empty = Trie { value = Nothing, tails = M.empty }
lookup trie [] = value trie
lookup trie (c:cs) = do
tail <- M.lookup c (tails trie)
lookup tail cs
{-# Specialise lookup :: Trie Char Int -> String -> Maybe Int #-}
update f trie [] = trie { value = f $ value trie }
update f trie (c:cs) = trie { tails = updated } where
updated = case M.lookup c (tails trie) of
Just sub -> M.update (\_ -> Just $ update f sub cs) c (tails trie)
Nothing -> M.insert c newbranch (tails trie) where
newbranch = update f empty cs
{-# Specialise update :: (Maybe Int -> Maybe Int)-> Trie Char Int -> String -> Trie Char Int #-}
delete = update (const Nothing)
toList trie = rv ++ concatMap builder (M.toList $ tails trie) where
rv = case value trie of
Nothing -> []
Just v -> [([],v)]
builder :: Ord c => (c, Trie c v) -> [([c],v)]
builder (c,t) = case value t of
Nothing -> prefix c $ toList t
Just v -> prefix c $ toList t
where prefix c ll = fmap (\(s,v) -> (c:s,v)) ll
add :: Ord c => v -> Trie c v -> [c] -> Trie c v
add value = update (\_ -> Just value )
-- If key is repeated, then later value overrides earlier value
fromList :: Ord c => [([c],v)] -> Trie c v
fromList = foldl (\ t (k,v) -> add v t k ) empty
instance (Ord c, Eq c, Eq v) => Eq (Trie c v) where
t1 == t2 = toList t1 == toList t2
instance Ord c => Traversable (Trie c) where
traverse f t = Trie <$> root <*> children where
root = sequenceA( fmap f (value t) )
children = sequenceA $ M.map (traverse f) (tails t)
instance Ord c => F.Foldable (Trie c) where
foldMap = foldMapDefault
instance Ord c => Functor (Trie c) where
fmap = fmapDefault
--
-- Notice that (as with Data.Map et al.) Foldable.toList produces a list of only the values and,
-- unlike Trie.toList, does NOT produce an association list (from which the trie can be reconstructed).
instance Ord c => Monad (Trie c) where
(>>=) t f = let tl = toList t in (fromList.concat) (map g tl) where
g (k,v) = map (\(a,b) -> (k++a, b)) (toList (f v))
return v = add v empty []
instance Ord c=> Applicative (Trie c) where
(<*>) = ap
pure = return
instance (Show v, Show c, Ord c) => Show (Trie c v) where
show t = summary ++ display (take 15 graph) where
graph = toList t
summary = printf "Trie with %v key-value pairs, starting with:\n" (length graph)
display = concatMap ( \(k,v) -> printf "%15s : %4v \n" (show k) (show v) )
size :: Ord c => Trie c v -> Int
size t = 1 + sum (fmap (size.snd) $ M.toList $ tails t)
{- Made Trie into a monad with t >>= f defined as follows:
-
- for each key k in t with corresponding value v, take the keys ks of f v. Form new keys k's = (k ++ ks)
- by concatenating. Replace the key k in t with the keys k's and give them values from (f v).
-}
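-- Illustrative consequence of that definition (not in the original code):
--   fromList [("ab", 2)] >>= \v -> fromList [("c", v), ("d", v + 1)]
-- equals fromList [("abc", 2), ("abd", 3)]: keys concatenate and values are
-- taken from the tries produced by the function.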
| JBons/Haskell-wordcount | src/trie.hs | gpl-3.0 | 4,334 | 0 | 15 | 1,261 | 1,456 | 764 | 692 | 74 | 1 |
data FreeF f a = forall i. FMap (i -> a) (f i) | hmemcpy/milewski-ctfp-pdf | src/content/3.11/code/haskell/snippet09.hs | gpl-3.0 | 46 | 0 | 8 | 12 | 32 | 18 | 14 | -1 | -1 |
process s = do
upStr <- upCase s
toWords upStr | hmemcpy/milewski-ctfp-pdf | src/content/3.4/code/haskell/snippet18.hs | gpl-3.0 | 55 | 0 | 8 | 18 | 25 | 10 | 15 | 3 | 1 |
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DataKinds #-}
{-# OPTIONS_GHC -fcontext-stack=100 #-}
module Main where
import Control.Applicative ((<$>), (<*>))
import Control.Concurrent (forkIO)
import Control.Concurrent.Async
import Control.Concurrent.STM
import Control.Monad (void)
import Data.Map
import Data.Monoid
import Data.Proxy
import Data.Vinyl (rget)
import HBooru.Network
import HBooru.Parsers.Gelbooru
import HBooru.Parsers.Ichijou
import HBooru.Parsers.Konachan
import HBooru.Parsers.Safebooru
import HBooru.Parsers.Yandere
import HBooru.Types
import System.Directory
import System.Environment (getArgs)
import System.Exit
import System.FilePath
main ∷ IO ()
main = getArgs >>= \case
[] → putStr help
"-d":d:ts → downloadTo d ts
xs → fetchImageLinks xs >>= \x → mapM_ putStrLn [ y | Right y ← x ]
-- | Downloads all the images matching the given tags to the given
-- directory.
downloadTo ∷ FilePath → [Tag] → IO ()
downloadTo fp xs = doesDirectoryExist fp >>= \case
False → print (fp <> " doesn't exist.") >> exitFailure
True → do
let f p = run <$> fetchAllTaggedPosts p XML xs
where
run s =
fromList [ (unVAL $ (Proxy ∷ Proxy "md5") `rget` r,
unVAL $ (Proxy ∷ Proxy "file_url") `rget` r)
| Right r ← s ]
g ← f Gelbooru
i ← f Ichijou
k ← f Konachan
s ← f Safebooru
y ← f Yandere
let ls = g <> i <> k <> s <> y
mkFp (m, u) = let e = snd $ splitExtension u
in fp </> m <.> e
dcs ← Prelude.map (fp </>) <$> getDirectoryContents fp
let notInFiles = (`notElem` dcs) . snd
fs = Prelude.filter notInFiles [ (snd x, mkFp x) | x ← assocs ls ]
lfs = show $ length fs
ds ← atomically newTChan
got ← newTVarIO (0 ∷ Integer)
let loop = atomically (readTChan ds) >>= \case
EndOfQueue → putStrLn "Done."
x → do
v ← atomically $ modifyTVar got succ >> readTVar got
putStrLn (concat ["(", show v, "/", lfs, "): ", show x]) >> loop
void . forkIO $ downloadFiles fs ds 5
loop
-- | Fetches the image links matching the given tags, as a list of parse
-- results: file URLs on success, or failures tagged with the originating site.
fetchImageLinks ∷ [Tag] → IO [Parse String]
fetchImageLinks xs = do
let f p = Prelude.map getInfo <$> fetchAllTaggedPosts p XML xs
where
getInfo (Left (PF m)) = Left . PF $ unwords [show p, m]
getInfo (Right r) =
return . unVAL $ (Proxy ∷ Proxy "file_url") `rget` r
(g, i, k, s, y) ← runConcurrently $ (,,,,)
<$> Concurrently (f Gelbooru)
<*> Concurrently (f Ichijou)
<*> Concurrently (f Konachan)
<*> Concurrently (f Safebooru)
<*> Concurrently (f Yandere)
let ls = g <> i <> k <> s <> y
return $ length ls `seq` ls
help ∷ String
help = unlines $
[ "Usage: h-booru tag1 [tag2] … [tagN]"
, ""
, "Prints a list of links matching the tags"
, ""
, "h-booru -d DIRECTORY tag1 [tag2] … [tagN]"
, ""
, "Downloads the files with the given tags to the given directory."
, "Naming scheme is md5.originalextension"
, "The downloader will skip files it already sees downloaded, by filename."
]
| Fuuzetsu/h-booru | src/Main.hs | gpl-3.0 | 3,376 | 0 | 25 | 926 | 1,071 | 558 | 513 | 86 | 3 |
-- Determine the inorder, preorder, and postorder traversals of a tree.
data Tree a = Empty | Branch a (Tree a) (Tree a)
-- inorder
p68 :: Tree Char -> String
p68 Empty = ""
p68 (Branch x Empty Empty) = [x]
p68 (Branch x l r) = p68 l ++ [x] ++ p68 r
-- preorder
p68' :: Tree Char -> String
p68' Empty = ""
p68' (Branch x Empty Empty) = [x]
p68' (Branch x l r) = x : p68' l ++ p68' r
-- postorder
p68'' :: Tree Char -> String
p68'' Empty = ""
p68'' (Branch x Empty Empty) = [x]
p68'' (Branch x l r) = p68'' l ++ p68'' r ++ [x]
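-- Illustrative check (not part of the original): for
--   t = Branch 'a' (Branch 'b' Empty Empty) (Branch 'c' Empty Empty)
-- the traversals are p68 t == "bac", p68' t == "abc", p68'' t == "bca".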
| yalpul/CENG242 | H99/61-69/p68.hs | gpl-3.0 | 522 | 0 | 8 | 122 | 259 | 132 | 127 | 13 | 1 |
module Graphics.UI.Bottle.Animation.Id
( AnimId
, joinId, subId
, mappingFromPrefixMap
) where
import Control.Lens.Operators
import qualified Data.ByteString as SBS
import Data.List.Lens (prefixed)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
type AnimId = [SBS.ByteString]
joinId :: AnimId -> AnimId -> AnimId
joinId = (++)
subId :: AnimId -> AnimId -> Maybe AnimId
subId folder path = path ^? prefixed folder
mappingFromPrefixMap :: Map AnimId AnimId -> AnimId -> AnimId
mappingFromPrefixMap m animId =
do
(animIdPrefixCandidate, newAnimId) <- Map.lookupLE animId m
suffix <- animId ^? prefixed animIdPrefixCandidate
newAnimId <> suffix & Just
& fromMaybe animId
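-- Illustrative behaviour (assuming OverloadedStrings for the ByteString ids;
-- not from the original source):
--   subId ["widget"] ["widget", "button"] == Just ["button"]
--   mappingFromPrefixMap (Map.fromList [(["old"], ["new"])]) ["old", "x"] == ["new", "x"]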
| rvion/lamdu | bottlelib/Graphics/UI/Bottle/Animation/Id.hs | gpl-3.0 | 834 | 0 | 10 | 202 | 232 | 132 | 100 | 23 | 1 |
module Z3Test where
import Language.SMTLib2.Z3
import Language.SMTLib2.QuickCheck
import Distribution.TestSuite
import Distribution.TestSuite.QuickCheck
import Data.Either
tests :: IO [Test]
tests = return [testProperty "round-trip"
(roundTripTest emptyContext (return z3Solver))]
| hguenther/smtlib2 | backends/z3/test/Z3Test.hs | gpl-3.0 | 300 | 0 | 11 | 45 | 73 | 42 | 31 | 9 | 1 |
module SharedWorld.Utils (
checksum
) where
import Data.Digest.Pure.SHA ( sha1, showDigest )
import Data.Serialize ( Serialize, encode )
import qualified Data.ByteString.Lazy.Char8 as BL
-- | Compute the checksum of some serializable object.
checksum :: (Serialize s) => s -> String
checksum = showDigest . sha1 . BL.fromStrict . encode
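-- For example (illustrative), @checksum ("hello" :: String)@ yields the
-- 40-character hex SHA-1 of the cereal encoding of the string; note that the
-- encoding includes a length prefix, so this differs from hashing the raw bytes.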
| scvalex/shared-world | src/SharedWorld/Utils.hs | gpl-3.0 | 350 | 0 | 7 | 62 | 87 | 54 | 33 | 7 | 1 |
module System.DevUtils.Parser.Lines.String (
defaultLines,
runLines,
weirdLines
) where
-- runLines defaultLines "#a\r\nb\rc\nd\r\ne\ndkgodkgsdo"
-- runLines weirdLines "0.1>.{....}3.4"
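-- Under the default settings the first example above is expected to yield
-- Right ["b","c","d","e","dkgodkgsdo"]: the "#a" line is dropped as a comment
-- and each \r, \n or \r\n delimited chunk becomes one item (an illustrative
-- expectation, not a recorded test result).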
import Text.Parsec
import Text.Parsec.String
import Data.Maybe
type Line = String
type St a = GenParser Char Lines a
data Lines = Lines {
_ident :: St Char,
_eol :: St String,
_comment :: St String
}
defaultLines :: Lines
defaultLines = _Lines
where
_Lines = Lines {
_ident = anyToken,
_eol = (try (string "\r\n") <|> try (string "\r") <|> try (string "\n") <?> "end of line"),
_comment = do { (try (string "#") <|> try (string ";"));
(try (manyTill anyToken (_eol _Lines)) <|> (try (many anyToken)))
}
}
weirdLines :: Lines
weirdLines = Lines {
_ident = anyToken,
_eol = (try (string ".")),
  _comment = do { string "{"
                ; manyTill anyChar (try (string "}"))
}
}
comment :: St ()
comment = do
st <- getState
_comment st
return ()
item :: St String
item = do
st <- getState
it <- (try (manyTill (_ident st) (try $ _eol st))
<|> try (many1 (_ident st))
<?> "item")
return it
line :: St (Maybe String)
line = do
do {
try (comment >> return Nothing)
<|> try (item >>= \s -> case s of
[] -> return Nothing
_ -> return $ Just s)
}
parseLines :: St [String]
parseLines = do
linelist <- many line
<?> "line"
return $ catMaybes linelist
runLines' :: Lines -> St [String] -> String -> Either String [String]
runLines' lineInfo p input = do
case (runParser p lineInfo "Lines" input) of
Left err -> Left $ "Parser error: " ++ show err
Right val -> Right val
runLines :: Lines -> String -> Either String [String]
runLines lineInfo input = runLines' lineInfo parseLines input
| adarqui/DevUtils-Parser | src/System/DevUtils/Parser/Lines/String.hs | gpl-3.0 | 1,787 | 1 | 18 | 427 | 697 | 359 | 338 | 57 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ResourceViews.ZoneViews.AddResources
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Add resources to the view.
--
-- /See:/ <https://developers.google.com/compute/ Google Compute Engine Instance Groups API Reference> for @resourceviews.zoneViews.addResources@.
module Network.Google.Resource.ResourceViews.ZoneViews.AddResources
(
-- * REST Resource
ZoneViewsAddResourcesResource
-- * Creating a Request
, zoneViewsAddResources
, ZoneViewsAddResources
-- * Request Lenses
, zvarResourceView
, zvarProject
, zvarZone
, zvarPayload
) where
import Network.Google.Prelude
import Network.Google.ResourceViews.Types
-- | A resource alias for @resourceviews.zoneViews.addResources@ method which the
-- 'ZoneViewsAddResources' request conforms to.
type ZoneViewsAddResourcesResource =
"resourceviews" :>
"v1beta2" :>
"projects" :>
Capture "project" Text :>
"zones" :>
Capture "zone" Text :>
"resourceViews" :>
Capture "resourceView" Text :>
"addResources" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] ZoneViewsAddResourcesRequest :>
Post '[JSON] Operation
-- | Add resources to the view.
--
-- /See:/ 'zoneViewsAddResources' smart constructor.
data ZoneViewsAddResources = ZoneViewsAddResources'
{ _zvarResourceView :: !Text
, _zvarProject :: !Text
, _zvarZone :: !Text
, _zvarPayload :: !ZoneViewsAddResourcesRequest
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ZoneViewsAddResources' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'zvarResourceView'
--
-- * 'zvarProject'
--
-- * 'zvarZone'
--
-- * 'zvarPayload'
zoneViewsAddResources
:: Text -- ^ 'zvarResourceView'
-> Text -- ^ 'zvarProject'
-> Text -- ^ 'zvarZone'
-> ZoneViewsAddResourcesRequest -- ^ 'zvarPayload'
-> ZoneViewsAddResources
zoneViewsAddResources pZvarResourceView_ pZvarProject_ pZvarZone_ pZvarPayload_ =
ZoneViewsAddResources'
{ _zvarResourceView = pZvarResourceView_
, _zvarProject = pZvarProject_
, _zvarZone = pZvarZone_
, _zvarPayload = pZvarPayload_
}
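-- For example (illustrative values only, with a hypothetical request body @req@):
--   zoneViewsAddResources "my-view" "my-project" "us-central1-a" req
-- builds a request value; optional tweaks go through the lenses below before
-- the request is dispatched with gogol's usual 'send' machinery.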
-- | The name of the resource view.
zvarResourceView :: Lens' ZoneViewsAddResources Text
zvarResourceView
= lens _zvarResourceView
(\ s a -> s{_zvarResourceView = a})
-- | The project name of the resource view.
zvarProject :: Lens' ZoneViewsAddResources Text
zvarProject
= lens _zvarProject (\ s a -> s{_zvarProject = a})
-- | The zone name of the resource view.
zvarZone :: Lens' ZoneViewsAddResources Text
zvarZone = lens _zvarZone (\ s a -> s{_zvarZone = a})
-- | Multipart request metadata.
zvarPayload :: Lens' ZoneViewsAddResources ZoneViewsAddResourcesRequest
zvarPayload
= lens _zvarPayload (\ s a -> s{_zvarPayload = a})
instance GoogleRequest ZoneViewsAddResources where
type Rs ZoneViewsAddResources = Operation
type Scopes ZoneViewsAddResources =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/ndev.cloudman"]
requestClient ZoneViewsAddResources'{..}
= go _zvarProject _zvarZone _zvarResourceView
(Just AltJSON)
_zvarPayload
resourceViewsService
where go
= buildClient
(Proxy :: Proxy ZoneViewsAddResourcesResource)
mempty
| rueshyna/gogol | gogol-resourceviews/gen/Network/Google/Resource/ResourceViews/ZoneViews/AddResources.hs | mpl-2.0 | 4,409 | 0 | 18 | 1,066 | 550 | 326 | 224 | 89 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Classroom.Courses.Students.Create
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Adds a user as a student of a course. This method returns the following
-- error codes: * \`PERMISSION_DENIED\` if the requesting user is not
-- permitted to create students in this course or for access errors. *
-- \`NOT_FOUND\` if the requested course ID does not exist. *
-- \`FAILED_PRECONDITION\` if the requested user\'s account is disabled,
-- for the following request errors: * CourseMemberLimitReached *
-- CourseNotModifiable * UserGroupsMembershipLimitReached *
-- \`ALREADY_EXISTS\` if the user is already a student or teacher in the
-- course.
--
-- /See:/ <https://developers.google.com/classroom/ Google Classroom API Reference> for @classroom.courses.students.create@.
module Network.Google.Resource.Classroom.Courses.Students.Create
(
-- * REST Resource
CoursesStudentsCreateResource
-- * Creating a Request
, coursesStudentsCreate
, CoursesStudentsCreate
-- * Request Lenses
, cscXgafv
, cscUploadProtocol
, cscCourseId
, cscAccessToken
, cscUploadType
, cscPayload
, cscEnrollmentCode
, cscCallback
) where
import Network.Google.Classroom.Types
import Network.Google.Prelude
-- | A resource alias for @classroom.courses.students.create@ method which the
-- 'CoursesStudentsCreate' request conforms to.
type CoursesStudentsCreateResource =
"v1" :>
"courses" :>
Capture "courseId" Text :>
"students" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "enrollmentCode" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Student :> Post '[JSON] Student
-- | Adds a user as a student of a course. This method returns the following
-- error codes: * \`PERMISSION_DENIED\` if the requesting user is not
-- permitted to create students in this course or for access errors. *
-- \`NOT_FOUND\` if the requested course ID does not exist. *
-- \`FAILED_PRECONDITION\` if the requested user\'s account is disabled,
-- for the following request errors: * CourseMemberLimitReached *
-- CourseNotModifiable * UserGroupsMembershipLimitReached *
-- \`ALREADY_EXISTS\` if the user is already a student or teacher in the
-- course.
--
-- /See:/ 'coursesStudentsCreate' smart constructor.
data CoursesStudentsCreate =
CoursesStudentsCreate'
{ _cscXgafv :: !(Maybe Xgafv)
, _cscUploadProtocol :: !(Maybe Text)
, _cscCourseId :: !Text
, _cscAccessToken :: !(Maybe Text)
, _cscUploadType :: !(Maybe Text)
, _cscPayload :: !Student
, _cscEnrollmentCode :: !(Maybe Text)
, _cscCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CoursesStudentsCreate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cscXgafv'
--
-- * 'cscUploadProtocol'
--
-- * 'cscCourseId'
--
-- * 'cscAccessToken'
--
-- * 'cscUploadType'
--
-- * 'cscPayload'
--
-- * 'cscEnrollmentCode'
--
-- * 'cscCallback'
coursesStudentsCreate
:: Text -- ^ 'cscCourseId'
-> Student -- ^ 'cscPayload'
-> CoursesStudentsCreate
coursesStudentsCreate pCscCourseId_ pCscPayload_ =
CoursesStudentsCreate'
{ _cscXgafv = Nothing
, _cscUploadProtocol = Nothing
, _cscCourseId = pCscCourseId_
, _cscAccessToken = Nothing
, _cscUploadType = Nothing
, _cscPayload = pCscPayload_
, _cscEnrollmentCode = Nothing
, _cscCallback = Nothing
}
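-- For example (illustrative only, with a hypothetical 'Student' value):
--   coursesStudentsCreate "1234567890" newStudent
-- builds the request; an enrollment code, when required, can be supplied
-- afterwards via the 'cscEnrollmentCode' lens.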
-- | V1 error format.
cscXgafv :: Lens' CoursesStudentsCreate (Maybe Xgafv)
cscXgafv = lens _cscXgafv (\ s a -> s{_cscXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
cscUploadProtocol :: Lens' CoursesStudentsCreate (Maybe Text)
cscUploadProtocol
= lens _cscUploadProtocol
(\ s a -> s{_cscUploadProtocol = a})
-- | Identifier of the course to create the student in. This identifier can
-- be either the Classroom-assigned identifier or an alias.
cscCourseId :: Lens' CoursesStudentsCreate Text
cscCourseId
= lens _cscCourseId (\ s a -> s{_cscCourseId = a})
-- | OAuth access token.
cscAccessToken :: Lens' CoursesStudentsCreate (Maybe Text)
cscAccessToken
= lens _cscAccessToken
(\ s a -> s{_cscAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
cscUploadType :: Lens' CoursesStudentsCreate (Maybe Text)
cscUploadType
= lens _cscUploadType
(\ s a -> s{_cscUploadType = a})
-- | Multipart request metadata.
cscPayload :: Lens' CoursesStudentsCreate Student
cscPayload
= lens _cscPayload (\ s a -> s{_cscPayload = a})
-- | Enrollment code of the course to create the student in. This code is
-- required if userId corresponds to the requesting user; it may be omitted
-- if the requesting user has administrative permissions to create students
-- for any user.
cscEnrollmentCode :: Lens' CoursesStudentsCreate (Maybe Text)
cscEnrollmentCode
= lens _cscEnrollmentCode
(\ s a -> s{_cscEnrollmentCode = a})
-- | JSONP
cscCallback :: Lens' CoursesStudentsCreate (Maybe Text)
cscCallback
= lens _cscCallback (\ s a -> s{_cscCallback = a})
instance GoogleRequest CoursesStudentsCreate where
type Rs CoursesStudentsCreate = Student
type Scopes CoursesStudentsCreate =
'["https://www.googleapis.com/auth/classroom.profile.emails",
"https://www.googleapis.com/auth/classroom.profile.photos",
"https://www.googleapis.com/auth/classroom.rosters"]
requestClient CoursesStudentsCreate'{..}
= go _cscCourseId _cscXgafv _cscUploadProtocol
_cscAccessToken
_cscUploadType
_cscEnrollmentCode
_cscCallback
(Just AltJSON)
_cscPayload
classroomService
where go
= buildClient
(Proxy :: Proxy CoursesStudentsCreateResource)
mempty
| brendanhay/gogol | gogol-classroom/gen/Network/Google/Resource/Classroom/Courses/Students/Create.hs | mpl-2.0 | 6,937 | 0 | 19 | 1,529 | 889 | 525 | 364 | 127 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CloudDebugger.Debugger.Debuggees.Breakpoints.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists all breakpoints for the debuggee.
--
-- /See:/ <https://cloud.google.com/debugger Cloud Debugger API Reference> for @clouddebugger.debugger.debuggees.breakpoints.list@.
module Network.Google.Resource.CloudDebugger.Debugger.Debuggees.Breakpoints.List
(
-- * REST Resource
DebuggerDebuggeesBreakpointsListResource
-- * Creating a Request
, debuggerDebuggeesBreakpointsList
, DebuggerDebuggeesBreakpointsList
-- * Request Lenses
, ddblXgafv
, ddblIncludeInactive
, ddblUploadProtocol
, ddblAccessToken
, ddblActionValue
, ddblUploadType
, ddblStripResults
, ddblIncludeAllUsers
, ddblWaitToken
, ddblDebuggeeId
, ddblClientVersion
, ddblCallback
) where
import Network.Google.Debugger.Types
import Network.Google.Prelude
-- | A resource alias for @clouddebugger.debugger.debuggees.breakpoints.list@ method which the
-- 'DebuggerDebuggeesBreakpointsList' request conforms to.
type DebuggerDebuggeesBreakpointsListResource =
"v2" :>
"debugger" :>
"debuggees" :>
Capture "debuggeeId" Text :>
"breakpoints" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "includeInactive" Bool :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "action.value"
DebuggerDebuggeesBreakpointsListActionValue
:>
QueryParam "uploadType" Text :>
QueryParam "stripResults" Bool :>
QueryParam "includeAllUsers" Bool :>
QueryParam "waitToken" Text :>
QueryParam "clientVersion" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListBreakpointsResponse
-- | Lists all breakpoints for the debuggee.
--
-- /See:/ 'debuggerDebuggeesBreakpointsList' smart constructor.
data DebuggerDebuggeesBreakpointsList =
DebuggerDebuggeesBreakpointsList'
{ _ddblXgafv :: !(Maybe Xgafv)
, _ddblIncludeInactive :: !(Maybe Bool)
, _ddblUploadProtocol :: !(Maybe Text)
, _ddblAccessToken :: !(Maybe Text)
, _ddblActionValue :: !(Maybe DebuggerDebuggeesBreakpointsListActionValue)
, _ddblUploadType :: !(Maybe Text)
, _ddblStripResults :: !(Maybe Bool)
, _ddblIncludeAllUsers :: !(Maybe Bool)
, _ddblWaitToken :: !(Maybe Text)
, _ddblDebuggeeId :: !Text
, _ddblClientVersion :: !(Maybe Text)
, _ddblCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'DebuggerDebuggeesBreakpointsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ddblXgafv'
--
-- * 'ddblIncludeInactive'
--
-- * 'ddblUploadProtocol'
--
-- * 'ddblAccessToken'
--
-- * 'ddblActionValue'
--
-- * 'ddblUploadType'
--
-- * 'ddblStripResults'
--
-- * 'ddblIncludeAllUsers'
--
-- * 'ddblWaitToken'
--
-- * 'ddblDebuggeeId'
--
-- * 'ddblClientVersion'
--
-- * 'ddblCallback'
debuggerDebuggeesBreakpointsList
:: Text -- ^ 'ddblDebuggeeId'
-> DebuggerDebuggeesBreakpointsList
debuggerDebuggeesBreakpointsList pDdblDebuggeeId_ =
DebuggerDebuggeesBreakpointsList'
{ _ddblXgafv = Nothing
, _ddblIncludeInactive = Nothing
, _ddblUploadProtocol = Nothing
, _ddblAccessToken = Nothing
, _ddblActionValue = Nothing
, _ddblUploadType = Nothing
, _ddblStripResults = Nothing
, _ddblIncludeAllUsers = Nothing
, _ddblWaitToken = Nothing
, _ddblDebuggeeId = pDdblDebuggeeId_
, _ddblClientVersion = Nothing
, _ddblCallback = Nothing
}
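-- For example (illustrative, assuming the usual lens operators from
-- Control.Lens are in scope):
--   debuggerDebuggeesBreakpointsList "my-debuggee-id" & ddblIncludeInactive ?~ True
-- lists both active and inactive breakpoints for the given debuggee.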
-- | V1 error format.
ddblXgafv :: Lens' DebuggerDebuggeesBreakpointsList (Maybe Xgafv)
ddblXgafv
= lens _ddblXgafv (\ s a -> s{_ddblXgafv = a})
-- | When set to \`true\`, the response includes active and inactive
-- breakpoints. Otherwise, it includes only active breakpoints.
ddblIncludeInactive :: Lens' DebuggerDebuggeesBreakpointsList (Maybe Bool)
ddblIncludeInactive
= lens _ddblIncludeInactive
(\ s a -> s{_ddblIncludeInactive = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ddblUploadProtocol :: Lens' DebuggerDebuggeesBreakpointsList (Maybe Text)
ddblUploadProtocol
= lens _ddblUploadProtocol
(\ s a -> s{_ddblUploadProtocol = a})
-- | OAuth access token.
ddblAccessToken :: Lens' DebuggerDebuggeesBreakpointsList (Maybe Text)
ddblAccessToken
= lens _ddblAccessToken
(\ s a -> s{_ddblAccessToken = a})
-- | Only breakpoints with the specified action will pass the filter.
ddblActionValue :: Lens' DebuggerDebuggeesBreakpointsList (Maybe DebuggerDebuggeesBreakpointsListActionValue)
ddblActionValue
= lens _ddblActionValue
(\ s a -> s{_ddblActionValue = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ddblUploadType :: Lens' DebuggerDebuggeesBreakpointsList (Maybe Text)
ddblUploadType
= lens _ddblUploadType
(\ s a -> s{_ddblUploadType = a})
-- | This field is deprecated. The following fields are always stripped out
-- of the result: \`stack_frames\`, \`evaluated_expressions\` and
-- \`variable_table\`.
ddblStripResults :: Lens' DebuggerDebuggeesBreakpointsList (Maybe Bool)
ddblStripResults
= lens _ddblStripResults
(\ s a -> s{_ddblStripResults = a})
-- | When set to \`true\`, the response includes the list of breakpoints set
-- by any user. Otherwise, it includes only breakpoints set by the caller.
ddblIncludeAllUsers :: Lens' DebuggerDebuggeesBreakpointsList (Maybe Bool)
ddblIncludeAllUsers
= lens _ddblIncludeAllUsers
(\ s a -> s{_ddblIncludeAllUsers = a})
-- | A wait token that, if specified, blocks the call until the breakpoints
-- list has changed, or a server selected timeout has expired. The value
-- should be set from the last response. The error code
-- \`google.rpc.Code.ABORTED\` (RPC) is returned on wait timeout, which
-- should be called again with the same \`wait_token\`.
ddblWaitToken :: Lens' DebuggerDebuggeesBreakpointsList (Maybe Text)
ddblWaitToken
= lens _ddblWaitToken
(\ s a -> s{_ddblWaitToken = a})
-- | Required. ID of the debuggee whose breakpoints to list.
ddblDebuggeeId :: Lens' DebuggerDebuggeesBreakpointsList Text
ddblDebuggeeId
= lens _ddblDebuggeeId
(\ s a -> s{_ddblDebuggeeId = a})
-- | Required. The client version making the call. Schema:
-- \`domain\/type\/version\` (e.g., \`google.com\/intellij\/v1\`).
ddblClientVersion :: Lens' DebuggerDebuggeesBreakpointsList (Maybe Text)
ddblClientVersion
= lens _ddblClientVersion
(\ s a -> s{_ddblClientVersion = a})
-- | JSONP
ddblCallback :: Lens' DebuggerDebuggeesBreakpointsList (Maybe Text)
ddblCallback
= lens _ddblCallback (\ s a -> s{_ddblCallback = a})
instance GoogleRequest
DebuggerDebuggeesBreakpointsList
where
type Rs DebuggerDebuggeesBreakpointsList =
ListBreakpointsResponse
type Scopes DebuggerDebuggeesBreakpointsList =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud_debugger"]
requestClient DebuggerDebuggeesBreakpointsList'{..}
= go _ddblDebuggeeId _ddblXgafv _ddblIncludeInactive
_ddblUploadProtocol
_ddblAccessToken
_ddblActionValue
_ddblUploadType
_ddblStripResults
_ddblIncludeAllUsers
_ddblWaitToken
_ddblClientVersion
_ddblCallback
(Just AltJSON)
debuggerService
where go
= buildClient
(Proxy ::
Proxy DebuggerDebuggeesBreakpointsListResource)
mempty
| brendanhay/gogol | gogol-debugger/gen/Network/Google/Resource/CloudDebugger/Debugger/Debuggees/Breakpoints/List.hs | mpl-2.0 | 8,796 | 0 | 24 | 2,050 | 1,200 | 694 | 506 | 179 | 1 |
module BOM where
import Control.Applicative
import Control.Monad.State
import Control.Monad.RWS
import Control.Monad.Logic
import Data.Function
import Data.List
import Data.Monoid
import Data.Ord
import qualified Data.Map as M
bom =
[ (1, pcb)
, (1, usbConnector)
, (1, usbEsdModule)
, (1, shieldDecouplingCapacitor)
, (1, shieldDecouplingResistor)
, (1, regulator)
, (1, regulatorInputCapacitor)
, (1, regulatorOutputCapacitor)
]
pcb = oshParkPCB (0.4 * 0.7) "USB Mini-B connector DIP module"
usbConnector = basicPart mouser "10033526-N3212LF"
[ (1, 0.58)
, (10, 0.483)
, (100, 0.422)
, (500, 0.362)
]
usbEsdModule = basicPart mouser "TPD2E001IDRLRQ1"
[ (1, 0.42)
, (10, 0.351)
, (25, 0.312)
, (100, 0.211)
, (250, 0.207)
, (500, 0.203)
, (750, 0.199)
]
shieldDecouplingCapacitor = basicPart mouser "C0402C472K5RACTU"
[ (1, 0.10)
, (10, 0.01)
, (100, 0.008)
, (1000, 0.006)
, (1000, 0.005)
]
shieldDecouplingResistor = basicPart mouser "CRCW04021M00FKED"
[ (1, 0.08)
, (10, 0.044)
, (100, 0.021)
, (1000, 0.015)
]
regulator = basicPart mouser "AAT3220IGY-3.3-T1"
[ (1, 0.17)
, (100, 0.16)
, (500, 0.15)
, (1000, 0.14)
]
regulatorInputCapacitor = basicPart mouser "GRM155R71C104KA88D"
[ (1, 0.10)
, (10, 0.01)
, (100, 0.006)
, (1000, 0.005)
, (2500, 0.004)
]
regulatorOutputCapacitor = basicPart mouser "GRM155R60J105KE19D"
[ (1, 0.18)
, (10, 0.022)
, (100, 0.013)
, (1000, 0.01)
, (2500, 0.009)
]
---------------------------------------
data Supplier = Supplier
{ supplierName :: String
, shipping :: Double -- TODO: [(Integer, Part)] -> Double
} deriving (Eq, Ord, Read, Show)
mouser = Supplier "Mouser" 4.99
oshPark = Supplier "OSH Park" 0
digikey = Supplier "Digikey" 5.47
newark = Supplier "Newark" 8.50
oshParkPCB sz boardName =
[ Part
{ supplier = oshPark
, partNo = boardName
, minimumQty = 3
, increment = 3
, price = sz * 5 / 3
}
, Part
{ supplier = oshPark
, partNo = boardName
, minimumQty = 10 * ceiling (15 / sz)
, increment = 10
, price = sz
}
]
data Part = Part
{ supplier :: Supplier
, partNo :: String
, minimumQty :: Integer
, increment :: Integer
, price :: Double
} deriving (Eq, Ord, Read, Show)
basicPart supp num breaks =
[ Part supp num moq 1 p
| (moq, p) <- breaks
]
---------------------------------------
unitCost withShipping bom qty = cost / fromIntegral qty
where
cost | withShipping = pCost + sCost
| otherwise = pCost
(_, pCost, sCost) = selectBOM bom qty
selectParts bom = (\(a,_,_) -> a ) . selectBOM bom
orderCost bom = (\(_,b,c) -> b + c) . selectBOM bom
partsCost bom = (\(_,b,_) -> b ) . selectBOM bom
shippingCost bom = (\(_,_,c) -> c) . selectBOM bom
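-- Example queries (illustrative; try them in GHCi against the BOM above):
--   unitCost True bom 10   -- estimated per-unit cost of a 10-board run, shipping included
--   selectParts bom 10     -- the (quantity, part) choices behind that price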
selectBOM parts qty = (bom, sum partCosts, shippingCost)
where
totalCost ((_, x), Sum y) = sum x + y
((bom, partCosts), Sum shippingCost) = minimumBy (comparing totalCost) $
map (\(a,b) -> (unzip a, b)) $
observeAll $
(\x -> evalRWST x () M.empty) $
flip mapM parts $ \(count, part) -> do
selectPart part (count * qty)
selectPart parts qty = do
let suppliers = nub (map supplier parts)
selected <- map supplierName <$> filterM selectSupplier suppliers
let selectedParts = filter (flip elem selected . supplierName . supplier) parts
if null selectedParts
then empty
else pure (selectPart' selectedParts qty)
selectPart' parts qty = ((actualQty part, part), extendedPrice part)
where
part = minimumBy cmpParts parts
extras part = max 0 (qty - minimumQty part)
increments part = ceiling (fromIntegral (extras part) / fromIntegral (increment part))
actualQty part = minimumQty part + increments part * increment part
extendedPrice part = price part * fromIntegral (actualQty part)
-- minimize price, break ties by maximizing qty
cmpParts = mconcat
[ comparing extendedPrice
, flip (comparing actualQty)
]
-- nondeterministically accept/reject each supplier,
-- remembering the choice and (if accepting) tallying
-- the shipping cost
selectSupplier s = do
mbPrev <- gets (M.lookup (supplierName s))
case mbPrev of
Just prev -> return prev
Nothing -> do
accept <- pure True <|> pure False
modify (M.insert (supplierName s) accept)
when accept (tell (Sum (shipping s)))
return accept
| mokus0/schematics | usb-dip-module/BOM.hs | unlicense | 5,028 | 0 | 18 | 1,619 | 1,609 | 901 | 708 | 133 | 2 |
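-- AOJ 0054 (summary inferred from the code): for each input line "a b n",
-- print the sum of the first n digits after the decimal point of a/b.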
f a b =
let i = (a*10) `div` b
o = (a*10) `mod` b
in
i:(f o b)
ans (a:b:n:_) =
sum $ take n $ f (a`mod`b) b
main = do
c <- getContents
let i = map (map read) $ map words $ lines c
o = map ans i
mapM_ print o
| a143753/AOJ | 0054.hs | apache-2.0 | 238 | 0 | 14 | 86 | 175 | 88 | 87 | 11 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE RecordWildCards #-}
module Twitter where
-- Twitter stuff
import Control.Monad
import Data.Aeson
import GHC.Generics
import Data.ByteString
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy as BL
import qualified Network.HTTP.Base as HTTP
import Network.HTTP.Client
import Network.HTTP.Client.TLS
import Network.HTTP.Types
import Web.Authenticate.OAuth
data Config = Config {
apiKey :: String,
apiSecret :: String,
userKey :: String,
userSecret :: String
} deriving (Show, Generic)
instance FromJSON Config
instance ToJSON Config
configFromFile :: FilePath -> IO (Either String Config)
configFromFile path = do
contents <- BL.readFile path
return $ eitherDecode contents
oauthTwitter :: ByteString -> ByteString -> OAuth
oauthTwitter key secret =
newOAuth { oauthServerName = "twitter"
, oauthRequestUri = "https://api.twitter.com/oauth/request_token"
, oauthAccessTokenUri = "https://api.twitter.com/oauth/access_token"
, oauthAuthorizeUri = "https://api.twitter.com/oauth/authorize"
, oauthSignatureMethod = HMACSHA1
, oauthConsumerKey = key
, oauthConsumerSecret = secret
, oauthVersion = OAuth10a
}
signWithConfig :: Config -> Request -> IO Request
signWithConfig Config{..} = signOAuth
(oauthTwitter (B.pack apiKey) (B.pack apiSecret))
(newCredential (B.pack userKey) (B.pack userSecret))
tweet :: Config -> String -> IO (Response BL.ByteString)
tweet config status = do
url <- parseUrl $ "https://api.twitter.com/1.1/statuses/update.json?status=" ++ HTTP.urlEncode status
req <- signWithConfig config url{ method = B.pack "POST" }
manager <- newManager tlsManagerSettings
httpLbs req manager
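-- A minimal usage sketch (assumes a "twitter.json" file matching 'Config';
-- not part of the original module):
--
-- main :: IO ()
-- main = do
--   Right cfg <- configFromFile "twitter.json"
--   resp <- tweet cfg "Hello from Haskell!"
--   print (responseStatus resp)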
| daherb/GF-loves-food | Twitter.hs | artistic-2.0 | 1,878 | 0 | 12 | 458 | 438 | 241 | 197 | -1 | -1 |
-- 210
import Euler(toDigitsBase)
nn = 6
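-- calcProd n multiplies the digits d_1, d_10, ..., d_(10^(n-1)) of Champernowne's
-- constant 0.123456789101112... (Project Euler 40). Assuming toDigitsBase 10 0
-- yields [0], that leading zero makes champDigits effectively 1-indexed, so
-- (champDigits !! k) is d_k; with nn = 6 this gives the stated answer, 210.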
calcProd n = product $ map (champDigits !!) $ take n $ iterate (10*) 1
where champDigits = concatMap (toDigitsBase 10) [0..]
main = putStrLn $ show $ calcProd nn
| higgsd/euler | hs/40.hs | bsd-2-clause | 210 | 0 | 9 | 44 | 91 | 48 | 43 | 5 | 1 |
{-# LANGUAGE UnicodeSyntax #-}
import qualified Data.Foldable as Fold
import Data.List (sort)
import qualified Data.Sequence as Seq
import Options.Applicative (fullDesc,progDesc)
import LogicGrowsOnTrees.Parallel.Main
import LogicGrowsOnTrees.Parallel.Adapter.Threads
import LogicGrowsOnTrees.Examples.Queens
main :: IO ()
main =
mainForExploreTree
driver
board_size_parser
(fullDesc <> progDesc "print all the n-queens solutions for a given board size")
(\_ (RunOutcome _ termination_reason) → do
case termination_reason of
Aborted _ → error "search aborted"
Completed solutions → Fold.mapM_ print . Seq.unstableSort $ solutions
Failure _ message → error $ "error: " ++ message
)
(fmap (Seq.singleton . sort) . nqueensSolutions)
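-- Illustrative run (exact output format depends on LogicGrowsOnTrees'
-- solution type): invoked with a board size of 4, the program should print
-- the two distinct 4-queens solutions, each as a sorted list of queen
-- placements, one per line.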
| gcross/LogicGrowsOnTrees | LogicGrowsOnTrees/examples/print-all-nqueens-solutions.hs | bsd-2-clause | 855 | 0 | 16 | 206 | 195 | 106 | 89 | 20 | 3 |
module Language.Drasil.Code.DataDesc where
import Language.Drasil.Chunk.Code (CodeVarChunk)
import Data.List (nub)
type DataDesc = [Data]
type DataItem = CodeVarChunk
type Delim = Char -- delimiter
data Data = Singleton DataItem
| JunkData
| Line LinePattern Delim
| Lines LinePattern (Maybe Integer) Delim -- multi-line data
-- (Maybe Int) = number of lines, Nothing = unknown so go to end of file
data LinePattern = Straight [DataItem] -- line of data with no pattern
| Repeat [DataItem] -- line of data with repeated pattern
singleton :: DataItem -> Data
singleton = Singleton
junkLine :: Data
junkLine = JunkData
singleLine :: LinePattern -> Delim -> Data
singleLine = Line
multiLine :: LinePattern -> Delim -> Data
multiLine l = Lines l Nothing
multiLine' :: LinePattern -> Integer -> Delim -> Data
multiLine' l i = Lines l (Just i)
straight :: [DataItem] -> LinePattern
straight = Straight
repeated :: [DataItem] -> LinePattern
repeated = Repeat
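-- Illustrative sketch only: given two hypothetical DataItems x and y, a file
-- consisting of one junk header line followed by comma-separated x,y lines
-- could be described as
--
-- > exampleDesc :: DataItem -> DataItem -> DataDesc
-- > exampleDesc x y = [junkLine, multiLine (straight [x, y]) ',']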
isJunk :: Data -> Bool
isJunk JunkData = True
isJunk _ = False
isLine :: Data -> Bool
isLine Line{} = True
isLine _ = False
isLines :: Data -> Bool
isLines Lines{} = True
isLines _ = False
getInputs :: DataDesc -> [DataItem]
getInputs d = nub $ concatMap getDataInputs d
getDataInputs :: Data -> [DataItem]
getDataInputs (Singleton v) = [v]
getDataInputs (Line lp _) = getPatternInputs lp
getDataInputs (Lines lp _ _) = getPatternInputs lp
getDataInputs JunkData = []
getPatternInputs :: LinePattern -> [DataItem]
getPatternInputs (Straight vs) = vs
getPatternInputs (Repeat vs) = vs | JacquesCarette/literate-scientific-software | code/drasil-code/Language/Drasil/Code/DataDesc.hs | bsd-2-clause | 1,685 | 0 | 8 | 396 | 481 | 265 | 216 | 45 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE UnicodeSyntax #-}
{-|
[@ISO639-1@] zh
[@ISO639-2B@] chi
[@ISO639-2T@] zho
[@ISO639-3@] cmn
[@Native name@] 官話
[@English name@] Chinese
-}
module Text.Numeral.Language.ZHO
( -- * Language entries
trad_entry
, simpl_entry
, finance_trad_entry
, finance_simpl_entry
, pinyin_entry
-- * Conversions
, trad_cardinal
, simpl_cardinal
, finance_trad_cardinal
, finance_simpl_cardinal
, pinyin_cardinal
-- * Structure
, struct
-- * Bounds
, bounds
) where
--------------------------------------------------------------------------------
-- Imports
--------------------------------------------------------------------------------
import "base" Data.Bool ( otherwise )
import "base" Data.Function ( id, const, fix, flip, ($) )
import "base" Data.Maybe ( Maybe(Just) )
import "base" Data.Ord ( (<) )
import "base" Prelude ( Num, Integral, fromIntegral, (-), div, divMod, negate )
import "base-unicode-symbols" Data.Eq.Unicode ( (≡) )
import "base-unicode-symbols" Data.Function.Unicode ( (∘) )
import "base-unicode-symbols" Data.Monoid.Unicode ( (⊕) )
import "base-unicode-symbols" Data.Ord.Unicode ( (≥) )
import qualified "containers" Data.Map as M ( Map, fromList, lookup )
import "containers-unicode-symbols" Data.Map.Unicode ( (∪) )
import "this" Text.Numeral
import qualified "this" Text.Numeral.Exp as E
import "this" Text.Numeral.Misc ( dec )
import "this" Text.Numeral.Entry
import "text" Data.Text ( Text )
--------------------------------------------------------------------------------
-- ZHO
--------------------------------------------------------------------------------
entry ∷ Entry
entry = emptyEntry
{ entIso639_1 = Just "zh"
, entIso639_2 = ["chi", "zho"]
, entIso639_3 = Just "cmn"
, entNativeNames = ["官話"]
, entEnglishName = Just "Chinese"
}
flipIfR ∷ Side → (α → α → α) → (α → α → α)
flipIfR L = id
flipIfR R = flip
add0 ∷ (Integral α, E.Lit β, E.Add β) ⇒ α → Rule α β
add0 val f n | n < val `div` 10 = E.lit 0 `E.add` f n
| otherwise = f n
mulX ∷ (Integral α, E.Lit β, E.Add β, E.Mul β)
⇒ α → Side → Side → Rule α β
mulX val aSide mSide =
\f n → let (m, a) = n `divMod` val
mval = if m ≡ 1
then E.lit 1 ⊡ E.lit (fromIntegral val)
else f m ⊡ E.lit (fromIntegral val)
in if a ≡ 0
then mval
else (flipIfR aSide E.add) (add0 val f a) mval
where
(⊡) = flipIfR mSide E.mul
struct ∷ (Integral α, E.Unknown β, E.Lit β, E.Neg β, E.Add β, E.Mul β) ⇒ α → β
struct = pos
$ fix
$ findRule (0, lit)
( [(dec 1, step (dec 1) (dec 1) R L)]
⊕ [(dec n, stepX (dec n) (dec 1) R L) | n ← [2,3]]
⊕ [(dec n, stepX (dec n) (dec 4) R L) | n ← [4,8..44]]
)
(dec 48 - 1)
where
stepX = mkStep lit1 addX mulX
addX val _ = \f n → E.add (f val) (add0 val f $ n - val)
bounds ∷ (Integral α) ⇒ (α, α)
bounds = let x = dec 48 - 1 in (negate x, x)
cardinalRepr ∷ Repr i
cardinalRepr = defaultRepr
{ reprAdd = Just $ \_ _ _ → ""
, reprMul = Just $ \_ _ _ → ""
}
--------------------------------------------------------------------------------
-- Traditional Characters
--------------------------------------------------------------------------------
trad_entry ∷ Entry
trad_entry = entry
{ entVariant = Just "traditional"
, entCardinal = Just Conversion
{ toNumeral = trad_cardinal
, toStructure = struct
}
}
trad_cardinal ∷ (Integral α) ⇒ i → α → Maybe Text
trad_cardinal inf = trad_cardinalRepr inf ∘ struct
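-- Sketch (unverified): the inflection argument is ignored by the renderer, so
-- () works as a placeholder.
--
-- > trad_cardinal () (42 :: Integer)   -- should be Just "四十二"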
trad_cardinalRepr ∷ i → Exp i → Maybe Text
trad_cardinalRepr =
render cardinalRepr
{ reprValue = \_ n → M.lookup n trad_syms
, reprNeg = Just $ \_ _ → "負"
}
trad_syms ∷ (Integral α) ⇒ M.Map α (Ctx (Exp i) → Text)
trad_syms =
M.fromList
[ (0, \c → case c of
CtxEmpty → "零"
_ → "〇"
)
, (1, const "一")
, (2, \c → case c of
CtxMul _ (Lit n) _ | n ≥ 1000 → "兩"
_ → "二"
)
, (3, const "三")
, (4, const "四")
, (5, const "五")
, (6, const "六")
, (7, const "七")
, (8, const "八")
, (9, const "九")
, (10, const "十")
, (100, const "百")
, (1000, const "千")
, (dec 4, const "萬")
, (dec 8, const "億")
, (dec 12, const "兆")
, (dec 16, const "京")
, (dec 20, const "垓")
, (dec 24, const "秭")
, (dec 28, const "穰")
, (dec 32, const "溝")
, (dec 36, const "澗")
, (dec 40, const "正")
, (dec 44, const "載")
]
--------------------------------------------------------------------------------
-- Simplified Characters
--------------------------------------------------------------------------------
simpl_entry ∷ Entry
simpl_entry = entry
{ entVariant = Just "simplified"
, entCardinal = Just Conversion
{ toNumeral = simpl_cardinal
, toStructure = struct
}
}
simpl_cardinal ∷ (Integral α) ⇒ i → α → Maybe Text
simpl_cardinal inf = simpl_cardinalRepr inf ∘ struct
simpl_cardinalRepr ∷ i → Exp i → Maybe Text
simpl_cardinalRepr =
render cardinalRepr
{ reprValue = \_ n → M.lookup n (simpl_syms ∪ trad_syms)
, reprNeg = Just $ \_ _ → "负"
}
simpl_syms ∷ (Integral α) ⇒ M.Map α (Ctx (Exp i) → Text)
simpl_syms =
M.fromList
[ (2, \c → case c of
CtxMul _ (Lit n) _ | n ≥ 1000 → "两"
_ → "二"
)
, (dec 4, const "万")
, (dec 8, const "亿")
]
--------------------------------------------------------------------------------
-- Financial Characters (Traditional)
--------------------------------------------------------------------------------
finance_trad_entry ∷ Entry
finance_trad_entry = entry
{ entVariant = Just "finance traditional"
, entCardinal = Just Conversion
{ toNumeral = finance_trad_cardinal
, toStructure = struct
}
}
finance_trad_cardinal ∷ (Integral α) ⇒ i → α → Maybe Text
finance_trad_cardinal inf = finance_trad_cardinalRepr inf ∘ struct
finance_trad_cardinalRepr ∷ i → Exp i → Maybe Text
finance_trad_cardinalRepr =
render cardinalRepr
{ reprValue = \_ n → M.lookup n (finance_trad_syms ∪ trad_syms)
, reprNeg = Just $ \_ _ → "負"
}
finance_trad_syms ∷ (Integral α) ⇒ M.Map α (Ctx (Exp i) → Text)
finance_trad_syms =
M.fromList
[ (0, const "零")
, (1, const "壹")
, (2, const "貳")
, (3, const "参")
, (4, const "肆")
, (5, const "伍")
, (6, const "陸")
, (7, const "柒")
, (8, const "捌")
, (9, const "玖")
, (10, const "拾")
, (100, const "伯")
, (1000, const "仟")
, (dec 4, const "萬")
, (dec 8, const "億")
]
--------------------------------------------------------------------------------
-- Financial Characters (Simplified)
--------------------------------------------------------------------------------
finance_simpl_entry ∷ Entry
finance_simpl_entry = entry
{ entVariant = Just "finance simplified"
, entCardinal = Just Conversion
{ toNumeral = finance_simpl_cardinal
, toStructure = struct
}
}
finance_simpl_cardinal ∷ (Integral α) ⇒ i → α → Maybe Text
finance_simpl_cardinal inf = finance_simpl_cardinalRepr inf ∘ struct
finance_simpl_cardinalRepr ∷ i → Exp i → Maybe Text
finance_simpl_cardinalRepr =
render cardinalRepr
{ reprValue = \_ n → M.lookup n ( finance_simpl_syms
∪ finance_trad_syms
∪ trad_syms
)
, reprNeg = Just $ \_ _ → "负"
}
where
finance_simpl_syms ∷ (Integral α) ⇒ M.Map α (Ctx (Exp i) → Text)
finance_simpl_syms =
M.fromList
[ (2, const "贰")
, (6, const "陆")
, (dec 4, const "万")
, (dec 8, const "亿")
]
--------------------------------------------------------------------------------
-- Pinyin
--------------------------------------------------------------------------------
pinyin_entry ∷ Entry
pinyin_entry = entry
{ entVariant = Just "pinyin"
, entCardinal = Just Conversion
{ toNumeral = pinyin_cardinal
, toStructure = struct
}
}
pinyin_cardinal ∷ (Integral α) ⇒ i → α → Maybe Text
pinyin_cardinal inf = pinyin_cardinalRepr inf ∘ struct
pinyin_cardinalRepr ∷ i → Exp i → Maybe Text
pinyin_cardinalRepr =
render cardinalRepr
{ reprValue = \_ n → M.lookup n pinyin_syms
, reprNeg = Just $ \_ _ → "fù"
, reprAdd = Just (⊞)
}
where
(Lit 10 ⊞ _) _ = ""
(_ ⊞ _) _ = " "
pinyin_syms ∷ (Integral α) ⇒ M.Map α (Ctx (Exp i) → Text)
pinyin_syms =
M.fromList
[ (0, const "líng")
, (1, const "yī")
, (2, \c → case c of
CtxMul _ (Lit n) _ | n ≥ 1000 → "liǎng"
_ → "èr"
)
, (3, const "sān")
, (4, const "sì")
, (5, const "wǔ")
, (6, const "liù")
, (7, const "qī")
, (8, const "bā")
, (9, const "jiǔ")
, (10, const "shí")
, (100, const "bǎi")
, (1000, const "qiān")
, (dec 4, const "wàn")
, (dec 8, const "yì")
, (dec 12, const "zhào")
, (dec 16, const "jīng")
, (dec 20, const "gāi")
, (dec 24, const "zǐ")
, (dec 28, const "ráng")
, (dec 32, const "gōu")
, (dec 36, const "jiàn")
, (dec 40, const "zhēng")
, (dec 44, const "zài")
]
| telser/numerals | src/Text/Numeral/Language/ZHO.hs | bsd-3-clause | 10,686 | 52 | 15 | 3,294 | 3,255 | 1,801 | 1,454 | 237 | 3 |
{-# LANGUAGE FlexibleInstances,
UndecidableInstances,
ScopedTypeVariables,
OverlappingInstances #-}
module LameTetris.Game where
import Prelude
import qualified Prelude as P
import System.Random
import Data.Maybe (catMaybes, isJust)
import Data.List (foldl', sort, findIndex)
import Control.Monad (when)
import Control.Monad.RWS.Strict
import Data.Array.Repa
import qualified Data.Array.Repa as R
import Data.Array.Repa.Repr.Vector
import Graphics.UI.SDL
import qualified Graphics.UI.SDL as SDL
import Graphics.UI.SDL.TTF
import LameTetris.Types
import qualified LameTetris.Types as LT
import LameTetris.Video
import LameTetris.Timer
import LameTetris.Utils
-- Random stuff
-- Add an instance to random so that I can make random BlockTypes
instance (Enum a, Bounded a) => Random a where
randomR (mn, mx) g = let fstToEnum (a, g) = (toEnum a, g)
in fstToEnum $ randomR (fromEnum mn, fromEnum mx) g
random = randomR (minBound :: a, maxBound :: a)
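-- With this instance in scope, e.g. `randomRIO (minBound, maxBound)` picks a
-- uniformly random value of any bounded enumeration (including the block-type
-- enum from LameTetris.Types), which is what spawnBlock' relies on below.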
-- | Grab a random block
spawnBlock' :: IO Block
spawnBlock' = do btype <- liftIO $ randomRIO (minBound, maxBound)
return $ Block { position = (4, 0)
, guts = (getGuts btype)
}
-- | for use inside Game monad
spawnBlock :: Game Block
spawnBlock = liftIO spawnBlock'
-- | Empty board
startBoard :: Board
startBoard = fromListVector (R.Z :. 10 :. 20) $ replicate 200 Nothing
-- | Initial game state
initialGS :: Font -> IO GameState
initialGS fnt = do time <- initialTimer
linetxt <- renderTextSolid fnt "0" textColor
frst <- spawnBlock'
next <- spawnBlock'
return $ GameState { lineNum = 0
, timer = time
, currentPiece = frst
, nextPiece = next { position = (12, 8) }
, board = startBoard
, linesText = linetxt
, gameOver = False
}
-- | Handle events, then draw the game, then do it again (unless you quit)
gameLoop :: Game ()
gameLoop = do
event <- handlePoll
over <- gets gameOver
when (event /= Quit && not over) $
do brd <- gets board
drawBoard brd
handleInterval
waitUntilNextFrame
gameLoop
-- | Checks if the interval has been reached, if so, move down!
handleInterval :: Game ()
handleInterval = do
time <- gets timer
now <- liftIO $ getTicks
let lastDropDelta = now - (tsld time)
interv = interval time
when (lastDropDelta >= interv) $ do ifM currentPieceShouldBeSet -- condition
setPieceAndStartAnew -- then
(movePiece 0 1) -- else
linum <- gets lineNum
let newInt = calculateInterval linum
setTimer $ time { interval = newInt
, tsld = now
}
-- | Polls and polls until it can't poll no more!
handlePoll :: Game Event
handlePoll = do
event <- liftIO pollEvent
case event of Quit -> return Quit
NoEvent -> return NoEvent
_ -> handleEvent event >> handlePoll
-- | In any event, this is what you do:
handleEvent :: Event -> Game ()
handleEvent (KeyUp (Keysym SDLK_LEFT _ _)) = movePiece (-1) 0
handleEvent (KeyUp (Keysym SDLK_RIGHT _ _)) = movePiece 1 0
handleEvent (KeyUp (Keysym SDLK_DOWN _ _)) = movePiece 0 1
handleEvent (KeyUp (Keysym SDLK_UP _ _)) = rotateCurrent
handleEvent _ = return ()
-- | Checks if two grids collide
collide :: Grid -> Grid -> Bool
collide g1 g2 = R.foldAllS (||) False $ R.zipWith (bothJust) g1 g2
-- | Takes a point (position current), a source (guts current),
-- and a destination (board), and
combineAt :: Point -> Grid -> Grid -> Grid
combineAt (x,y) src dest = computeVectorS $ traverse dest id insertCell
where
(R.Z :. w :. h) = extent src
insertCell lookupDest spot@(R.Z :. destX :. destY) =
if any id [ destX < x
, destX >= x + w
, destY < y
, destY >= y + h
]
then lookupDest spot -- No changes here, keep moving...
else case src ! (R.Z :. destX -x :. destY - y) of
Nothing -> lookupDest spot -- Nothing new, keep moving...
occupiedCell -> occupiedCell -- There's something to add, so use that!
-- | Checks if there are any full rows on the board and removes them
removeFullRowsFromBoard :: Game ()
removeFullRowsFromBoard = do
brd <- gets board
let fullRows = scanFullRows brd
when (not $ null fullRows) $ do addRowsToCount $ length fullRows
setBoard $ removeRows brd fullRows
-- | Takes a board and a list of y-indexed rows, then returns a board
-- without those rows (padded with Nothings on top)
removeRows :: Board -> [Int] -> Board
removeRows brd rows = computeVectorS $ traverse brd id determineCell
where
numToDrop = length rows
sh = extent brd
determineCell lookupBrd spot@(R.Z :. x :. y)
| y < numToDrop = Nothing
| otherwise =
case findIndex (== y) $ reverse $ sort (y:rows) of
Nothing -> error "what is this I don't even... I JUST PUT THAT THERE"
-- this is for cases when the y index of the cell is also a row
-- that was just removed :(
Just offset -> let offset' = if (y `elem` rows) then offset + 1 else offset
y' = properY (y - offset')
in lookupBrd (R.Z :. x :. y')
properY y
| y `elem` rows = properY (y - 1)
| otherwise = y
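-- For example, removing rows [18,19] (the two bottom rows of a height-20
-- board) makes every surviving cell slide down by two rows and leaves the top
-- two rows filled with Nothing.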
-- | Sees if there are any full rows - they mean points!!
-- [] means no rows were full
-- [1, 3] means rows 1 and 3 were full!!!
scanFullRows :: Grid -> [Int]
scanFullRows grid = catMaybes . P.map checkFull $ rows
where
(R.Z :. w :. h) = extent grid
getRow y = computeVectorS $ extract (R.Z :. 0 :. y) (R.Z :. 10 :. 1) grid
range = [0 .. h -1]
-- e.g. rows = [(0, Nothing), (1, Just J) ... ]
rows = P.zip range $ P.map getRow range
checkFull :: (Int, Grid) -> Maybe Int
checkFull (ix, row) = do isFull <- foldAllP (&&) True $ R.map isJust row
case isFull of
False -> Nothing -- Ain't full, please disregard
True -> Just ix -- Full row here, remember the index!
-- | handles change in row count
addRowsToCount :: Int -> Game ()
addRowsToCount ct = do linum <- gets lineNum
linetxt <- gets linesText
fnt <- asks font
let newct = linum + (toEnum ct)
setLineNum newct
liftIO $ freeSurface linetxt
newlinetxt <- liftIO $ renderTextSolid fnt (show newct) textColor
setLinesText newlinetxt
-- | Checks if we should set the current piece into the board and move on
currentPieceShouldBeSet :: Game Bool
currentPieceShouldBeSet = fmap not $ checkCurrentPieceCanMove 0 1
-- | Checks if the current piece could be moved to a location
checkCurrentPieceCanMove :: Int -> Int -> Game Bool
checkCurrentPieceCanMove dx dy = do
current <- gets currentPiece
checkPieceCanMove current dx dy
-- | Checks if the a particular piece could be moved to the location
checkPieceCanMove :: Block -> Int -> Int -> Game Bool
checkPieceCanMove block dx dy = do
brd <- gets board
let (x,y) = position block
currentSlice = guts block
currentsh@(R.Z :. w :. h) = extent currentSlice
      -- boardSlice: the region of the board the piece would cover after moving by (dx,dy)
boardSlice = computeVectorS $ extract (R.Z :. x+dx :. y+dy) currentsh brd
if (y + h) >= boardHeight || (x + dx) < 0 || (x + dx) > (boardWidth - w) -- board edge check
then return False -- Bottom of the board, left side of board, right side
else let doesntCollide = not $ collide currentSlice boardSlice -- board contents check
in return doesntCollide
-- | Board bounds checked move: won't do anything if it can't move
movePiece :: Int -> Int -> Game ()
movePiece dx dy = do
canMove <- checkCurrentPieceCanMove dx dy
when canMove $ movePiece' dx dy
where
movePiece' dx dy = do
current <- gets currentPiece
let (x,y) = position current
setCurrentPiece $ current { position = ((x + dx), (y + dy)) }
-- | Takes the current piece, slaps it on the board, and starts everything
-- all over again
setPieceAndStartAnew :: Game ()
setPieceAndStartAnew = do
newNextBlock <- spawnBlock
gs <- get
let current = currentPiece gs
nextp = (nextPiece gs) { position = (4, 1) }
newBoard = combineAt (position current) (guts current) (board gs)
put $ gs { currentPiece = nextp
, nextPiece = newNextBlock { position = (12, 8) }
, board = newBoard
}
removeFullRowsFromBoard
canSetNew <- checkPieceCanMove nextp 0 0
when (not canSetNew) $ setGameOver True
rotateCurrent :: Game ()
rotateCurrent = gets currentPiece >>= rotate >>= setCurrentPiece
where
rotate :: Block -> Game Block
rotate block@(Block pos@(px,py) grid) = do
let (R.Z :. oldw :. oldh) = extent grid
w' = oldh -- height becomes width
h' = oldw -- width becomes height
newsh = (R.Z :. w' :. h')
-- make sure that the px and py values fit the piece on the board!
permissibleX x
| x < 0 = 0
| x >= (boardWidth - w') = boardWidth - w'
| otherwise = x
permissibleY y
| y < 0 = 0
| y >= (boardHeight - h') = boardHeight - h'
| otherwise = y
newpos@(newx, newy) = (permissibleX px, permissibleY py)
-- shuffle pieces over to their new positions
translateC (R.Z :. x :. y) = (R.Z :. (oldw - y - 1) :. x)
newgrid = computeVectorS $ backpermute newsh translateC grid
newblock = Block { position = newpos, guts = newgrid }
canMove <- checkPieceCanMove newblock 0 0
return $ if canMove
then newblock
else block -- can't move, keep same
| ppseafield/lametetris | LameTetris/Game.hs | bsd-3-clause | 10,700 | 0 | 17 | 3,616 | 2,812 | 1,455 | 1,357 | 199 | 3 |
{-# LANGUAGE PolyKinds #-}
module ClenshawRounded where
import FFTRounded
import Data.Number.IReal (bsum)
import Data.Number.IReal.Rounded
import Text.Printf
import Data.Bits
import ListNumSyntax
-- Chebyshev points
chebpts :: Floating a => Int -> [a]
chebpts n = [sin (fromIntegral k*(pi/fromIntegral (2*n'))) | k <- [n',n'-2 .. -n']]
where n' = bit n :: Int
-- Clenshaw-Curtis weights following Waldvogel
weights :: (RealFloat a) => Int -> [a]
weights 0 = [2]
weights n = w/2 : init ws ++ rs
where w : ws = dct (map f [0..n'])
rs = reverse (w/2 : ws)
n' = bit (n-1) :: Int
f k = 2/fromIntegral n'/(1 - 4 * fromIntegral k^2)
cpss :: Floating a => [[a]]
cpss = [chebpts n | n <- [0..]]
wss :: (RealFloat a) => [[a]]
wss = [weights n | n <- [0..]]
-- General quadrature formula
quad f xs ws = bsum (map f xs * ws)
cctest f = do let is = zipWith (quad f) cpss wss
diffs = is - tail is
ns = map (+1) (iterate (*2) 1)
row n d = do printf "%5d " (n :: Integer)
d ?? 10
printf "%5s %s\n" "n" "cc(n) - cc(2*n-1)"
sequence_ (take 9 (zipWith row ns diffs))
{-
cctest f computes Clenshaw-Curtis quadrature of f for 2, 3, 5, 9 .. 257, 513 points and
tabulates the differences between successive values with ten significant digits.
Example:
> let f x = 1/sqrt pi*exp(-x^2)
> cctest f
n cc(n) - cc(2*n-1)
2 -0.4755144465
3 0.4658494488e-1
5 1.3360200833e-3
9 0.1859662191e-6
17 0.2786578649e-13
33 0.5387431708e-28
65 0.6451787408e-61
129 0.4623338430e-135
257 0.2823741182e-301
(2.42 secs, 1912637760 bytes)
This indicates geometric convergence. We can use this (or, of course, well-known error bounds
for Clenshaw-Curtis with analytic integrands) to argue that 128 points are enough to compute the integral
of f over [-1,1] to 100 decimals:
> quad f (cpss!!7) (wss!!7)?100
0.8427007929497148693412206350826092592960669979663029084599378978347172540960108412619833253481448885
(0.72 secs, 649544664 bytes)
As another example, we can see the slow convergence when a low-order derivative of the integrand is
discontinuous:
> cctest ((^3) . abs . (+0.5))
n cc(n) - cc(2*n-1)
2 0.5416666667e1
3 0.5719095842e-1
5 -0.5061390681e-2
9 -0.3745316495e-4
17 -0.8422334880e-5
33 -0.3307784812e-6
65 -0.2654695584e-7
129 -1.4743605157e-9
257 -0.9789596620e-10
(0.48 secs, 239041584 bytes)
-}
| sydow/ireal | applications/ClenshawRounded.hs | bsd-3-clause | 2,543 | 0 | 13 | 643 | 538 | 286 | 252 | 30 | 1 |
{-# LANGUAGE ViewPatterns
, RecordWildCards
#-}
module System.FSWatch.Slave ( createWatchProcess
, createWatchProcessWithListener
, createWatchProcessWL
, watch
, stop
, getNotifies
, waitNotifies
) where
import Control.Concurrent
import Control.Monad
import Control.Monad.IO.Class (MonadIO(..))
import System.IO
import System.Process
import System.FSWatch.Repr (PE, Listener, WatchProcess(..))
createWatchProcess :: (MonadIO m) => String -> Int -> m WatchProcess
createWatchProcess wPath dbi = createWatchProcessWL wPath dbi Nothing
createWatchProcessWithListener :: (MonadIO m) => String -> Int -> Listener -> m WatchProcess
createWatchProcessWithListener wPath dbi listener = createWatchProcessWL wPath dbi (Just listener)
createWatchProcessWL :: (MonadIO m) => String -> Int -> Maybe Listener -> m WatchProcess
createWatchProcessWL wPath dbi listener = liftIO $ do
(Just wStdin, Just wStdout, _, wProcessHandle)
<- createProcess (proc wPath ["--slave", "--delayed-buffering", show dbi]) { std_in = CreatePipe, std_out = CreatePipe }
hSetBuffering wStdin NoBuffering
hSetBuffering wStdout NoBuffering
hSetNewlineMode wStdin (NewlineMode LF LF)
hSetNewlineMode wStdout (NewlineMode LF LF)
wNotifyMVar <- newEmptyMVar
wPollerThreadId <- forkIO $ void $ forever $ do
line <- hGetLine wStdout
let recs = read line
case listener of
(Just lsnr) -> forM_ recs lsnr
_ -> return ()
ns <- tryTakeMVar wNotifyMVar
case ns of
(Just ns') -> putMVar wNotifyMVar (ns' ++ recs)
Nothing -> putMVar wNotifyMVar recs
let wShutdown = do
killThread wPollerThreadId
terminateProcess wProcessHandle
return WatchProcess {..}
watch :: (MonadIO m) => WatchProcess -> FilePath -> m ()
watch (WatchProcess {..}) fn = void $ liftIO $ do
hPutStrLn wStdin ("watch " ++ fn)
stop :: (MonadIO m) => WatchProcess -> FilePath -> m ()
stop (WatchProcess {..}) fn = void $ liftIO $ do
hPutStrLn wStdin ("stop " ++ fn)
getNotifies :: WatchProcess -> IO [PE]
getNotifies (WatchProcess {..}) = do
jpes <- tryTakeMVar wNotifyMVar
case jpes of
(Just pes) -> return pes
_ -> return []
waitNotifies :: WatchProcess -> IO [PE]
waitNotifies (WatchProcess {..}) = do
takeMVar wNotifyMVar
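-- Rough usage sketch; the slave binary path, buffering interval and watched
-- directory below are placeholders, not real values.
--
-- > demo :: IO ()
-- > demo = do
-- >   p <- createWatchProcess "/path/to/fswatch-slave" 100
-- >   watch p "/tmp/watched"
-- >   evs <- waitNotifies p
-- >   print (length evs)
-- >   stop p "/tmp/watched"
-- >   wShutdown p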
| kelemzol/watch | src/System/FSWatch/Slave.hs | bsd-3-clause | 2,565 | 0 | 17 | 723 | 760 | 383 | 377 | 57 | 3 |
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings #-}
{-# OPTIONS -Wall #-}
module Test.Paraiso.ClarisSimple (
test
)where
import Data.Dynamic
import qualified Language.Paraiso.Generator.Claris as C
import qualified Language.Paraiso.Generator.ClarisTrans as C
import Language.Paraiso.Name (mkName)
import Language.Paraiso.Prelude
import Test.Framework (Test, testGroup)
import Test.Paraiso.Adaptor (testResult)
import qualified Test.Paraiso.Option as Option
import Test.Paraiso.ClarisUtil
import Test.QuickCheck
test :: Test
test = testGroup "simple program generated from Claris" tests
where
tests = if Option.cpp then tests1 else []
tests1 = [
testResult "adder test" $ quickCheckWithResult myArgs testQuiz
]
    myArgs = stdArgs { maxSuccess = 10, maxDiscard = 100, maxSize = 1000, chatty = False }
data AdderQuiz = AdderQuiz {adderQuizAns :: Int, adderProg :: C.Program, progText :: Text} deriving Show
instance Arbitrary AdderQuiz where
arbitrary =
flip fmap arbitrary $
\(x',y') ->
let [x,y] = map (`mod` 65536) [x', y']
prog = adderProgram x y
in
AdderQuiz {
adderProg = prog,
adderQuizAns = x+y,
progText = C.translate C.sourceFile prog
}
testQuiz :: AdderQuiz -> Bool
testQuiz (AdderQuiz ans prog _) = ans == evaluate prog
adderProgram :: Int -> Int -> C.Program
adderProgram x1 x2 =
C.Program {
C.progName = mkName "simple",
C.topLevel =
[ C.Exclusive C.SourceFile $ C.StmtPrpr $ C.PrprInclude C.Chevron "iostream" ,
C.FuncDef $ (C.function tInt (mkName "main"))
{ C.funcBody= mainBody },
C.FuncDef $ (C.function tInt (mkName "calc"))
{ C.funcArgs = [varX, varY] ,
C.funcBody = calcBody
}
]
}
where
varX = C.Var tInt (mkName "x")
varY = C.Var tInt (mkName "y")
varZ = C.Var tInt (mkName "z")
mainBody =
[C.StmtExpr $ cout << message << endl,
C.StmtReturn $ C.toDyn (0::Int) ]
calcBody =
[C.VarDefSub varZ (C.Imm $ toDyn(0::Int)),
C.StmtExpr $ C.Op2Infix "+=" (C.VarExpr varZ) $
C.Op2Infix "+" (C.VarExpr varX) (C.VarExpr varY),
C.StmtReturn $ (C.VarExpr varZ)
]
cout = C.VarExpr $ C.Var C.UnknownType $ mkName "std::cout"
endl = C.VarExpr $ C.Var C.UnknownType $ mkName "std::endl"
message = C.FuncCallUsr (mkName "calc") [C.toDyn x1, C.toDyn x2]
infixl 1 <<
(<<) = C.Op2Infix "<<"
tInt :: C.TypeRep
tInt = C.typeOf (undefined :: Int)
| drmaruyama/Paraiso | Test/Paraiso/ClarisSimple.hs | bsd-3-clause | 2,667 | 0 | 13 | 750 | 848 | 472 | 376 | 62 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Set1.Challenge04Spec
( spec
) where
import Test.Hspec
import Test.QuickCheck
import Control.Exception (evaluate)
import qualified Challenges.Set1 as S1
import qualified Crypto.Xor as Xor
main :: IO ()
main = hspec spec
result =
"Now that the party is jumping\n"
spec :: Spec
spec = do
describe "Challenge 4" $ do
fileContents <- runIO (readFile "./static/4.txt")
it ("should find the one string that when decrypted says " ++ result) $ do
(Xor.string $ S1.challenge4 $ lines fileContents) `shouldBe` result
| eelcoh/cryptochallenge | test/Set1/Challenge04Spec.hs | bsd-3-clause | 581 | 0 | 17 | 115 | 151 | 83 | 68 | 18 | 1 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.ARB.ShaderObjects
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.ARB.ShaderObjects (
-- * Extension Support
glGetARBShaderObjects,
gl_ARB_shader_objects,
-- * Enums
pattern GL_BOOL_ARB,
pattern GL_BOOL_VEC2_ARB,
pattern GL_BOOL_VEC3_ARB,
pattern GL_BOOL_VEC4_ARB,
pattern GL_FLOAT_MAT2_ARB,
pattern GL_FLOAT_MAT3_ARB,
pattern GL_FLOAT_MAT4_ARB,
pattern GL_FLOAT_VEC2_ARB,
pattern GL_FLOAT_VEC3_ARB,
pattern GL_FLOAT_VEC4_ARB,
pattern GL_INT_VEC2_ARB,
pattern GL_INT_VEC3_ARB,
pattern GL_INT_VEC4_ARB,
pattern GL_OBJECT_ACTIVE_UNIFORMS_ARB,
pattern GL_OBJECT_ACTIVE_UNIFORM_MAX_LENGTH_ARB,
pattern GL_OBJECT_ATTACHED_OBJECTS_ARB,
pattern GL_OBJECT_COMPILE_STATUS_ARB,
pattern GL_OBJECT_DELETE_STATUS_ARB,
pattern GL_OBJECT_INFO_LOG_LENGTH_ARB,
pattern GL_OBJECT_LINK_STATUS_ARB,
pattern GL_OBJECT_SHADER_SOURCE_LENGTH_ARB,
pattern GL_OBJECT_SUBTYPE_ARB,
pattern GL_OBJECT_TYPE_ARB,
pattern GL_OBJECT_VALIDATE_STATUS_ARB,
pattern GL_PROGRAM_OBJECT_ARB,
pattern GL_SAMPLER_1D_ARB,
pattern GL_SAMPLER_1D_SHADOW_ARB,
pattern GL_SAMPLER_2D_ARB,
pattern GL_SAMPLER_2D_RECT_ARB,
pattern GL_SAMPLER_2D_RECT_SHADOW_ARB,
pattern GL_SAMPLER_2D_SHADOW_ARB,
pattern GL_SAMPLER_3D_ARB,
pattern GL_SAMPLER_CUBE_ARB,
pattern GL_SHADER_OBJECT_ARB,
-- * Functions
glAttachObjectARB,
glCompileShaderARB,
glCreateProgramObjectARB,
glCreateShaderObjectARB,
glDeleteObjectARB,
glDetachObjectARB,
glGetActiveUniformARB,
glGetAttachedObjectsARB,
glGetHandleARB,
glGetInfoLogARB,
glGetObjectParameterfvARB,
glGetObjectParameterivARB,
glGetShaderSourceARB,
glGetUniformLocationARB,
glGetUniformfvARB,
glGetUniformivARB,
glLinkProgramARB,
glShaderSourceARB,
glUniform1fARB,
glUniform1fvARB,
glUniform1iARB,
glUniform1ivARB,
glUniform2fARB,
glUniform2fvARB,
glUniform2iARB,
glUniform2ivARB,
glUniform3fARB,
glUniform3fvARB,
glUniform3iARB,
glUniform3ivARB,
glUniform4fARB,
glUniform4fvARB,
glUniform4iARB,
glUniform4ivARB,
glUniformMatrix2fvARB,
glUniformMatrix3fvARB,
glUniformMatrix4fvARB,
glUseProgramObjectARB,
glValidateProgramARB
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
| haskell-opengl/OpenGLRaw | src/Graphics/GL/ARB/ShaderObjects.hs | bsd-3-clause | 2,639 | 0 | 5 | 347 | 336 | 218 | 118 | 80 | 0 |
import Turbinado.Controller
import App.Models.Posts as PostsModel
home :: Controller ()
home = do posts <- PostsModel.findAll :: Controller [Posts]
          setViewDataValue "posts-titles" $ map (\p -> (Prelude.show (fromJust (_id p)), title p)) posts
          return ()
| abuiles/turbinado-blog | App/Controllers/Manage.hs | bsd-3-clause | 271 | 1 | 16 | 52 | 108 | 53 | 55 | 6 | 1 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
module Network.HPACK.HeaderBlock.Encode (
encodeHeader
, encodeTokenHeader
, encodeString
, encodeS
) where
import Control.Exception (bracket, throwIO)
import qualified Control.Exception as E
import qualified Data.ByteString as BS
import Data.ByteString.Internal (create, memcpy)
import Data.IORef
import Foreign.Marshal.Alloc (mallocBytes, free)
import Foreign.Ptr (minusPtr)
import Network.ByteOrder
import Imports
import Network.HPACK.HeaderBlock.Integer
import Network.HPACK.Huffman
import Network.HPACK.Table
import Network.HPACK.Token
import Network.HPACK.Types
----------------------------------------------------------------
changeTableSize :: DynamicTable -> WriteBuffer -> IO ()
changeTableSize dyntbl wbuf = do
msiz <- needChangeTableSize dyntbl
case msiz of
Keep -> return ()
Change lim -> do
renewDynamicTable lim dyntbl
change wbuf lim
Ignore lim -> do
resetLimitForEncoding dyntbl
change wbuf lim
----------------------------------------------------------------
-- | Converting 'HeaderList' to the HPACK format.
-- This function has overhead of allocating/freeing a temporary buffer.
-- 'BufferOverrun' will be thrown if the temporary buffer is too small.
encodeHeader :: EncodeStrategy
-> Size -- ^ The size of a temporary buffer.
-> DynamicTable
-> HeaderList
-> IO ByteString -- ^ An HPACK format
encodeHeader stgy siz dyntbl hs = encodeHeader' stgy siz dyntbl hs'
where
hs' = map (\(k,v) -> let t = toToken k in (t,v)) hs
-- | Converting 'HeaderList' to the HPACK format.
-- 'BufferOverrun' will be thrown if the temporary buffer is too small.
encodeHeader' :: EncodeStrategy
-> Size -- ^ The size of a temporary buffer.
-> DynamicTable
-> TokenHeaderList
-> IO ByteString -- ^ An HPACK format
encodeHeader' stgy siz dyntbl hs = bracket (mallocBytes siz) free enc
where
enc buf = do
(hs',len) <- encodeTokenHeader buf siz stgy True dyntbl hs
case hs' of
[] -> create len $ \p -> memcpy p buf len
_ -> throwIO BufferOverrun
----------------------------------------------------------------
-- | Converting 'TokenHeaderList' to the HPACK format directly in the buffer.
--
-- When calling this function for a new 'TokenHeaderList',
-- 4th argument must be 'True'.
--
-- The return value is a pair of leftover 'TokenHeaderList' and
-- how many bytes are filled in the buffer.
-- If the leftover is empty, the encoding is finished.
-- Otherwise, this function should be called with it again.
-- 4th argument must be 'False'.
--
-- 4th argument is relating to dynamic table size update.
-- If 'True' and the limit is set by 'setLimitForEncoding',
-- dynamic table size update is generated at the beginning of
-- the HPACK format.
--
encodeTokenHeader :: Buffer
-> BufferSize
-> EncodeStrategy
-> Bool -- ^ 'True' at the first time, 'False' when continued.
-> DynamicTable
-> TokenHeaderList
-> IO (TokenHeaderList, Int) -- ^ Leftover, filled length
encodeTokenHeader buf siz EncodeStrategy{..} first dyntbl hs0 = do
wbuf <- newWriteBuffer buf siz
when first $ changeTableSize dyntbl wbuf
let fa = indexedHeaderField dyntbl wbuf useHuffman
fb = literalHeaderFieldWithIncrementalIndexingIndexedName dyntbl wbuf useHuffman
fc = literalHeaderFieldWithIncrementalIndexingNewName dyntbl wbuf useHuffman
fd = literalHeaderFieldWithoutIndexingIndexedName dyntbl wbuf useHuffman
fe = literalHeaderFieldWithoutIndexingNewName dyntbl wbuf useHuffman
fe' = literalHeaderFieldWithoutIndexingNewName' dyntbl wbuf useHuffman
rev = getRevIndex dyntbl
step0 = case compressionAlgo of
Naive -> naiveStep fe'
Static -> staticStep fa fd fe
Linear -> linearStep rev fa fb fc fd
ref1 <- currentOffset wbuf >>= newIORef
ref2 <- newIORef hs0
loop wbuf ref1 ref2 step0 hs0 `E.catch` \BufferOverrun -> return ()
end <- readIORef ref1
let len = end `minusPtr` buf
hs <- readIORef ref2
return (hs, len)
where
loop wbuf ref1 ref2 step hsx = go hsx
where
go [] = return ()
go ((t,v):hs) = do
_ <- step t v
currentOffset wbuf >>= writeIORef ref1
writeIORef ref2 hs
go hs
----------------------------------------------------------------
naiveStep :: (HeaderName -> HeaderValue -> IO ()) -> Token -> HeaderValue -> IO ()
naiveStep fe t v = fe (tokenFoldedKey t) v
----------------------------------------------------------------
staticStep :: FA -> FD -> FE -> Token -> HeaderValue -> IO ()
staticStep fa fd fe t v = lookupRevIndex' t v fa fd fe
----------------------------------------------------------------
linearStep :: RevIndex -> FA -> FB -> FC -> FD -> Token -> HeaderValue -> IO ()
linearStep rev fa fb fc fd t v = lookupRevIndex t v fa fb fc fd rev
----------------------------------------------------------------
type FA = HIndex -> IO ()
type FB = HeaderValue -> Entry -> HIndex -> IO ()
type FC = HeaderName -> HeaderValue -> Entry -> IO ()
type FD = HeaderValue -> HIndex -> IO ()
type FE = HeaderName -> HeaderValue -> IO ()
-- 6.1. Indexed Header Field Representation
-- Indexed Header Field
indexedHeaderField
:: DynamicTable -> WriteBuffer -> Bool -> FA
indexedHeaderField dyntbl wbuf _ hidx =
fromHIndexToIndex dyntbl hidx >>= index wbuf
-- 6.2.1. Literal Header Field with Incremental Indexing
-- Literal Header Field with Incremental Indexing -- Indexed Name
literalHeaderFieldWithIncrementalIndexingIndexedName
:: DynamicTable -> WriteBuffer -> Bool -> FB
literalHeaderFieldWithIncrementalIndexingIndexedName dyntbl wbuf huff v ent hidx = do
fromHIndexToIndex dyntbl hidx >>= indexedName wbuf huff 6 set01 v
insertEntry ent dyntbl
-- 6.2.1. Literal Header Field with Incremental Indexing
-- Literal Header Field with Incremental Indexing -- New Name
literalHeaderFieldWithIncrementalIndexingNewName
:: DynamicTable -> WriteBuffer -> Bool -> FC
literalHeaderFieldWithIncrementalIndexingNewName dyntbl wbuf huff k v ent = do
newName wbuf huff set01 k v
insertEntry ent dyntbl
-- 6.2.2. Literal Header Field without Indexing
-- Literal Header Field without Indexing -- Indexed Name
literalHeaderFieldWithoutIndexingIndexedName
:: DynamicTable -> WriteBuffer -> Bool -> FD
literalHeaderFieldWithoutIndexingIndexedName dyntbl wbuf huff v hidx =
fromHIndexToIndex dyntbl hidx >>= indexedName wbuf huff 4 set0000 v
-- 6.2.2. Literal Header Field without Indexing
-- Literal Header Field without Indexing -- New Name
literalHeaderFieldWithoutIndexingNewName
:: DynamicTable -> WriteBuffer -> Bool -> FE
literalHeaderFieldWithoutIndexingNewName _ wbuf huff k v =
newName wbuf huff set0000 k v
literalHeaderFieldWithoutIndexingNewName'
:: DynamicTable -> WriteBuffer -> Bool -> HeaderName -> HeaderValue -> IO ()
literalHeaderFieldWithoutIndexingNewName' _ wbuf huff k v =
newName wbuf huff set0000 k v
----------------------------------------------------------------
{-# INLINE change #-}
change :: WriteBuffer -> Int -> IO ()
change wbuf i = encodeI wbuf set001 5 i
{-# INLINE index #-}
index :: WriteBuffer -> Int -> IO ()
index wbuf i = encodeI wbuf set1 7 i
-- Using Huffman encoding
{-# INLINE indexedName #-}
indexedName :: WriteBuffer -> Bool -> Int -> Setter -> HeaderValue -> Index -> IO ()
indexedName wbuf huff n set v idx = do
encodeI wbuf set n idx
encStr wbuf huff v
-- Using Huffman encoding
{-# INLINE newName #-}
newName :: WriteBuffer -> Bool -> Setter -> HeaderName -> HeaderValue -> IO ()
newName wbuf huff set k v = do
write8 wbuf $ set 0
encStr wbuf huff k
encStr wbuf huff v
----------------------------------------------------------------
type Setter = Word8 -> Word8
-- Assuming MSBs are 0.
set1, set01, set001, set0000 :: Setter
set1 x = x `setBit` 7
set01 x = x `setBit` 6
set001 x = x `setBit` 5
-- set0001 x = x `setBit` 4 -- Never indexing
set0000 = id
----------------------------------------------------------------
-- | String encoding.
-- The algorithm based on copy avoidance and
-- selection of better result of huffman or raw.
encodeS :: WriteBuffer
-> Bool -- ^ Use Huffman if efficient
-> (Word8 -> Word8) -- ^ Setting prefix
-> (Word8 -> Word8) -- ^ Setting huffman flag
-> Int -- ^ N+
-> ByteString -- ^ Target
-> IO ()
encodeS wbuf False set _ n bs = do
let len = BS.length bs
encodeI wbuf set n len
copyByteString wbuf bs
encodeS wbuf True set setH n bs = do
let origLen = BS.length bs
expectedLen = (origLen `div` 10) * 8 -- 80%: decided by examples
expectedIntLen = integerLength n expectedLen
ff wbuf expectedIntLen
len <- encodeH wbuf bs
let intLen = integerLength n len
if origLen < len then do
ff wbuf (negate (expectedIntLen + len))
encodeI wbuf set n origLen
copyByteString wbuf bs
else if intLen == expectedIntLen then do
ff wbuf (negate (expectedIntLen + len))
encodeI wbuf (set . setH) n len
ff wbuf len
else do
let gap = intLen - expectedIntLen
shiftLastN wbuf gap len
ff wbuf (negate (intLen + len))
encodeI wbuf (set . setH) n len
ff wbuf len
{-# INLINE encStr #-}
encStr :: WriteBuffer -> Bool -> ByteString -> IO ()
encStr wbuf h bs = encodeS wbuf h id (`setBit` 7) 7 bs
-- | String encoding (7+) with a temporary buffer whose size is 4096.
encodeString :: Bool -- ^ Use Huffman if efficient
-> ByteString -- ^ Target
-> IO ByteString
encodeString h bs = withWriteBuffer 4096 $ \wbuf -> encStr wbuf h bs
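-- Rough end-to-end sketch; the 4096 sizes are arbitrary and
-- defaultEncodeStrategy is assumed to be in scope (it is part of the public
-- Network.HPACK API).
--
-- > demo :: IO ByteString
-- > demo = do
-- >   dyntbl <- newDynamicTableForEncoding 4096
-- >   encodeHeader defaultEncodeStrategy 4096 dyntbl [("content-type", "text/plain")]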
{-
N+ 1 2 3 <- bytes
8 254 382 16638
7 126 254 16510
6 62 190 16446
5 30 158 16414
4 14 142 16398
3 6 134 16390
2 2 130 16386
1 0 128 16384
-}
{-# INLINE integerLength #-}
integerLength :: Int -> Int -> Int
integerLength 8 l
| l <= 254 = 1
| l <= 382 = 2
| otherwise = 3
integerLength 7 l
| l <= 126 = 1
| l <= 254 = 2
| otherwise = 3
integerLength 6 l
| l <= 62 = 1
| l <= 190 = 2
| otherwise = 3
integerLength 5 l
| l <= 30 = 1
| l <= 158 = 2
| otherwise = 3
integerLength 4 l
| l <= 14 = 1
| l <= 142 = 2
| otherwise = 3
integerLength 3 l
| l <= 6 = 1
| l <= 134 = 2
| otherwise = 3
integerLength 2 l
| l <= 2 = 1
| l <= 130 = 2
| otherwise = 3
integerLength _ l
| l <= 0 = 1
| l <= 128 = 2
| otherwise = 3
| kazu-yamamoto/http2 | Network/HPACK/HeaderBlock/Encode.hs | bsd-3-clause | 10,921 | 0 | 15 | 2,684 | 2,687 | 1,355 | 1,332 | 218 | 4 |
{-# language CPP #-}
-- | = Name
--
-- VK_EXT_texel_buffer_alignment - device extension
--
-- == VK_EXT_texel_buffer_alignment
--
-- [__Name String__]
-- @VK_EXT_texel_buffer_alignment@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 282
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- - Requires @VK_KHR_get_physical_device_properties2@
--
-- [__Deprecation state__]
--
-- - /Promoted/ to
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#versions-1.3-promotions Vulkan 1.3>
--
-- [__Contact__]
--
-- - Jeff Bolz
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_EXT_texel_buffer_alignment] @jeffbolznv%0A<<Here describe the issue or question you have about the VK_EXT_texel_buffer_alignment extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2019-06-06
--
-- [__Interactions and External Dependencies__]
--
-- - Promoted to Vulkan 1.3 Core
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Contributors__]
--
-- - Jeff Bolz, NVIDIA
--
-- == Description
--
-- This extension adds more expressive alignment requirements for uniform
-- and storage texel buffers. Some implementations have single texel
-- alignment requirements that cannot be expressed via
-- 'Vulkan.Core10.DeviceInitialization.PhysicalDeviceLimits'::@minTexelBufferOffsetAlignment@.
--
-- == New Structures
--
-- - Extending
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2',
-- 'Vulkan.Core10.Device.DeviceCreateInfo':
--
-- - 'PhysicalDeviceTexelBufferAlignmentFeaturesEXT'
--
-- - Extending
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceProperties2':
--
-- - 'PhysicalDeviceTexelBufferAlignmentPropertiesEXT'
--
-- == New Enum Constants
--
-- - 'EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME'
--
-- - 'EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT'
--
-- - 'STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT'
--
-- == Promotion to Vulkan 1.3
--
-- Functionality in this extension is included in core Vulkan 1.3, with the
-- EXT suffix omitted. However, only the properties structure is promoted.
-- The feature structure is not promoted. The original type name is still
-- available as an alias of the core functionality.
--
-- == Version History
--
-- - Revision 1, 2019-06-06 (Jeff Bolz)
--
-- - Initial draft
--
-- == See Also
--
-- 'PhysicalDeviceTexelBufferAlignmentFeaturesEXT',
-- 'PhysicalDeviceTexelBufferAlignmentPropertiesEXT'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_EXT_texel_buffer_alignment Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_EXT_texel_buffer_alignment ( pattern STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT
, PhysicalDeviceTexelBufferAlignmentFeaturesEXT(..)
, PhysicalDeviceTexelBufferAlignmentPropertiesEXT
, EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION
, pattern EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION
, EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME
, pattern EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME
) where
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import Foreign.Ptr (Ptr)
import Data.Kind (Type)
import Vulkan.Core10.FundamentalTypes (bool32ToBool)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.Core13.Promoted_From_VK_EXT_texel_buffer_alignment (PhysicalDeviceTexelBufferAlignmentProperties)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES))
-- No documentation found for TopLevel "VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT"
pattern STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT = STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES
-- | VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT - Structure describing
-- the texel buffer alignment features that can be supported by an
-- implementation
--
-- = Members
--
-- This structure describes the following feature:
--
-- = Description
--
-- If the 'PhysicalDeviceTexelBufferAlignmentFeaturesEXT' structure is
-- included in the @pNext@ chain of the
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2'
-- structure passed to
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceFeatures2',
-- it is filled in to indicate whether each corresponding feature is
-- supported. 'PhysicalDeviceTexelBufferAlignmentFeaturesEXT' /can/ also be
-- used in the @pNext@ chain of 'Vulkan.Core10.Device.DeviceCreateInfo' to
-- selectively enable these features.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_EXT_texel_buffer_alignment VK_EXT_texel_buffer_alignment>,
-- 'Vulkan.Core10.FundamentalTypes.Bool32',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data PhysicalDeviceTexelBufferAlignmentFeaturesEXT = PhysicalDeviceTexelBufferAlignmentFeaturesEXT
{ -- | #features-texelBufferAlignment# @texelBufferAlignment@ indicates whether
-- the implementation uses more specific alignment requirements advertised
-- in
-- 'Vulkan.Core13.Promoted_From_VK_EXT_texel_buffer_alignment.PhysicalDeviceTexelBufferAlignmentProperties'
-- rather than
-- 'Vulkan.Core10.DeviceInitialization.PhysicalDeviceLimits'::@minTexelBufferOffsetAlignment@.
texelBufferAlignment :: Bool }
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceTexelBufferAlignmentFeaturesEXT)
#endif
deriving instance Show PhysicalDeviceTexelBufferAlignmentFeaturesEXT
instance ToCStruct PhysicalDeviceTexelBufferAlignmentFeaturesEXT where
withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PhysicalDeviceTexelBufferAlignmentFeaturesEXT{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (texelBufferAlignment))
f
cStructSize = 24
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (zero))
f
instance FromCStruct PhysicalDeviceTexelBufferAlignmentFeaturesEXT where
peekCStruct p = do
texelBufferAlignment <- peek @Bool32 ((p `plusPtr` 16 :: Ptr Bool32))
pure $ PhysicalDeviceTexelBufferAlignmentFeaturesEXT
(bool32ToBool texelBufferAlignment)
instance Storable PhysicalDeviceTexelBufferAlignmentFeaturesEXT where
sizeOf ~_ = 24
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PhysicalDeviceTexelBufferAlignmentFeaturesEXT where
zero = PhysicalDeviceTexelBufferAlignmentFeaturesEXT
zero
-- No documentation found for TopLevel "VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT"
type PhysicalDeviceTexelBufferAlignmentPropertiesEXT = PhysicalDeviceTexelBufferAlignmentProperties
type EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION = 1
-- No documentation found for TopLevel "VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION"
pattern EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION :: forall a . Integral a => a
pattern EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION = 1
type EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME = "VK_EXT_texel_buffer_alignment"
-- No documentation found for TopLevel "VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME"
pattern EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME = "VK_EXT_texel_buffer_alignment"
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_EXT_texel_buffer_alignment.hs | bsd-3-clause | 9,497 | 0 | 14 | 1,471 | 1,029 | 647 | 382 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
module Commands where
import Control.Lens
import Data.Attoparsec.Text as A
import qualified Data.Text as T
import Data.Char (isSpace)
import Twotter (Message(..), UserName)
data Command = POST {_message :: Message}
| READ {_userName :: UserName }
| FOLLOW {_who :: UserName , _whom :: UserName}
| WALL {_userName :: UserName}
deriving (Show, Eq)
$(makePrisms ''Command)
command :: Parser Command
command = choice $ map (skipSpace >>) [post_, wall_, follow_, read_]
where post_ :: Parser Command
post_ = do _author <- T.strip <$> A.takeWhile (/= '-')
string "->"
_content <- T.strip <$> takeText
return (POST Message{..})
read_ :: Parser Command
read_ = do _userName <- T.strip <$> takeText
return READ{..}
wall_ :: Parser Command
wall_ = do _userName <- T.strip <$> A.takeWhile (not . isSpace)
skipSpace
string "wall"
return WALL{..}
follow_ :: Parser Command
follow_ = do _who <- T.strip <$> A.takeWhile (not . isSpace)
skipSpace
string "follows"
skipSpace
_whom <- T.strip <$> A.takeWhile (not . isSpace)
return FOLLOW{..}
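-- Rough parse sketch (the exact Message fields come from Twotter, so results
-- are indicated only informally):
--
-- > parseOnly command "Alice -> I love the weather today" -- a POST
-- > parseOnly command "Charlie follows Alice"             -- FOLLOW "Charlie" "Alice"
-- > parseOnly command "Alice wall"                        -- WALL "Alice"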
| epsilonhalbe/lambdadojo | src/Commands.hs | bsd-3-clause | 1,563 | 0 | 13 | 564 | 423 | 225 | 198 | 38 | 1 |
data TrafficLight = Red | Yellow | Green
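-- Hand-rolled Eq instance: two lights are equal exactly when they are the same
-- colour; the final catch-all covers every mixed pair.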
instance Eq TrafficLight where
Red == Red = True
Yellow == Yellow = True
Green == Green = True
_ == _ = False
instance Show TrafficLight where
show Red = "Red"
show Yellow = "Yellow"
show Green = "Green"
main = do
    print (Red == Red)
    print (Red == Yellow)
    print Red
| yuncliu/Learn | haskell/trafficLight.hs | bsd-3-clause | 348 | 0 | 9 | 103 | 135 | 65 | 70 | 14 | 1 |
-- | Output the results to HTML
--
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
module Criterion.ToHtml.Html
( report
) where
import Data.Monoid (mempty)
import Data.Aeson (encode)
import Data.ByteString (ByteString)
import Text.Blaze (unsafeLazyByteString, unsafeByteString, (!))
import Text.Blaze.Html
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Criterion.ToHtml.Result
report :: ByteString -> [ResultGroup] -> Html
report js results = H.docTypeHtml $ do
H.head $ do
H.title "Criterion results"
-- jQuery for DOM manipulation
H.script ! A.type_ "text/javascript"
! A.src "http://code.jquery.com/jquery-latest.js"
$ mempty
-- Our results as JSON
H.script ! A.type_ "text/javascript" $ do
"var criterionResults = "
unsafeLazyByteString $ encode results
";"
H.script ! A.type_ "text/javascript" $ unsafeByteString js
H.style ! A.type_ "text/css" $ do
"html {"
" font-size: 16px;"
" font-family: sans-serif;"
"}"
"body {"
" width: 600px;"
" margin: 0px auto 0px auto;"
"}"
"div.controls {"
" float: right;"
"}"
"div.results {"
" margin-bottom: 50px;"
"}"
H.body mempty
| jaspervdj/criterion-to-html | src/Criterion/ToHtml/Html.hs | bsd-3-clause | 1,486 | 0 | 15 | 484 | 296 | 151 | 145 | 40 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Sky.Learn.GHCGenerics where
import GHC.Generics
import Data.Typeable
import Sky.Classes.Isomorphism.Monomorphic
import Sky.Implementations.Isomorphism
import Sky.Implementations.Isomorphism.MonoIso
data Stupid a = Stupid a
deriving (Show, Eq, Generic)
data UserTree a
= Leaf
| Node a (UserTree a) (UserTree a)
deriving (Show, Eq, Generic)
data Expr a b
= LiteralA a
| LiteralB b
| LiteralAB a b
| LiteralABA a b a
| LiteralABAB a b a b
deriving (Show, Eq, Generic)
{- -- This is what Generics will create:
M1 D _ (
(
M1 C _ (M1 S _ (K1 R Bool))
:+: M1 C _ (M1 S _ (K1 R Bool))
) :+: (
M1 C _ (M1 S _ (K1 R Bool) :*: M1 S _ (K1 R Bool))
:+: (
M1 C _ (M1 S _ (K1 R Bool) :*: (M1 S _ (K1 R Bool) :*: M1 S _ (K1 R Bool)))
:+: M1 C _ ((M1 S _ (K1 R Bool) :*: M1 S _ (K1 R Bool)) :*: (M1 S _ (K1 R Bool) :*: M1 S _ (K1 R Bool)))
)
)
) Bool
-}
{- -- And this is Template Haskell:
TyConI (DataD [] Sky.Learn.GHCGenerics.Expr [KindedTV a_1627402939 StarT,KindedTV b_1627402940 StarT]
[NormalC Sky.Learn.GHCGenerics.LiteralA [(NotStrict,VarT a_1627402939)]
,NormalC Sky.Learn.GHCGenerics.LiteralB [(NotStrict,VarT b_1627402940)]
,NormalC Sky.Learn.GHCGenerics.LiteralAB [(NotStrict,VarT a_1627402939),(NotStrict,VarT b_1627402940)]
,NormalC Sky.Learn.GHCGenerics.LiteralABA [(NotStrict,VarT a_1627402939),(NotStrict,VarT b_1627402940),(NotStrict,VarT a_1627402939)]
,NormalC Sky.Learn.GHCGenerics.LiteralABAB [(NotStrict,VarT a_1627402939),(NotStrict,VarT b_1627402940),(NotStrict,VarT a_1627402939),(NotStrict,VarT b_1627402940)]
]
[]
)
-}
stupidExample = Stupid False
example = Node 5 Leaf Leaf
example2 = LiteralABAB True False True False
{- Documentation reminder:
M1 meta-info
U1 unit (e.g. Leaf)
K1 P parameter
K1 R recursive
L1 left argument of :+: ("Left" on Either)
R1 right argument of :+: ("Right" on Either)
-}
class GPrint f where
gprint :: f a -> String
instance GPrint U1 where
gprint _ = "()"
instance (GPrint a, GPrint b) => GPrint (a :*: b) where
gprint :: (a :*: b) p -> String
gprint _ = "(" ++ gprint (undefined :: a p) ++ ", " ++ gprint (undefined :: b p) ++ ")"
instance (GPrint a, GPrint b) => GPrint (a :+: b) where
gprint :: (a :+: b) p -> String
gprint _ = "(Either " ++ gprint (undefined :: a p) ++ " " ++ gprint (undefined :: b p) ++ ")"
instance (GPrint a) => GPrint (M1 i c a) where
gprint :: M1 i c a p -> String
gprint _ = gprint (undefined :: a p)
instance Typeable a => GPrint (K1 i a) where
gprint :: K1 i a p -> String
gprint _ = show $ typeRep (Proxy :: Proxy a)
printIsoType :: (Generic a, GPrint (Rep a)) => a -> String
printIsoType a = gprint (from a)
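-- For instance (sketch): `printIsoType stupidExample` renders the generic
-- shape of `Stupid Bool`, while `printIsoType example2` renders the much
-- larger sum-of-products shape of `Expr Bool Bool`.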
----------------------------------------------------------------------------------------------------
-- Isomorphism composition for algebraic data structures
class GIso t x where
gIso :: Iso (t a) x
instance GIso U1 () where
gIso = iso (\U1 -> ()) (\() -> U1)
instance (GIso a x, GIso b y) => GIso (a :*: b) (x, y) where
gIso = iso from to where
from (a :*: b) = (apply gIso a, apply gIso b)
to (x, y) = (unapply gIso x :*: unapply gIso y)
instance (GIso a x, GIso b y) => GIso (a :+: b) (Either x y) where
gIso = iso from to where
from (L1 a) = Left $ apply gIso a
from (R1 b) = Right $ apply gIso b
to (Left x) = L1 $ unapply gIso x
to (Right y) = R1 $ unapply gIso y
instance (GIso a x) => GIso (M1 i c a) x where
gIso = iso from to where
from (M1 a) = apply gIso a
to x = M1 $ unapply gIso x
instance GIso (K1 i a) a where
gIso = iso from to where
from (K1 a) = a
to x = K1 x
ii :: (Generic a, GIso (Rep a) b) => Iso a b
ii = iso aToRep repToA where
aToRep a = apply gIso (from a)
repToA r = to (unapply gIso r)
----------------------------------------------------------------------------------------------------
-- "Goedel numbers" for ADTs
class GNum t where
gMax :: t a -> Int
gNum :: t a -> Int -> Int
gGoedel :: t a -> Int -> [Int]
instance GNum U1 where
gMax :: U1 p -> Int
gMax _ = 1
gNum :: U1 p -> Int -> Int
gNum U1 i = i
gGoedel :: U1 p -> Int -> [Int]
gGoedel _ i = [i]
instance Typeable a => GNum (K1 i a) where
gMax :: K1 i a p -> Int
gMax _ = 1
gNum :: K1 i a p -> Int -> Int
gNum (K1 a) i = i
gGoedel :: K1 i a p -> Int -> [Int]
gGoedel _ i = []
instance (GNum a, GNum b) => GNum (a :*: b) where
gMax :: (a :*: b) p -> Int
gMax _ = 1
gNum :: (a :*: b) p -> Int -> Int
gNum _ i = i
gGoedel :: (a :*: b) p -> Int -> [Int]
gGoedel _ i = []
instance (GNum a, GNum b) => GNum (a :+: b) where
gMax :: (a :+: b) p -> Int
gMax _ = gMax (undefined :: a p) + gMax (undefined :: b p)
gNum :: (a :+: b) p -> Int -> Int
gNum (L1 a) i = gNum a i
gNum (R1 b) i = let
left = gMax (undefined :: a p)
in gNum b (i + left)
gGoedel :: (a :+: b) p -> Int -> [Int]
gGoedel _ i = []
instance (GNum a) => GNum (M1 i c a) where
gMax :: M1 i c a p -> Int
gMax _ = gMax (undefined :: a p)
gNum :: M1 i c a p -> Int -> Int
gNum (M1 x) i = gNum x i
gGoedel :: M1 i c a p -> Int -> [Int]
gGoedel _ i = []
cMax :: forall a. (Generic a, GNum (Rep a)) => a -> Int
cMax a = gMax (from a)
cNum :: forall a. (Generic a, GNum (Rep a)) => a -> Int
cNum a = gNum (from a) 0
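-- Sketch of the numbering: constructors are indexed left to right, so
-- e.g. `cNum (LiteralA () :: Expr () ())` should be 0 and
-- `cNum (LiteralB () :: Expr () ())` should be 1, while `cMax` of any Expr
-- value is 5, its constructor count.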
----------------------------------------------------------------------------------------------------
---- Actual type: Iso a x1 -> Iso b x2 -> Iso (Either a b) (x1|x2)
--compose :: (x -> Bool) -> Iso a x -> Iso b x -> Iso (Either a b) x
--compose decision iso1 iso2 = iso
| xicesky/sky-haskell-playground | src/Sky/Learn/GHCGenerics.hs | bsd-3-clause | 6,283 | 0 | 12 | 1,826 | 2,030 | 1,052 | 978 | 119 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
module BSPM.Engine.Local
( BSPM ()
, WorkerState ()
, receive
, run
, send
) where
import BSPM.Util.CriticalSection
import BSPM.Util.RunOnce
import Control.Concurrent
import Control.Concurrent.STM.TChan
import Control.Exception
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.STM
import Data.Hashable
import qualified Data.HashTable.IO as H
import Data.IORef
type HashTable k v = H.BasicHashTable k v
newtype BSPM a k r = BSPM { unBSPM :: WorkerState a k -> IO r }
instance Functor (BSPM a k) where
fmap = liftM
instance Applicative (BSPM a k) where
pure = return
(<*>) = ap
instance Monad (BSPM a k) where
return = BSPM . const . return
a >>= f = BSPM $ \w -> unBSPM a w >>= flip unBSPM w . f
instance MonadIO (BSPM a k) where
liftIO = BSPM . const
data MessageWrapper a = DataMessage a
| EndReceive
| Halt
deriving ( Show, Eq )
data WorkerState a k = WorkerState
{ _chan :: !(TChan (MessageWrapper a))
, _currentStep :: !(StepState a k)
}
data StepState a k = StepState
{ _activeWorkers :: !(IORef Int)
, _nextChans :: !(CriticalSection (HashTable k (TChan (MessageWrapper a))))
, _nextStep :: !(RunOnce (StepState a k))
, _workerFactory :: !(k -> BSPM a k ())
, _rootChan :: !(TChan (MessageWrapper a))
}
{-# INLINE newStepState #-}
newStepState :: (k -> BSPM a k ()) -> TChan (MessageWrapper a) -> IO (StepState a k)
newStepState workerFactory rootChan = do
activeWorkers <- newIORef 0 -- will be zero for root worker, I'm ok with it for now
nextChans <- H.new >>= newCriticalSection
nextStep <- newRunOnce $ newStepState workerFactory rootChan
return StepState
{ _activeWorkers = activeWorkers
, _nextChans = nextChans
, _nextStep = nextStep
, _workerFactory = workerFactory
, _rootChan = rootChan
}
{-# INLINE broadcastEndRecieve #-}
broadcastEndRecieve :: (StepState a k) -> IO ()
broadcastEndRecieve step =
withCriticalSection
( _nextChans step )
( H.mapM_ $ atomically . flip writeTChan EndReceive . snd )
{-# INLINE endRecieve #-}
endRecieve :: TChan (MessageWrapper a) -> IO ()
endRecieve chan = skipToEnd
where
skipToEnd = do
msg <- atomically $ readTChan chan
case msg of
EndReceive -> return ()
_ -> skipToEnd
{-# INLINE getWorkerChan #-}
getWorkerChan :: ( Eq k, Hashable k ) => StepState a k -> k -> IO (TChan (MessageWrapper a))
getWorkerChan step k = modifyCriticalSection (_nextChans step) $ \nextChans -> do
maybeChan <- H.lookup nextChans k
case maybeChan of
Just chan -> return (nextChans, chan)
Nothing -> do
nextStep <- getRunOnce (_nextStep step)
chan <- newTChanIO
mask_ $ do
atomicModifyIORef' (_activeWorkers nextStep) $ \a -> (a + 1, ())
void $ forkIOWithUnmask $ \unmask -> finally
( unmask $ do
unBSPM
(_workerFactory nextStep k)
WorkerState { _chan = chan, _currentStep = nextStep }
endRecieve chan )
( do
c <-atomicModifyIORef' (_activeWorkers nextStep) $ \a -> (a - 1, a - 1)
when (c == 0) $ do
hasNextStep <- initialized $ _nextStep nextStep
if hasNextStep
then ( broadcastEndRecieve nextStep )
else atomically $ writeTChan (_rootChan nextStep) Halt )
H.insert nextChans k chan
return (nextChans, chan)
run :: (k -> BSPM a k ()) -> BSPM a k r -> IO r
run workerFactory bspm = do
chan <- newTChanIO
step <- newStepState workerFactory chan
result <- finally
( unBSPM bspm WorkerState { _chan = chan, _currentStep = step } )
( broadcastEndRecieve step )
halt <- atomically $ readTChan chan
case halt of
Halt -> return result
-- this should never happen
EndReceive -> error "BSPM.Engine.Local.run: EndReceive"
DataMessage _ -> error "BSPM.Engine.Local.run: DataMessage"
send :: ( Eq k, Hashable k ) => k -> a -> BSPM a k ()
send k a = BSPM $ \worker -> do
chan <- getWorkerChan (_currentStep worker) k
atomically $ writeTChan chan $ DataMessage a
receive :: BSPM a s (Maybe a)
receive = BSPM $ \worker -> atomically $ do
msg <- readTChan $ _chan worker
case msg of
DataMessage a -> return $ Just a
EndReceive -> do
unGetTChan (_chan worker) msg
return Nothing
-- this should never happen
Halt -> error "BSPM.Engine.Local.receive: Halt"
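-- Illustrative sketch (not part of the exported API): a worker factory
-- that simply drains its mailbox, and a driver that sends it a single
-- message.  The names '_demoWorker' and '_demoRun' are illustrative only.
_demoWorker :: Int -> BSPM String Int ()
_demoWorker key = do
  msg <- receive
  case msg of
    Just _ -> _demoWorker key
    Nothing -> return ()
-- 'run' blocks until every spawned worker has finished its final step.
_demoRun :: IO ()
_demoRun = run _demoWorker (send (1 :: Int) "hello")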
| schernichkin/BSPM | bsp/src/BSPM/Engine/Local.hs | bsd-3-clause | 4,716 | 0 | 30 | 1,330 | 1,491 | 766 | 725 | 140 | 3 |
{-# LANGUAGE FlexibleInstances #-}
module Ch5
( toList,
sumL
) where
class Listable a where
toList :: a -> [Int]
instance Listable Int where
-- toList :: Int -> [Int]
toList x = [x]
instance Listable Bool where
toList True = [1]
toList False = [0]
instance Listable [Int] where
toList = id
data Tree a = Empty | Node a (Tree a) (Tree a)
instance Listable (Tree Int) where
toList Empty = []
toList (Node x l r) = toList l ++ [x] ++ toList r
sumL x = sum (toList x)
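-- Illustrative examples:
--
-- >>> toList (Node 2 (Node 1 Empty Empty) (Node 3 Empty Empty) :: Tree Int)
-- [1,2,3]
-- >>> sumL True
-- 1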
| wangwangwar/cis194 | src/ch5/Ch5.hs | bsd-3-clause | 521 | 0 | 8 | 150 | 205 | 109 | 96 | 18 | 1 |
module Util where
import Control.Exception
import Data.Char
import Data.List
import Control.Monad.State (MonadIO(..), liftM)
import System.Random
import Text.Regex.PCRE
contains :: String -> String -> Bool
contains = flip isInfixOf
containsAny :: String -> [String] -> Bool
containsAny msg ys = map toLower msg =~ ("(\\W|^)(" ++ intercalate "|" ys ++ ")(\\W|$)")
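-- Illustrative examples (the message is lowercased before matching, and
-- needles match only at word boundaries):
--
-- >>> "Hello there" `containsAny` ["hello", "hi"]
-- True
-- >>> "hit me" `containsAny` ["hi"]
-- False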
startsWith :: String -> String -> Bool
startsWith = flip isPrefixOf
endsWith :: String -> String -> Bool
endsWith = flip isSuffixOf
capitalize :: String -> String
capitalize "" = ""
capitalize (x:xs) = toUpper x : xs
randomChoice :: MonadIO m => [a] -> m a
randomChoice xs
| maxIndex < 0 = (liftIO . throwIO) (ErrorCall "randomChoice: empty list")
| otherwise = (xs !!) `liftM` (liftIO . randomRIO) (0, maxIndex)
where
maxIndex = pred (length xs)
| wimdu/alonzo | src/Util.hs | mit | 886 | 0 | 9 | 205 | 308 | 165 | 143 | 23 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-
Copyright (C) 2006-2010 John MacFarlane <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Writers.RST
Copyright : Copyright (C) 2006-2010 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <[email protected]>
Stability : alpha
Portability : portable
Conversion of 'Pandoc' documents to reStructuredText.
reStructuredText: <http://docutils.sourceforge.net/rst.html>
-}
module Text.Pandoc.Writers.RST ( writeRST) where
import Text.Pandoc.Definition
import Text.Pandoc.Shared
import Text.Pandoc.Templates (renderTemplate)
import Data.List ( isPrefixOf, intersperse, transpose )
import Text.Pandoc.Pretty
import Control.Monad.State
import Control.Applicative ( (<$>) )
type Refs = [([Inline], Target)]
data WriterState =
WriterState { stNotes :: [[Block]]
, stLinks :: Refs
, stImages :: Refs
, stHasMath :: Bool
, stOptions :: WriterOptions
}
-- | Convert Pandoc to RST.
writeRST :: WriterOptions -> Pandoc -> String
writeRST opts document =
let st = WriterState { stNotes = [], stLinks = [],
stImages = [], stHasMath = False,
stOptions = opts }
in evalState (pandocToRST document) st
-- | Return RST representation of document.
pandocToRST :: Pandoc -> State WriterState String
pandocToRST (Pandoc (Meta tit auth dat) blocks) = do
opts <- liftM stOptions get
title <- titleToRST tit
authors <- mapM inlineListToRST auth
date <- inlineListToRST dat
body <- blockListToRST blocks
notes <- liftM (reverse . stNotes) get >>= notesToRST
-- note that the notes may contain refs, so we do them first
refs <- liftM (reverse . stLinks) get >>= refsToRST
pics <- liftM (reverse . stImages) get >>= pictRefsToRST
hasMath <- liftM stHasMath get
let colwidth = if writerWrapText opts
then Just $ writerColumns opts
else Nothing
let main = render colwidth $ foldl ($+$) empty $ [body, notes, refs, pics]
let context = writerVariables opts ++
[ ("body", main)
, ("title", render Nothing title)
, ("date", render colwidth date) ] ++
[ ("math", "yes") | hasMath ] ++
[ ("author", render colwidth a) | a <- authors ]
if writerStandalone opts
then return $ renderTemplate context $ writerTemplate opts
else return main
-- | Return RST representation of reference key table.
refsToRST :: Refs -> State WriterState Doc
refsToRST refs = mapM keyToRST refs >>= return . vcat
-- | Return RST representation of a reference key.
keyToRST :: ([Inline], (String, String))
-> State WriterState Doc
keyToRST (label, (src, _)) = do
label' <- inlineListToRST label
let label'' = if ':' `elem` (render Nothing label')
then char '`' <> label' <> char '`'
else label'
return $ ".. _" <> label'' <> ": " <> text src
-- | Return RST representation of notes.
notesToRST :: [[Block]] -> State WriterState Doc
notesToRST notes =
mapM (\(num, note) -> noteToRST num note) (zip [1..] notes) >>=
return . vsep
-- | Return RST representation of a note.
noteToRST :: Int -> [Block] -> State WriterState Doc
noteToRST num note = do
contents <- blockListToRST note
let marker = ".. [" <> text (show num) <> "]"
return $ marker $$ nest 3 contents
-- | Return RST representation of picture reference table.
pictRefsToRST :: Refs -> State WriterState Doc
pictRefsToRST refs = mapM pictToRST refs >>= return . vcat
-- | Return RST representation of a picture substitution reference.
pictToRST :: ([Inline], (String, String))
-> State WriterState Doc
pictToRST (label, (src, _)) = do
label' <- inlineListToRST label
return $ ".. |" <> label' <> "| image:: " <> text src
-- | Escape special characters for RST.
escapeString :: String -> String
escapeString = escapeStringUsing (backslashEscapes "`\\|*_")
titleToRST :: [Inline] -> State WriterState Doc
titleToRST [] = return empty
titleToRST lst = do
contents <- inlineListToRST lst
let titleLength = length $ (render Nothing contents :: String)
let border = text (replicate titleLength '=')
return $ border $$ contents $$ border
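-- For example (illustrative), a title consisting of the single word
-- @Hello@ is rendered with matching over- and underlines:
--
-- > =====
-- > Hello
-- > =====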
-- | Convert Pandoc block element to RST.
blockToRST :: Block -- ^ Block element
-> State WriterState Doc
blockToRST Null = return empty
blockToRST (Plain inlines) = inlineListToRST inlines
blockToRST (Para [Image txt (src,tit)]) = do
capt <- inlineListToRST txt
let fig = "figure:: " <> text src
let align = ":align: center"
let alt = ":alt: " <> if null tit then capt else text tit
return $ hang 3 ".. " $ fig $$ align $$ alt $+$ capt $$ blankline
blockToRST (Para inlines) = do
contents <- inlineListToRST inlines
return $ contents <> blankline
blockToRST (RawBlock f str) =
return $ blankline <> ".. raw:: " <> text f $+$
(nest 3 $ text str) $$ blankline
blockToRST HorizontalRule =
return $ blankline $$ "--------------" $$ blankline
blockToRST (Header level inlines) = do
contents <- inlineListToRST inlines
let headerChar = if level > 5 then ' ' else "=-~^'" !! (level - 1)
let border = text $ replicate (offset contents) headerChar
return $ contents $$ border $$ blankline
blockToRST (CodeBlock (_,classes,_) str) = do
opts <- stOptions <$> get
let tabstop = writerTabStop opts
if "haskell" `elem` classes && "literate" `elem` classes &&
writerLiterateHaskell opts
then return $ prefixed "> " (text str) $$ blankline
else return $ "::" $+$ nest tabstop (text str) $$ blankline
blockToRST (BlockQuote blocks) = do
tabstop <- get >>= (return . writerTabStop . stOptions)
contents <- blockListToRST blocks
return $ (nest tabstop contents) <> blankline
blockToRST (Table caption _ widths headers rows) = do
caption' <- inlineListToRST caption
let caption'' = if null caption
then empty
else blankline <> text "Table: " <> caption'
headers' <- mapM blockListToRST headers
rawRows <- mapM (mapM blockListToRST) rows
let isSimple = all (==0) widths && all (all (\bs -> length bs == 1)) rows
let numChars = maximum . map offset
opts <- get >>= return . stOptions
let widthsInChars =
if isSimple
then map ((+2) . numChars) $ transpose (headers' : rawRows)
else map (floor . (fromIntegral (writerColumns opts) *)) widths
let hpipeBlocks blocks = hcat [beg, middle, end]
where h = maximum (map height blocks)
sep' = lblock 3 $ vcat (map text $ replicate h " | ")
beg = lblock 2 $ vcat (map text $ replicate h "| ")
end = lblock 2 $ vcat (map text $ replicate h " |")
middle = hcat $ intersperse sep' blocks
let makeRow = hpipeBlocks . zipWith lblock widthsInChars
let head' = makeRow headers'
rows' <- mapM (\row -> do cols <- mapM blockListToRST row
return $ makeRow cols) rows
let border ch = char '+' <> char ch <>
(hcat $ intersperse (char ch <> char '+' <> char ch) $
map (\l -> text $ replicate l ch) widthsInChars) <>
char ch <> char '+'
let body = vcat $ intersperse (border '-') rows'
let head'' = if all null headers
then empty
else head' $$ border '='
return $ border '-' $$ head'' $$ body $$ border '-' $$ caption'' $$ blankline
blockToRST (BulletList items) = do
contents <- mapM bulletListItemToRST items
-- ensure that sublists have preceding blank line
return $ blankline $$ vcat contents $$ blankline
blockToRST (OrderedList (start, style', delim) items) = do
let markers = if start == 1 && style' == DefaultStyle && delim == DefaultDelim
then take (length items) $ repeat "#."
else take (length items) $ orderedListMarkers
(start, style', delim)
let maxMarkerLength = maximum $ map length markers
let markers' = map (\m -> let s = maxMarkerLength - length m
in m ++ replicate s ' ') markers
contents <- mapM (\(item, num) -> orderedListItemToRST item num) $
zip markers' items
-- ensure that sublists have preceding blank line
return $ blankline $$ vcat contents $$ blankline
blockToRST (DefinitionList items) = do
contents <- mapM definitionListItemToRST items
-- ensure that sublists have preceding blank line
return $ blankline $$ vcat contents $$ blankline
-- | Convert bullet list item (list of blocks) to RST.
bulletListItemToRST :: [Block] -> State WriterState Doc
bulletListItemToRST items = do
contents <- blockListToRST items
return $ hang 3 "- " $ contents <> cr
-- | Convert ordered list item (a list of blocks) to RST.
orderedListItemToRST :: String -- ^ marker for list item
-> [Block] -- ^ list item (list of blocks)
-> State WriterState Doc
orderedListItemToRST marker items = do
contents <- blockListToRST items
let marker' = marker ++ " "
return $ hang (length marker') (text marker') $ contents <> cr
-- | Convert definition list item (label, list of blocks) to RST.
definitionListItemToRST :: ([Inline], [[Block]]) -> State WriterState Doc
definitionListItemToRST (label, defs) = do
label' <- inlineListToRST label
contents <- liftM vcat $ mapM blockListToRST defs
tabstop <- get >>= (return . writerTabStop . stOptions)
return $ label' $$ nest tabstop (contents <> cr)
-- | Convert list of Pandoc block elements to RST.
blockListToRST :: [Block] -- ^ List of block elements
-> State WriterState Doc
blockListToRST blocks = mapM blockToRST blocks >>= return . vcat
-- | Convert list of Pandoc inline elements to RST.
inlineListToRST :: [Inline] -> State WriterState Doc
inlineListToRST lst = mapM inlineToRST lst >>= return . hcat
-- | Convert Pandoc inline element to RST.
inlineToRST :: Inline -> State WriterState Doc
inlineToRST (Emph lst) = do
contents <- inlineListToRST lst
return $ "*" <> contents <> "*"
inlineToRST (Strong lst) = do
contents <- inlineListToRST lst
return $ "**" <> contents <> "**"
inlineToRST (Strikeout lst) = do
contents <- inlineListToRST lst
return $ "[STRIKEOUT:" <> contents <> "]"
inlineToRST (Superscript lst) = do
contents <- inlineListToRST lst
return $ "\\ :sup:`" <> contents <> "`\\ "
inlineToRST (Subscript lst) = do
contents <- inlineListToRST lst
return $ "\\ :sub:`" <> contents <> "`\\ "
inlineToRST (SmallCaps lst) = inlineListToRST lst
inlineToRST (Quoted SingleQuote lst) = do
contents <- inlineListToRST lst
return $ "‘" <> contents <> "’"
inlineToRST (Quoted DoubleQuote lst) = do
contents <- inlineListToRST lst
return $ "“" <> contents <> "”"
inlineToRST (Cite _ lst) =
inlineListToRST lst
inlineToRST EmDash = return $ char '\8212'
inlineToRST EnDash = return $ char '\8211'
inlineToRST Apostrophe = return $ char '\8217'
inlineToRST Ellipses = return $ char '\8230'
inlineToRST (Code _ str) = return $ "``" <> text str <> "``"
inlineToRST (Str str) = return $ text $ escapeString str
inlineToRST (Math t str) = do
modify $ \st -> st{ stHasMath = True }
return $ if t == InlineMath
then ":math:`$" <> text str <> "$`"
else ":math:`$$" <> text str <> "$$`"
inlineToRST (RawInline _ _) = return empty
inlineToRST (LineBreak) = return cr -- there's no line break in RST
inlineToRST Space = return space
inlineToRST (Link [Code _ str] (src, _)) | src == str ||
src == "mailto:" ++ str = do
let srcSuffix = if isPrefixOf "mailto:" src then drop 7 src else src
return $ text $ unescapeURI srcSuffix
inlineToRST (Link txt (src', tit)) = do
let src = unescapeURI src'
useReferenceLinks <- get >>= return . writerReferenceLinks . stOptions
linktext <- inlineListToRST $ normalizeSpaces txt
if useReferenceLinks
then do refs <- get >>= return . stLinks
let refs' = if (txt, (src, tit)) `elem` refs
then refs
else (txt, (src, tit)):refs
modify $ \st -> st { stLinks = refs' }
return $ "`" <> linktext <> "`_"
else return $ "`" <> linktext <> " <" <> text src <> ">`_"
inlineToRST (Image alternate (source', tit)) = do
let source = unescapeURI source'
pics <- get >>= return . stImages
let labelsUsed = map fst pics
let txt = if null alternate || alternate == [Str ""] ||
alternate `elem` labelsUsed
then [Str $ "image" ++ show (length pics)]
else alternate
let pics' = if (txt, (source, tit)) `elem` pics
then pics
else (txt, (source, tit)):pics
modify $ \st -> st { stImages = pics' }
label <- inlineListToRST txt
return $ "|" <> label <> "|"
inlineToRST (Note contents) = do
-- add to notes in state
notes <- get >>= return . stNotes
modify $ \st -> st { stNotes = contents:notes }
let ref = show $ (length notes) + 1
return $ " [" <> text ref <> "]_"
| Lythimus/lptv | sites/all/modules/jgm-pandoc-8be6cc2/src/Text/Pandoc/Writers/RST.hs | gpl-2.0 | 13,931 | 0 | 20 | 3,529 | 4,203 | 2,098 | 2,105 | 261 | 8 |
{-# LANGUAGE PatternGuards #-}
-- |
-- Copyright : (c) 2019 Charlie Jacomme <[email protected]>
-- License : GPL v3 (see LICENSE)
--
-- Maintainer : Robert Künnemann <[email protected]>
-- Portability : GHC only
--
-- Compute annotations for always-secret channels
--
-- A channel is defined always-secret iff it corresponds to a fresh variable
-- that is only used as a channel identifier. For these channels, we can use a
-- more efficient translation, as the adversary can never deduce them, and thus
-- only a silent transition is possible.
module Sapic.SecretChannels (
annotateSecretChannels
) where
-- import Control.Exception
-- import Control.Monad.Catch
-- import Control.Monad.Fresh
import Data.Set as S
import Data.List as L
import Sapic.Annotation
-- import Sapic.Exceptions
import Theory
import Theory.Sapic
-- | Get all variables inside a term
getTermVariables :: LNTerm -> S.Set LVar
getTermVariables ts =
S.fromList $ L.map fst $ varOccurences ts
-- | Get all variables that are never output
getSecretChannels :: AnProcess ProcessAnnotation -> S.Set LVar -> S.Set LVar
getSecretChannels (ProcessAction (New v) _ p) candidates =
let c = S.insert v candidates in
getSecretChannels p c
getSecretChannels (ProcessAction (ChOut _ t2) _ p) candidates =
let c = S.difference candidates (getTermVariables t2) in
getSecretChannels p c
getSecretChannels (ProcessAction (Insert _ t2) _ p) candidates =
let c = S.difference candidates (getTermVariables t2) in
getSecretChannels p c
getSecretChannels (ProcessAction (_) _ p) candidates =
getSecretChannels p candidates
getSecretChannels (ProcessNull _) candidates = candidates
getSecretChannels (ProcessComb _ _ pl pr ) candidates =
S.intersection c1 c2
where
c1 = getSecretChannels pl candidates
c2 = getSecretChannels pr candidates
-- | For each input or output, if the variable is secret, we annotate the process
annotateEachSecretChannels :: AnProcess ProcessAnnotation -> S.Set LVar -> AnProcess ProcessAnnotation
annotateEachSecretChannels (ProcessNull an) _ = (ProcessNull an)
annotateEachSecretChannels (ProcessComb comb an pl pr ) svars =
(ProcessComb comb an pl' pr')
where
pl' = annotateEachSecretChannels pl svars
pr' = annotateEachSecretChannels pr svars
annotateEachSecretChannels (ProcessAction ac an p) svars
| (ChIn (Just t1) _) <- ac, Lit (Var v) <- viewTerm t1 =
if S.member v svars then
(ProcessAction ac (an `mappend` annSecretChannel (AnLVar v)) p')
else
(ProcessAction ac an p')
| (ChOut (Just t1) _) <- ac, Lit (Var v) <- viewTerm t1 =
if S.member v svars then
(ProcessAction ac (an `mappend` annSecretChannel (AnLVar v)) p')
else
(ProcessAction ac an p')
| otherwise = (ProcessAction ac an p')
where p'= annotateEachSecretChannels p svars
annotateSecretChannels :: AnProcess ProcessAnnotation -> (AnProcess ProcessAnnotation)
annotateSecretChannels anp =
annotateEachSecretChannels anp svars
where svars = getSecretChannels anp S.empty
| tamarin-prover/tamarin-prover | lib/sapic/src/Sapic/SecretChannels.hs | gpl-3.0 | 3,268 | 0 | 13 | 779 | 778 | 396 | 382 | 49 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.CloudHSM.DescribeLunaClient
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Retrieves information about an HSM client.
--
-- <http://docs.aws.amazon.com/cloudhsm/latest/dg/API_DescribeLunaClient.html>
module Network.AWS.CloudHSM.DescribeLunaClient
(
-- * Request
DescribeLunaClient
-- ** Request constructor
, describeLunaClient
-- ** Request lenses
, dlcCertificateFingerprint
, dlcClientArn
-- * Response
, DescribeLunaClientResponse
-- ** Response constructor
, describeLunaClientResponse
-- ** Response lenses
, dlcrCertificate
, dlcrCertificateFingerprint
, dlcrClientArn
, dlcrLabel
, dlcrLastModifiedTimestamp
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.CloudHSM.Types
import qualified GHC.Exts
data DescribeLunaClient = DescribeLunaClient
{ _dlcCertificateFingerprint :: Maybe Text
, _dlcClientArn :: Maybe Text
} deriving (Eq, Ord, Read, Show)
-- | 'DescribeLunaClient' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dlcCertificateFingerprint' @::@ 'Maybe' 'Text'
--
-- * 'dlcClientArn' @::@ 'Maybe' 'Text'
--
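-- A sketch of typical usage (the ARN below is a placeholder; the lens
-- operators are assumed to be those re-exported by 'Network.AWS.Prelude'):
--
-- > describeLunaClient & dlcClientArn ?~ "arn:aws:cloudhsm:us-east-1:123456789012:client-demo"
--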
describeLunaClient :: DescribeLunaClient
describeLunaClient = DescribeLunaClient
{ _dlcClientArn = Nothing
, _dlcCertificateFingerprint = Nothing
}
-- | The certificate fingerprint.
dlcCertificateFingerprint :: Lens' DescribeLunaClient (Maybe Text)
dlcCertificateFingerprint =
lens _dlcCertificateFingerprint
(\s a -> s { _dlcCertificateFingerprint = a })
-- | The ARN of the client.
dlcClientArn :: Lens' DescribeLunaClient (Maybe Text)
dlcClientArn = lens _dlcClientArn (\s a -> s { _dlcClientArn = a })
data DescribeLunaClientResponse = DescribeLunaClientResponse
{ _dlcrCertificate :: Maybe Text
, _dlcrCertificateFingerprint :: Maybe Text
, _dlcrClientArn :: Maybe Text
, _dlcrLabel :: Maybe Text
, _dlcrLastModifiedTimestamp :: Maybe Text
} deriving (Eq, Ord, Read, Show)
-- | 'DescribeLunaClientResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dlcrCertificate' @::@ 'Maybe' 'Text'
--
-- * 'dlcrCertificateFingerprint' @::@ 'Maybe' 'Text'
--
-- * 'dlcrClientArn' @::@ 'Maybe' 'Text'
--
-- * 'dlcrLabel' @::@ 'Maybe' 'Text'
--
-- * 'dlcrLastModifiedTimestamp' @::@ 'Maybe' 'Text'
--
describeLunaClientResponse :: DescribeLunaClientResponse
describeLunaClientResponse = DescribeLunaClientResponse
{ _dlcrClientArn = Nothing
, _dlcrCertificate = Nothing
, _dlcrCertificateFingerprint = Nothing
, _dlcrLastModifiedTimestamp = Nothing
, _dlcrLabel = Nothing
}
-- | The certificate installed on the HSMs used by this client.
dlcrCertificate :: Lens' DescribeLunaClientResponse (Maybe Text)
dlcrCertificate = lens _dlcrCertificate (\s a -> s { _dlcrCertificate = a })
-- | The certificate fingerprint.
dlcrCertificateFingerprint :: Lens' DescribeLunaClientResponse (Maybe Text)
dlcrCertificateFingerprint =
lens _dlcrCertificateFingerprint
(\s a -> s { _dlcrCertificateFingerprint = a })
-- | The ARN of the client.
dlcrClientArn :: Lens' DescribeLunaClientResponse (Maybe Text)
dlcrClientArn = lens _dlcrClientArn (\s a -> s { _dlcrClientArn = a })
-- | The label of the client.
dlcrLabel :: Lens' DescribeLunaClientResponse (Maybe Text)
dlcrLabel = lens _dlcrLabel (\s a -> s { _dlcrLabel = a })
-- | The date and time the client was last modified.
dlcrLastModifiedTimestamp :: Lens' DescribeLunaClientResponse (Maybe Text)
dlcrLastModifiedTimestamp =
lens _dlcrLastModifiedTimestamp
(\s a -> s { _dlcrLastModifiedTimestamp = a })
instance ToPath DescribeLunaClient where
toPath = const "/"
instance ToQuery DescribeLunaClient where
toQuery = const mempty
instance ToHeaders DescribeLunaClient
instance ToJSON DescribeLunaClient where
toJSON DescribeLunaClient{..} = object
[ "ClientArn" .= _dlcClientArn
, "CertificateFingerprint" .= _dlcCertificateFingerprint
]
instance AWSRequest DescribeLunaClient where
type Sv DescribeLunaClient = CloudHSM
type Rs DescribeLunaClient = DescribeLunaClientResponse
request = post "DescribeLunaClient"
response = jsonResponse
instance FromJSON DescribeLunaClientResponse where
parseJSON = withObject "DescribeLunaClientResponse" $ \o -> DescribeLunaClientResponse
<$> o .:? "Certificate"
<*> o .:? "CertificateFingerprint"
<*> o .:? "ClientArn"
<*> o .:? "Label"
<*> o .:? "LastModifiedTimestamp"
| kim/amazonka | amazonka-cloudhsm/gen/Network/AWS/CloudHSM/DescribeLunaClient.hs | mpl-2.0 | 5,706 | 0 | 17 | 1,233 | 814 | 481 | 333 | 91 | 1 |
{-# LANGUAGE NoImplicitPrelude, MagicHash, UnboxedTuples, BangPatterns, StandaloneDeriving #-}
-----------------------------------------------------------------------------
-- |
-- Module : Java.Primitive
-- Copyright : (c) Rahul Muttineni 2016-2017
--
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Dealing with native Java primitives.
--
-----------------------------------------------------------------------------
module Java.Primitive
( Byte(..)
, Short(..)
, JChar(..)
, charToJChar
, jcharToChar)
where
import GHC.Base
import GHC.Num
import GHC.Real
import GHC.Show
import Java.PrimitiveBase
import Data.Data
import Data.Typeable
import Data.Char
import Prelude(maxBound)
deriving instance Typeable Byte
byteType :: DataType
byteType = mkIntType "Java.Primitive.Byte"
instance Data Byte where
toConstr = mkIntegralConstr byteType
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Byte."
dataTypeOf _ = byteType
deriving instance Typeable Short
shortType :: DataType
shortType = mkIntType "Java.Primitive.Short"
instance Data Short where
toConstr = mkIntegralConstr shortType
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type Short."
dataTypeOf _ = shortType
deriving instance Typeable JChar
charToJChar :: Char -> Maybe JChar
charToJChar chr
| chrVal <= maxJChar && not isSurrogate = Just $ fromIntegral chrVal
| otherwise = Nothing
where maxJChar = fromIntegral (maxBound :: JChar)
chrVal = ord chr
        -- check if the Char is a reserved UTF-16 surrogate value - not a valid JChar
isSurrogate = 0xD800 <= chrVal && chrVal <= 0xDFFF
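-- Illustrative behaviour:
--
-- > charToJChar 'A'       == Just 65
-- > charToJChar '\x1F600' == Nothing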
jcharToChar :: JChar -> Maybe Char
jcharToChar jchr
| not isSurrogate = Just $ chr jchrVal
| otherwise = Nothing
where jchrVal = fromIntegral jchr
-- check if JChar is reserved UTF-16 value - not a valid character
isSurrogate = 0xD800 <= jchrVal && jchrVal <= 0xDFFF
jcharType :: DataType
jcharType = mkIntType "Java.Primitive.JChar"
instance Data JChar where
toConstr = mkIntegralConstr jcharType
gunfold _ z c = case constrRep c of
(IntConstr x) -> z (fromIntegral x)
_ -> errorWithoutStackTrace $ "Data.Data.gunfold: Constructor " ++ show c
++ " is not of type JChar."
dataTypeOf _ = jcharType
| rahulmutt/ghcvm | libraries/base/Java/Primitive.hs | bsd-3-clause | 2,807 | 0 | 11 | 699 | 584 | 306 | 278 | 59 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE AutoDeriveTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ScopedTypeVariables #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Applicative
-- Copyright : Conor McBride and Ross Paterson 2005
-- License : BSD-style (see the LICENSE file in the distribution)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- This module describes a structure intermediate between a functor and
-- a monad (technically, a strong lax monoidal functor). Compared with
-- monads, this interface lacks the full power of the binding operation
-- '>>=', but
--
-- * it has more instances.
--
-- * it is sufficient for many uses, e.g. context-free parsing, or the
-- 'Data.Traversable.Traversable' class.
--
-- * instances can perform analysis of computations before they are
-- executed, and thus produce shared optimizations.
--
-- This interface was introduced for parsers by Niklas Röjemo, because
-- it admits more sharing than the monadic interface. The names here are
-- mostly based on parsing work by Doaitse Swierstra.
--
-- For more details, see
-- <http://www.soi.city.ac.uk/~ross/papers/Applicative.html Applicative Programming with Effects>,
-- by Conor McBride and Ross Paterson.
module Control.Applicative (
-- * Applicative functors
Applicative(..),
-- * Alternatives
Alternative(..),
-- * Instances
Const(..), WrappedMonad(..), WrappedArrow(..), ZipList(..),
-- * Utility functions
(<$>), (<$), (<**>),
liftA, liftA3,
optional,
) where
import Control.Category hiding ((.), id)
import Control.Arrow
import Data.Maybe
import Data.Tuple
import Data.Eq
import Data.Ord
import Data.Foldable (Foldable(..))
import Data.Functor ((<$>))
import Data.Functor.Const (Const(..))
import GHC.Base
import GHC.Generics
import GHC.List (repeat, zipWith, drop)
import GHC.Read (Read)
import GHC.Show (Show)
newtype WrappedMonad m a = WrapMonad { unwrapMonad :: m a }
deriving (Generic, Generic1, Monad)
-- | @since 2.01
instance Monad m => Functor (WrappedMonad m) where
fmap f (WrapMonad v) = WrapMonad (liftM f v)
-- | @since 2.01
instance Monad m => Applicative (WrappedMonad m) where
pure = WrapMonad . pure
WrapMonad f <*> WrapMonad v = WrapMonad (f `ap` v)
liftA2 f (WrapMonad x) (WrapMonad y) = WrapMonad (liftM2 f x y)
-- | @since 2.01
instance MonadPlus m => Alternative (WrappedMonad m) where
empty = WrapMonad mzero
WrapMonad u <|> WrapMonad v = WrapMonad (u `mplus` v)
newtype WrappedArrow a b c = WrapArrow { unwrapArrow :: a b c }
deriving (Generic, Generic1)
-- | @since 2.01
instance Arrow a => Functor (WrappedArrow a b) where
fmap f (WrapArrow a) = WrapArrow (a >>> arr f)
-- | @since 2.01
instance Arrow a => Applicative (WrappedArrow a b) where
pure x = WrapArrow (arr (const x))
liftA2 f (WrapArrow u) (WrapArrow v) =
WrapArrow (u &&& v >>> arr (uncurry f))
-- | @since 2.01
instance (ArrowZero a, ArrowPlus a) => Alternative (WrappedArrow a b) where
empty = WrapArrow zeroArrow
WrapArrow u <|> WrapArrow v = WrapArrow (u <+> v)
-- | Lists, but with an 'Applicative' functor based on zipping.
newtype ZipList a = ZipList { getZipList :: [a] }
deriving ( Show, Eq, Ord, Read, Functor
, Foldable, Generic, Generic1)
-- See Data.Traversable for Traversable instance due to import loops
-- |
-- > f '<$>' 'ZipList' xs1 '<*>' ... '<*>' 'ZipList' xsN
-- > = 'ZipList' (zipWithN f xs1 ... xsN)
--
-- where @zipWithN@ refers to the @zipWith@ function of the appropriate arity
-- (@zipWith@, @zipWith3@, @zipWith4@, ...). For example:
--
-- > (\a b c -> stimes c [a, b]) <$> ZipList "abcd" <*> ZipList "567" <*> ZipList [1..]
-- > = ZipList (zipWith3 (\a b c -> stimes c [a, b]) "abcd" "567" [1..])
-- > = ZipList {getZipList = ["a5","b6b6","c7c7c7"]}
--
-- @since 2.01
instance Applicative ZipList where
pure x = ZipList (repeat x)
liftA2 f (ZipList xs) (ZipList ys) = ZipList (zipWith f xs ys)
-- | @since 4.11.0.0
instance Alternative ZipList where
empty = ZipList []
ZipList xs <|> ZipList ys = ZipList (xs ++ drop (length xs) ys)
-- extra functions
-- | One or none.
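--
-- For example, with the 'Maybe' instance:
--
-- >>> optional (Just 3)
-- Just (Just 3)
-- >>> optional (Nothing :: Maybe Int)
-- Just Nothing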
optional :: Alternative f => f a -> f (Maybe a)
optional v = Just <$> v <|> pure Nothing
| rahulmutt/ghcvm | libraries/base/Control/Applicative.hs | bsd-3-clause | 4,551 | 0 | 11 | 940 | 961 | 546 | 415 | 60 | 1 |
-- Compiler Toolkit: general purpose attribute management
--
-- Author : Manuel M. T. Chakravarty
-- Created: 14 February 95
--
-- Copyright (c) [1995..1999] Manuel M. T. Chakravarty
--
-- This file is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This file is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
--- DESCRIPTION ---------------------------------------------------------------
--
-- This module provides an abstract notion of attributes (in the sense of
-- compiler construction). The collection of attributes that is attached to a
-- single node of the structure tree is referenced via an attributes
-- identifier. This is basically a reference into so-called attribute tables,
-- which manage attributes of one type and may use different representations.
-- There is also a position attribute managed via the attribute identifier
-- without needing a further table (it is already fixed on construction of
-- the structure tree).
--
-- The `Attributed' class is based on a suggestion from Roman Lechtchinsky.
--
--- DOCU ----------------------------------------------------------------------
--
-- language: Haskell 98
--
-- * Attribute identifiers are generated during parsing and whenever new
-- structure tree elements, possibly due to transformations, are generated.
--
-- * New attributes can be added by simply providing a new attribute table
-- indexed by the attribute identifiers. Thus, adding or discarding an
-- attribute does not involve any change in the structure tree.
--
-- * Consecutive sequences of names are used as attribute identifiers to
-- facilitate the use of arrays for attributes that are fixed; speeds up
-- read access. (See also TODO.)
--
-- * Each attribute table can simultaneously provide melted (updatable) and
-- frozen (non-updatable) attributes. It also allows to dynamically grow the
-- table, i.e., cover a wider range of attribute identifiers.
--
-- * There is a variant merely providing a position, which is used for
-- internal identifiers and such.
--
-- * `StdAttr' provides standard undefined and don't care variants for
-- attribute values.
--
--- TODO ----------------------------------------------------------------------
--
-- * When there are sparse attribute tables that we want to freeze (and they
-- will occur sooner or later), then introduce a third variant of tables
-- realized via hash table---depending on the type of attribute table, we
-- may even allow them to be soft.
--
-- NOTE: Currently, if assertions are switched on, on freezing a table, its
--         density is calculated and, if it is below 33%, an internal error is
-- raised (only if there are more than 1000 entries in the table).
--
-- * check whether it would increase the performance significantly if we use
-- a mixed finite map/array representation for soft tables (all attributes
-- defined before the last `soften' could be held in the array, changing
-- an attribute just means to update it in the FM; i.e., the FM entries take
-- precedence over the array entries)
--
module Data.Attributes (-- attribute management
--
NodeInfo, newAttrsOnlyPos, newAttrs,
Attributed(attrsOf), eqOfAttrsOf, posOfAttrsOf,
--
-- attributes and attribute tables
--
Attr(undef, isUndef, dontCare, isDontCare),
AttrTable, newAttrTable, getAttr, setAttr, updAttr,
copyAttr, freezeAttrTable, softenAttrTable,
StdAttr(..), getStdAttr, getStdAttrDft, isDontCareStdAttr,
isUndefStdAttr, setStdAttr, updStdAttr,
getGenAttr, setGenAttr, updGenAttr)
where
import Data.Array
import Control.Exception (assert)
import qualified Data.IntMap as NameMap (fromList, insert, findWithDefault, empty, assocs)
import Data.IntMap (IntMap)
import Language.C.Data.Node
import Language.C.Data.Position
import Language.C.Data.Name (Name(Name, nameId))
import Data.Errors (interr)
type NameMap = IntMap
-- attribute management data structures and operations
-- ---------------------------------------------------
-- a class for convenient access to the attributes of an attributed object
--
--
class Attributed a where
attrsOf :: a -> NodeInfo
-- equality induced by attribution
--
eqOfAttrsOf :: Attributed a => a -> a -> Bool
eqOfAttrsOf obj1 obj2 = (attrsOf obj1) == (attrsOf obj2)
-- position induced by attribution
--
posOfAttrsOf :: Attributed a => a -> Position
posOfAttrsOf = posOf . attrsOf
-- attribute identifier creation
-- -----------------------------
-- Given only a source position, create a new attribute identifier
--
newAttrsOnlyPos :: Position -> NodeInfo
newAttrsOnlyPos = mkNodeInfoOnlyPos
-- Given a source position and a unique name, create a new attribute
-- identifier
--
newAttrs :: Position -> Name -> NodeInfo
newAttrs = mkNodeInfo
-- attribute tables and operations on them
-- ---------------------------------------
-- | the type class 'Attr' determines which types may be used as attributes
--
--
-- * such types have to provide values representing an undefined and a don't
-- care state, together with two functions to test for these values
--
-- * an attribute in an attribute table is initially set to 'undef' (before
-- some value is assigned to it)
--
-- * an attribute with value 'dontCare' has participated in an already
--   detected error; its value may not be used for further computations in
--   order to avoid error avalanches
--
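-- * an illustrative (hypothetical) instance for a simple integer-valued
--   attribute could look like this (the 'StdAttr' wrapper below captures
--   the same pattern generically):
--
-- > data IntAttr = UndefInt | DontCareInt | IntAttr Int
-- >
-- > instance Attr IntAttr where
-- >   undef                  = UndefInt
-- >   isUndef UndefInt       = True
-- >   isUndef _              = False
-- >   dontCare               = DontCareInt
-- >   isDontCare DontCareInt = True
-- >   isDontCare _           = False
--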
class Attr a where
undef :: a
isUndef :: a -> Bool
dontCare :: a
isDontCare :: a -> Bool
undef = interr "Attributes: Undefined `undef' method in `Attr' class!"
isUndef = interr "Attributes: Undefined `isUndef' method in `Attr' \
\class!"
dontCare = interr "Attributes: Undefined `dontCare' method in `Attr' \
\class!"
isDontCare = interr "Attributes: Undefined `isDontCare' method in `Attr' \
\class!"
-- | attribute tables map attribute identifiers to attribute values
--
-- * the attributes within a table can be soft or frozen, the former may by be
-- updated, but the latter can not be changed
--
-- * the attributes in a frozen table are stored in an array for fast
-- lookup; consequently, the attribute identifiers must be *dense*
--
-- * the table description string is used to emit better error messages (for
-- internal errors)
--
data Attr a =>
AttrTable a = -- for all attribute identifiers not contained in the
-- finite map the value is 'undef'
--
SoftTable (NameMap a) -- updated attr.s
String -- desc of the table
-- the array contains 'undef' attributes for the undefined
-- attributes; for all attribute identifiers outside the
-- bounds, the value is also 'undef';
--
| FrozenTable (Array Name a) -- attribute values
String -- desc of the table
instance (Attr a, Show a) => Show (AttrTable a) where
show (SoftTable mp descr) = -- freeze is disabled
"AttrTable "++ descr ++ " { " ++ (unwords . map show) (NameMap.assocs mp) ++ " }"
show tbl@(FrozenTable _ _) = show (softenAttrTable tbl)
nameMapToList :: NameMap a -> [(Name, a)]
nameMapToList = map (\(k,v) -> (Name k, v)) . NameMap.assocs
nameMapFromList :: [(Name, a)] -> NameMap a
nameMapFromList = NameMap.fromList . map (\(k,v) -> (nameId k, v))
-- | create an attribute table, where all attributes are 'undef'
--
-- the description string is used to identify the table in error messages
-- (internal errors); a table is initially soft
--
newAttrTable :: Attr a => String -> AttrTable a
newAttrTable desc = SoftTable NameMap.empty desc
-- | get the value of an attribute from the given attribute table
--
getAttr :: Attr a => AttrTable a -> NodeInfo -> a
getAttr at node =
case nameOfNode node of
Nothing -> onlyPosErr "getAttr" at (posOfNode node)
Just aid ->
case at of
(SoftTable fm _) -> NameMap.findWithDefault undef (nameId aid) fm
(FrozenTable arr _) -> let (lbd, ubd) = bounds arr
in
if (aid < lbd || aid > ubd) then undef else arr!aid
-- | set the value of an, up to now, undefined attribute from the given
-- attribute table
--
setAttr :: Attr a => AttrTable a -> NodeInfo -> a -> AttrTable a
setAttr at node av =
case nameOfNode node of
Nothing -> onlyPosErr "setAttr" at (posOfNode node)
Just aid ->
case at of
(SoftTable fm desc) -> assert (isUndef (NameMap.findWithDefault undef (nameId aid) fm)) $
SoftTable (NameMap.insert (nameId aid) av fm) desc
(FrozenTable _arr _) -> interr frozenErr
where
frozenErr = "Attributes.setAttr: Tried to write frozen attribute in\n"
++ errLoc at (posOfNode node)
-- | update the value of an attribute from the given attribute table
--
updAttr :: Attr a => AttrTable a -> NodeInfo -> a -> AttrTable a
updAttr at node av =
case nameOfNode node of
Nothing -> onlyPosErr "updAttr" at (posOfNode node)
Just aid ->
case at of
(SoftTable fm desc) -> SoftTable (NameMap.insert (nameId aid) av fm) desc
(FrozenTable _arr _) -> interr $ "Attributes.updAttr: Tried to\
\ update frozen attribute in\n"
++ errLoc at (posOfNode node)
-- | copy the value of an attribute to another one
--
-- * undefined attributes are not copied, to avoid filling the table
--
copyAttr :: Attr a => AttrTable a -> NodeInfo -> NodeInfo -> AttrTable a
copyAttr at ats ats'
| isUndef av = assert (isUndef (getAttr at ats'))
at
| otherwise =
updAttr at ats' av
where
av = getAttr at ats
-- | auxiliary functions for error messages
--
onlyPosErr :: Attr a => String -> AttrTable a -> Position -> b
onlyPosErr fctName at pos =
interr $ "Attributes." ++ fctName ++ ": No attribute identifier in\n"
++ errLoc at pos
--
errLoc :: Attr a => AttrTable a -> Position -> String
errLoc at pos = " table `" ++ tableDesc at ++ "' for construct at\n\
\ position " ++ show pos ++ "!"
where
tableDesc (SoftTable _ desc) = desc
tableDesc (FrozenTable _ desc) = desc
-- | freeze a soft table; afterwards no more changes are possible until the
-- table is softened again
--
freezeAttrTable :: Attr a => AttrTable a -> AttrTable a
freezeAttrTable (SoftTable fm desc) =
let contents = nameMapToList fm
keys = map fst contents
lbd = minimum keys
ubd = maximum keys
in
assert (length keys < 1000 || (length . range) (lbd, ubd) > 3 * length keys)
(FrozenTable (array (lbd, ubd) contents) desc)
freezeAttrTable (FrozenTable _ desc) =
interr ("Attributes.freezeAttrTable: Attempt to freeze the already frozen\n\
\ table `" ++ desc ++ "'!")
-- | soften a frozen table; afterwards changes are possible until the
-- table is frozen again
--
softenAttrTable :: Attr a => AttrTable a -> AttrTable a
softenAttrTable (SoftTable _fm desc) =
interr ("Attributes.softenAttrTable: Attempt to soften the already \
\softened\n table `" ++ desc ++ "'!")
softenAttrTable (FrozenTable arr desc) =
SoftTable (nameMapFromList . assocs $ arr) desc
-- standard attributes
-- -------------------
-- | standard attribute variants
--
data StdAttr a = UndefStdAttr
| DontCareStdAttr
| JustStdAttr a
instance Attr (StdAttr a) where
undef = UndefStdAttr
isUndef UndefStdAttr = True
isUndef _ = False
dontCare = DontCareStdAttr
isDontCare DontCareStdAttr = True
isDontCare _ = False
-- | get an attribute value from a standard attribute table
--
-- * if the attribute can be "don't care", this should be checked before
-- calling this function (using 'isDontCareStdAttr')
--
getStdAttr :: AttrTable (StdAttr a) -> NodeInfo -> a
getStdAttr atab at = getStdAttrDft atab at err
where
err = interr $ "Attributes.getStdAttr: Don't care in\n"
++ errLoc atab (posOf at)
-- | get an attribute value from a standard attribute table, where a default is
-- substituted if the attribute is don't care
--
getStdAttrDft :: AttrTable (StdAttr a) -> NodeInfo -> a -> a
getStdAttrDft atab at dft =
case getAttr atab at of
DontCareStdAttr -> dft
JustStdAttr av -> av
UndefStdAttr -> interr $ "Attributes.getStdAttrDft: Undefined in\n"
++ errLoc atab (posOf at)
-- | check if the attribute value is marked as "don't care"
--
isDontCareStdAttr :: AttrTable (StdAttr a) -> NodeInfo -> Bool
isDontCareStdAttr atab at = isDontCare (getAttr atab at)
-- | check if the attribute value is still undefined
--
-- * we also regard "don't care" attributes as undefined
--
isUndefStdAttr :: AttrTable (StdAttr a) -> NodeInfo -> Bool
isUndefStdAttr atab at = isUndef (getAttr atab at)
-- | set an attribute value in a standard attribute table
--
setStdAttr :: AttrTable (StdAttr a) -> NodeInfo -> a -> AttrTable (StdAttr a)
setStdAttr atab at av = setAttr atab at (JustStdAttr av)
-- | update an attribute value in a standard attribute table
--
updStdAttr :: AttrTable (StdAttr a) -> NodeInfo -> a -> AttrTable (StdAttr a)
updStdAttr atab at av = updAttr atab at (JustStdAttr av)
-- generic attribute table access
-- ------------------------------
getGenAttr :: (Attr a, Attributed obj) => AttrTable a -> obj -> a
getGenAttr atab at = getAttr atab (attrsOf at)
setGenAttr :: (Attr a, Attributed obj)
=> AttrTable a -> obj -> a -> AttrTable a
setGenAttr atab at av = setAttr atab (attrsOf at) av
updGenAttr :: (Attr a, Attributed obj)
=> AttrTable a -> obj -> a -> AttrTable a
updGenAttr atab at av = updAttr atab (attrsOf at) av
| jrockway/c2hs | src/Data/Attributes.hs | gpl-2.0 | 14,902 | 0 | 18 | 3,903 | 2,486 | 1,360 | 1,126 | 156 | 4 |
module NPDA.Quiz where
-- $Id$
import NPDA.Type
import Language.Type
import qualified Grammatik.CF.Language as L
import qualified Grammatik.CF.Generate as G
import qualified NPDA.Inter as I
import qualified Machine.Acceptor.Type as A
import Util.Cache
import Util.Datei
import Util.Seed
import Informed
import ToDoc
import Inter.Types
make :: ( G.Config , I.Config )
-> Var A.Acceptor
( A.Type ( NPDA Char Char Int ) String )
( NPDA Char Char Int )
make ( gconf, iconf ) =
let auf = "NPDA"
ver = "Quiz"
in Var { problem = A.Acceptor
, aufgabe = auf
, version = ver
, key = \ matrikel -> return matrikel
, gen = \ key -> do
seed $ read key
g <- cache ( Datei { pfad = [ "autotool", "cache" , auf, ver ]
, name = key
, extension = "cache"
}
) ( G.throw $ gconf )
let l = L.make "Quiz" g
m = I.max_num iconf
e = I.max_length iconf
small w = length w <= e
yeah <- samples l m 0
noh <- anti_samples l m 0
return $ return $ A.Make
{ A.machine_info = text "Keller-Automat"
, A.data_info = info $ l
, A.yeah = take m $ filter small yeah
, A.noh = take m $ filter small noh
, A.cut = I.cut iconf
, A.check = I.check iconf
, A.start = I.start iconf
}
}
| Erdwolf/autotool-bonn | src/NPDA/Quiz.hs | gpl-2.0 | 1,415 | 63 | 8 | 499 | 326 | 209 | 117 | 44 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module WaiAppStatic.Types
( -- * Pieces
Piece
, toPiece
, fromPiece
, unsafeToPiece
, Pieces
, toPieces
-- * Caching
, MaxAge (..)
-- * File\/folder serving
, FolderName
, Folder (..)
, File (..)
, LookupResult (..)
, Listing
-- * Settings
, StaticSettings (..)
) where
import Data.Text (Text)
import qualified Network.HTTP.Types as H
import qualified Network.Wai as W
import Data.ByteString (ByteString)
import System.Posix.Types (EpochTime)
import qualified Data.Text as T
import Blaze.ByteString.Builder (Builder)
import Network.Mime (MimeType)
-- | An individual component of a path, or of a filepath.
--
-- This is the core type used by wai-app-static for doing lookups. It provides
-- a smart constructor to avoid the possibility of constructing unsafe path
-- segments (though @unsafeToPiece@ can get around that as necessary).
--
-- Individual file lookup backends must know how to convert from a @Piece@ to
-- their storage system.
newtype Piece = Piece { fromPiece :: Text }
deriving (Show, Eq, Ord)
-- | Smart constructor for a @Piece@. Won\'t allow unsafe components, such as
-- pieces beginning with a period or containing a slash. This /will/, however,
-- allow null pieces.
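--
-- Illustrative examples:
--
-- >>> toPiece ".hidden"
-- Nothing
-- >>> toPiece "foo/bar"
-- Nothing
-- >>> fromPiece <$> toPiece "style.css"
-- Just "style.css"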
toPiece :: Text -> Maybe Piece
toPiece t
| T.null t = Just $ Piece t
| T.head t == '.' = Nothing
| T.any (== '/') t = Nothing
| otherwise = Just $ Piece t
-- | Construct a @Piece@ without input validation.
unsafeToPiece :: Text -> Piece
unsafeToPiece = Piece
-- | Call @toPiece@ on a list.
--
-- > toPieces = mapM toPiece
toPieces :: [Text] -> Maybe Pieces
toPieces = mapM toPiece
-- | Request coming from a user. Corresponds to @pathInfo@.
type Pieces = [Piece]
-- | Values for the max-age component of the cache-control response header.
data MaxAge = NoMaxAge -- ^ no cache-control set
| MaxAgeSeconds Int -- ^ set to the given number of seconds
| MaxAgeForever -- ^ essentially infinite caching; in reality, probably one year
-- | Just the name of a folder.
type FolderName = Piece
-- | Represent contents of a single folder, which can be itself either a file
-- or a folder.
data Folder = Folder
{ folderContents :: [Either FolderName File]
}
-- | Information on an individual file.
data File = File
{ -- | Size of file in bytes
fileGetSize :: Int
-- | How to construct a WAI response for this file. Some files are stored
-- on the filesystem and can use @ResponseFile@, while others are stored
-- in memory and should use @ResponseBuilder@.
, fileToResponse :: H.Status -> H.ResponseHeaders -> W.Response
-- | Last component of the filename.
, fileName :: Piece
-- | Calculate a hash of the contents of this file, such as for etag.
, fileGetHash :: IO (Maybe ByteString)
-- | Last modified time, used for both display in listings and if-modified-since.
, fileGetModified :: Maybe EpochTime
}
-- | Result of looking up a file in some storage backend.
--
-- The lookup is either a file or folder, or does not exist.
data LookupResult = LRFile File
| LRFolder Folder
| LRNotFound
-- | How to construct a directory listing page for the given request path and
-- the resulting folder.
type Listing = Pieces -> Folder -> IO Builder
-- | All of the settings available to users for tweaking wai-app-static.
--
-- Note that you should use the settings type approach for modifying values.
-- See <http://www.yesodweb.com/book/settings-types> for more information.
data StaticSettings = StaticSettings
{
-- | Lookup a single file or folder. This is how you can control storage
-- backend (filesystem, embedded, etc) and where to lookup.
ssLookupFile :: Pieces -> IO LookupResult
-- | Determine the mime type of the given file. Note that this function
-- lives in @IO@ in case you want to perform more complicated mimetype
-- analysis, such as via the @file@ utility.
, ssGetMimeType :: File -> IO MimeType
-- | Ordered list of filenames to be used for indices. If the user
-- requests a folder, and a file with the given name is found in that
-- folder, that file is served. This supersedes any directory listing.
, ssIndices :: [Piece]
-- | How to perform a directory listing. Optional. Will be used when the
-- user requested a folder.
, ssListing :: Maybe Listing
-- | Value to provide for max age in the cache-control.
, ssMaxAge :: MaxAge
-- | Given a requested path and a new destination, construct a string
-- that will go there. Default implementation will use relative paths.
, ssMkRedirect :: Pieces -> ByteString -> ByteString
-- | If @True@, send a redirect to the user when a folder is requested
-- and an index page should be displayed. When @False@, display the
-- content immediately.
, ssRedirectToIndex :: Bool
-- | Prefer usage of etag caching to last-modified caching.
, ssUseHash :: Bool
}
| jberryman/wai | wai-app-static/WaiAppStatic/Types.hs | mit | 5,127 | 0 | 11 | 1,221 | 600 | 376 | 224 | 62 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-| Implementation of the Ganeti logging functionality.
This currently lacks the following (FIXME):
- log file reopening
Note that this requires the hslogger library version 1.1 and above.
-}
{-
Copyright (C) 2011, 2012, 2013 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.Logging
( setupLogging
, logDebug
, logInfo
, logNotice
, logWarning
, logError
, logCritical
, logAlert
, logEmergency
, SyslogUsage(..)
, syslogUsageToRaw
, syslogUsageFromRaw
) where
import Control.Monad (when)
import System.Log.Logger
import System.Log.Handler.Simple
import System.Log.Handler.Syslog
import System.Log.Handler (setFormatter, LogHandler)
import System.Log.Formatter
import System.IO
import Ganeti.THH
import qualified Ganeti.Constants as C
-- | Syslog usage type.
$(declareSADT "SyslogUsage"
[ ("SyslogNo", 'C.syslogNo)
, ("SyslogYes", 'C.syslogYes)
, ("SyslogOnly", 'C.syslogOnly)
])
-- | Builds the log formatter.
logFormatter :: String -- ^ Program
-> Bool -- ^ Multithreaded
-> Bool -- ^ Syslog
-> LogFormatter a
logFormatter prog mt syslog =
let parts = [ if syslog
then "[$pid]:"
else "$time: " ++ prog ++ " pid=$pid"
, if mt then if syslog then " ($tid)" else "/$tid"
else ""
, " $prio $msg"
]
in tfLogFormatter "%F %X,%q %Z" $ concat parts
-- | Helper to open and set the formatter on a log if enabled by a
-- given condition, otherwise returning an empty list.
openFormattedHandler :: (LogHandler a) => Bool
-> LogFormatter a -> IO a -> IO [a]
openFormattedHandler False _ _ = return []
openFormattedHandler True fmt opener = do
handler <- opener
return [setFormatter handler fmt]
-- | Sets up the logging configuration.
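--
-- An illustrative call (values are only an example): log to a file at
-- @INFO@ level, mirror messages to stderr, and do not use syslog:
--
-- > setupLogging (Just "/var/log/demo.log") "demo-daemon" False True False SyslogNo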
setupLogging :: Maybe String -- ^ Log file
-> String -- ^ Program name
-> Bool -- ^ Debug level
-> Bool -- ^ Log to stderr
-> Bool -- ^ Log to console
-> SyslogUsage -- ^ Syslog usage
-> IO ()
setupLogging logf program debug stderr_logging console syslog = do
let level = if debug then DEBUG else INFO
destf = if console then Just C.devConsole else logf
fmt = logFormatter program False False
file_logging = syslog /= SyslogOnly
updateGlobalLogger rootLoggerName (setLevel level)
stderr_handlers <- openFormattedHandler stderr_logging fmt $
streamHandler stderr level
file_handlers <- case destf of
Nothing -> return []
Just path -> openFormattedHandler file_logging fmt $
fileHandler path level
let handlers = file_handlers ++ stderr_handlers
updateGlobalLogger rootLoggerName $ setHandlers handlers
-- syslog handler is special (another type, still instance of the
-- typeclass, and has a built-in formatter), so we can't pass it in
-- the above list
when (syslog /= SyslogNo) $ do
syslog_handler <- openlog program [PID] DAEMON INFO
updateGlobalLogger rootLoggerName $ addHandler syslog_handler
-- * Logging function aliases
-- | Log at debug level.
logDebug :: String -> IO ()
logDebug = debugM rootLoggerName
-- | Log at info level.
logInfo :: String -> IO ()
logInfo = infoM rootLoggerName
-- | Log at notice level.
logNotice :: String -> IO ()
logNotice = noticeM rootLoggerName
-- | Log at warning level.
logWarning :: String -> IO ()
logWarning = warningM rootLoggerName
-- | Log at error level.
logError :: String -> IO ()
logError = errorM rootLoggerName
-- | Log at critical level.
logCritical :: String -> IO ()
logCritical = criticalM rootLoggerName
-- | Log at alert level.
logAlert :: String -> IO ()
logAlert = alertM rootLoggerName
-- | Log at emergency level.
logEmergency :: String -> IO ()
logEmergency = emergencyM rootLoggerName
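-- A hedged usage sketch, not part of the original module; the log path
-- and program name are hypothetical. It sets up file logging at INFO
-- level with stderr, console and syslog logging disabled, then emits a
-- message through one of the aliases above.
_exampleLoggingSetup :: IO ()
_exampleLoggingSetup = do
  setupLogging (Just "/var/log/ganeti/example.log") "ganeti-example"
               False False False SyslogNo
  logInfo "logging initialised"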
| dblia/nosql-ganeti | src/Ganeti/Logging.hs | gpl-2.0 | 4,650 | 0 | 12 | 1,147 | 817 | 440 | 377 | 85 | 4 |
{-
(c) Galois, 2006
(c) University of Glasgow, 2007
-}
{-# LANGUAGE CPP, NondecreasingIndentation, RecordWildCards #-}
module Coverage (addTicksToBinds, hpcInitCode) where
#ifdef GHCI
import qualified GHCi
import GHCi.RemoteTypes
import Data.Array
import ByteCodeTypes
import GHC.Stack.CCS
#endif
import Type
import HsSyn
import Module
import Outputable
import DynFlags
import Control.Monad
import SrcLoc
import ErrUtils
import NameSet hiding (FreeVars)
import Name
import Bag
import CostCentre
import CoreSyn
import Id
import VarSet
import Data.List
import FastString
import HscTypes
import TyCon
import UniqSupply
import BasicTypes
import MonadUtils
import Maybes
import CLabel
import Util
import Data.Time
import System.Directory
import Trace.Hpc.Mix
import Trace.Hpc.Util
import Data.Map (Map)
import qualified Data.Map as Map
{-
************************************************************************
* *
* The main function: addTicksToBinds
* *
************************************************************************
-}
addTicksToBinds
:: HscEnv
-> Module
-> ModLocation -- ... off the current module
-> NameSet -- Exported Ids. When we call addTicksToBinds,
-- isExportedId doesn't work yet (the desugarer
-- hasn't set it), so we have to work from this set.
  -> [TyCon]              -- Type constructors in this module
-> LHsBinds Id
-> IO (LHsBinds Id, HpcInfo, Maybe ModBreaks)
addTicksToBinds hsc_env mod mod_loc exports tyCons binds
| let dflags = hsc_dflags hsc_env
passes = coveragePasses dflags, not (null passes),
Just orig_file <- ml_hs_file mod_loc,
not ("boot" `isSuffixOf` orig_file) = do
us <- mkSplitUniqSupply 'C' -- for cost centres
let orig_file2 = guessSourceFile binds orig_file
tickPass tickish (binds,st) =
let env = TTE
{ fileName = mkFastString orig_file2
, declPath = []
, tte_dflags = dflags
, exports = exports
, inlines = emptyVarSet
, inScope = emptyVarSet
, blackList = Map.fromList
[ (getSrcSpan (tyConName tyCon),())
| tyCon <- tyCons ]
, density = mkDensity tickish dflags
, this_mod = mod
, tickishType = tickish
}
(binds',_,st') = unTM (addTickLHsBinds binds) env st
in (binds', st')
initState = TT { tickBoxCount = 0
, mixEntries = []
, uniqSupply = us
}
(binds1,st) = foldr tickPass (binds, initState) passes
let tickCount = tickBoxCount st
entries = reverse $ mixEntries st
hashNo <- writeMixEntries dflags mod tickCount entries orig_file2
modBreaks <- mkModBreaks hsc_env mod tickCount entries
when (dopt Opt_D_dump_ticked dflags) $
log_action dflags dflags NoReason SevDump noSrcSpan defaultDumpStyle
(pprLHsBinds binds1)
return (binds1, HpcInfo tickCount hashNo, Just modBreaks)
| otherwise = return (binds, emptyHpcInfo False, Nothing)
guessSourceFile :: LHsBinds Id -> FilePath -> FilePath
guessSourceFile binds orig_file =
-- Try look for a file generated from a .hsc file to a
-- .hs file, by peeking ahead.
let top_pos = catMaybes $ foldrBag (\ (L pos _) rest ->
srcSpanFileName_maybe pos : rest) [] binds
in
case top_pos of
(file_name:_) | ".hsc" `isSuffixOf` unpackFS file_name
-> unpackFS file_name
_ -> orig_file
mkModBreaks :: HscEnv -> Module -> Int -> [MixEntry_] -> IO ModBreaks
#ifndef GHCI
mkModBreaks _hsc_env _mod _count _entries = return emptyModBreaks
#else
mkModBreaks hsc_env mod count entries
| HscInterpreted <- hscTarget (hsc_dflags hsc_env) = do
breakArray <- GHCi.newBreakArray hsc_env (length entries)
ccs <- mkCCSArray hsc_env mod count entries
let
locsTicks = listArray (0,count-1) [ span | (span,_,_,_) <- entries ]
varsTicks = listArray (0,count-1) [ vars | (_,_,vars,_) <- entries ]
declsTicks = listArray (0,count-1) [ decls | (_,decls,_,_) <- entries ]
return emptyModBreaks
{ modBreaks_flags = breakArray
, modBreaks_locs = locsTicks
, modBreaks_vars = varsTicks
, modBreaks_decls = declsTicks
, modBreaks_ccs = ccs
}
| otherwise = return emptyModBreaks
mkCCSArray
:: HscEnv -> Module -> Int -> [MixEntry_]
-> IO (Array BreakIndex (RemotePtr GHC.Stack.CCS.CostCentre))
mkCCSArray hsc_env modul count entries = do
if interpreterProfiled dflags
then do
let module_str = moduleNameString (moduleName modul)
costcentres <- GHCi.mkCostCentres hsc_env module_str (map mk_one entries)
return (listArray (0,count-1) costcentres)
else do
return (listArray (0,-1) [])
where
dflags = hsc_dflags hsc_env
mk_one (srcspan, decl_path, _, _) = (name, src)
where name = concat (intersperse "." decl_path)
src = showSDoc dflags (ppr srcspan)
#endif
writeMixEntries
:: DynFlags -> Module -> Int -> [MixEntry_] -> FilePath -> IO Int
writeMixEntries dflags mod count entries filename
| not (gopt Opt_Hpc dflags) = return 0
| otherwise = do
let
hpc_dir = hpcDir dflags
mod_name = moduleNameString (moduleName mod)
hpc_mod_dir
| moduleUnitId mod == mainUnitId = hpc_dir
| otherwise = hpc_dir ++ "/" ++ unitIdString (moduleUnitId mod)
tabStop = 8 -- <tab> counts as a normal char in GHC's
-- location ranges.
createDirectoryIfMissing True hpc_mod_dir
modTime <- getModificationUTCTime filename
let entries' = [ (hpcPos, box)
| (span,_,_,box) <- entries, hpcPos <- [mkHpcPos span] ]
when (length entries' /= count) $ do
panic "the number of .mix entries are inconsistent"
let hashNo = mixHash filename modTime tabStop entries'
mixCreate hpc_mod_dir mod_name
$ Mix filename modTime (toHash hashNo) tabStop entries'
return hashNo
-- -----------------------------------------------------------------------------
-- TickDensity: where to insert ticks
data TickDensity
= TickForCoverage -- for Hpc
| TickForBreakPoints -- for GHCi
| TickAllFunctions -- for -prof-auto-all
| TickTopFunctions -- for -prof-auto-top
| TickExportedFunctions -- for -prof-auto-exported
| TickCallSites -- for stack tracing
deriving Eq
mkDensity :: TickishType -> DynFlags -> TickDensity
mkDensity tickish dflags = case tickish of
HpcTicks -> TickForCoverage
SourceNotes -> TickForCoverage
Breakpoints -> TickForBreakPoints
ProfNotes ->
case profAuto dflags of
ProfAutoAll -> TickAllFunctions
ProfAutoTop -> TickTopFunctions
ProfAutoExports -> TickExportedFunctions
ProfAutoCalls -> TickCallSites
_other -> panic "mkDensity"
-- | Decide whether to add a tick to a binding or not.
shouldTickBind :: TickDensity
-> Bool -- top level?
-> Bool -- exported?
-> Bool -- simple pat bind?
-> Bool -- INLINE pragma?
-> Bool
shouldTickBind density top_lev exported _simple_pat inline
= case density of
TickForBreakPoints -> False
-- we never add breakpoints to simple pattern bindings
-- (there's always a tick on the rhs anyway).
TickAllFunctions -> not inline
TickTopFunctions -> top_lev && not inline
TickExportedFunctions -> exported && not inline
TickForCoverage -> True
TickCallSites -> False
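-- A hedged illustration, not part of the original module: how the
-- decision table above behaves for -prof-auto-top style ticking
-- (TickTopFunctions) on a non-INLINE, non-exported, non-simple binding.
_shouldTickBindExamples :: [Bool]
_shouldTickBindExamples =
  [ shouldTickBind TickTopFunctions True  False False False -- top level: True
  , shouldTickBind TickTopFunctions False False False False -- nested: False
  ]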
shouldTickPatBind :: TickDensity -> Bool -> Bool
shouldTickPatBind density top_lev
= case density of
TickForBreakPoints -> False
TickAllFunctions -> True
TickTopFunctions -> top_lev
TickExportedFunctions -> False
TickForCoverage -> False
TickCallSites -> False
-- -----------------------------------------------------------------------------
-- Adding ticks to bindings
addTickLHsBinds :: LHsBinds Id -> TM (LHsBinds Id)
addTickLHsBinds = mapBagM addTickLHsBind
addTickLHsBind :: LHsBind Id -> TM (LHsBind Id)
addTickLHsBind (L pos bind@(AbsBinds { abs_binds = binds,
abs_exports = abs_exports })) = do
withEnv add_exports $ do
withEnv add_inlines $ do
binds' <- addTickLHsBinds binds
return $ L pos $ bind { abs_binds = binds' }
where
-- in AbsBinds, the Id on each binding is not the actual top-level
-- Id that we are defining, they are related by the abs_exports
-- field of AbsBinds. So if we're doing TickExportedFunctions we need
-- to add the local Ids to the set of exported Names so that we know to
-- tick the right bindings.
add_exports env =
env{ exports = exports env `extendNameSetList`
[ idName mid
| ABE{ abe_poly = pid, abe_mono = mid } <- abs_exports
, idName pid `elemNameSet` (exports env) ] }
add_inlines env =
env{ inlines = inlines env `extendVarSetList`
[ mid
| ABE{ abe_poly = pid, abe_mono = mid } <- abs_exports
, isAnyInlinePragma (idInlinePragma pid) ] }
addTickLHsBind (L pos bind@(AbsBindsSig { abs_sig_bind = val_bind
, abs_sig_export = poly_id }))
| L _ FunBind { fun_id = L _ mono_id } <- val_bind
= do withEnv (add_export mono_id) $ do
withEnv (add_inlines mono_id) $ do
val_bind' <- addTickLHsBind val_bind
return $ L pos $ bind { abs_sig_bind = val_bind' }
| otherwise
= pprPanic "addTickLHsBind" (ppr bind)
where
-- see AbsBinds comments
add_export mono_id env
| idName poly_id `elemNameSet` exports env
= env { exports = exports env `extendNameSet` idName mono_id }
| otherwise
= env
add_inlines mono_id env
| isAnyInlinePragma (idInlinePragma poly_id)
= env { inlines = inlines env `extendVarSet` mono_id }
| otherwise
= env
addTickLHsBind (L pos (funBind@(FunBind { fun_id = (L _ id) }))) = do
let name = getOccString id
decl_path <- getPathEntry
density <- getDensity
inline_ids <- liftM inlines getEnv
let inline = isAnyInlinePragma (idInlinePragma id)
|| id `elemVarSet` inline_ids
-- See Note [inline sccs]
tickish <- tickishType `liftM` getEnv
if inline && tickish == ProfNotes then return (L pos funBind) else do
(fvs, mg@(MG { mg_alts = matches' })) <-
getFreeVars $
addPathEntry name $
addTickMatchGroup False (fun_matches funBind)
blackListed <- isBlackListed pos
exported_names <- liftM exports getEnv
-- We don't want to generate code for blacklisted positions
-- We don't want redundant ticks on simple pattern bindings
-- We don't want to tick non-exported bindings in TickExportedFunctions
let simple = isSimplePatBind funBind
toplev = null decl_path
exported = idName id `elemNameSet` exported_names
tick <- if not blackListed &&
shouldTickBind density toplev exported simple inline
then
bindTick density name pos fvs
else
return Nothing
let mbCons = maybe Prelude.id (:)
return $ L pos $ funBind { fun_matches = mg { mg_alts = matches' }
, fun_tick = tick `mbCons` fun_tick funBind }
where
-- a binding is a simple pattern binding if it is a funbind with
-- zero patterns
isSimplePatBind :: HsBind a -> Bool
isSimplePatBind funBind = matchGroupArity (fun_matches funBind) == 0
-- TODO: Revisit this
addTickLHsBind (L pos (pat@(PatBind { pat_lhs = lhs, pat_rhs = rhs }))) = do
let name = "(...)"
(fvs, rhs') <- getFreeVars $ addPathEntry name $ addTickGRHSs False False rhs
let pat' = pat { pat_rhs = rhs'}
-- Should create ticks here?
density <- getDensity
decl_path <- getPathEntry
let top_lev = null decl_path
if not (shouldTickPatBind density top_lev) then return (L pos pat') else do
-- Allocate the ticks
rhs_tick <- bindTick density name pos fvs
let patvars = map getOccString (collectPatBinders lhs)
patvar_ticks <- mapM (\v -> bindTick density v pos fvs) patvars
-- Add to pattern
let mbCons = maybe id (:)
rhs_ticks = rhs_tick `mbCons` fst (pat_ticks pat')
patvar_tickss = zipWith mbCons patvar_ticks
(snd (pat_ticks pat') ++ repeat [])
return $ L pos $ pat' { pat_ticks = (rhs_ticks, patvar_tickss) }
-- Only internal stuff, not from source, uses VarBind, so we ignore it.
addTickLHsBind var_bind@(L _ (VarBind {})) = return var_bind
addTickLHsBind patsyn_bind@(L _ (PatSynBind {})) = return patsyn_bind
bindTick
:: TickDensity -> String -> SrcSpan -> FreeVars -> TM (Maybe (Tickish Id))
bindTick density name pos fvs = do
decl_path <- getPathEntry
let
toplev = null decl_path
count_entries = toplev || density == TickAllFunctions
top_only = density /= TickAllFunctions
box_label = if toplev then TopLevelBox [name]
else LocalBox (decl_path ++ [name])
--
allocATickBox box_label count_entries top_only pos fvs
-- Note [inline sccs]
--
-- It should be reasonable to add ticks to INLINE functions; however
-- currently this tickles a bug later on because the SCCfinal pass
-- does not look inside unfoldings to find CostCentres. It would be
-- difficult to fix that, because SCCfinal currently works on STG and
-- not Core (and since it also generates CostCentres for CAFs,
-- changing this would be difficult too).
--
-- Another reason not to add ticks to INLINE functions is that this
-- is sometimes handy for avoiding adding a tick to a particular function
-- (see #6131)
--
-- So for now we do not add any ticks to INLINE functions at all.
-- -----------------------------------------------------------------------------
-- Decorate an LHsExpr with ticks
-- selectively add ticks to interesting expressions
addTickLHsExpr :: LHsExpr Id -> TM (LHsExpr Id)
addTickLHsExpr e@(L pos e0) = do
d <- getDensity
case d of
TickForBreakPoints | isGoodBreakExpr e0 -> tick_it
TickForCoverage -> tick_it
TickCallSites | isCallSite e0 -> tick_it
_other -> dont_tick_it
where
tick_it = allocTickBox (ExpBox False) False False pos $ addTickHsExpr e0
dont_tick_it = addTickLHsExprNever e
-- Add a tick to an expression which is the RHS of an equation or a binding.
-- We always consider these to be breakpoints, unless the expression is a 'let'
-- (because the body will definitely have a tick somewhere). ToDo: perhaps
-- we should treat 'case' and 'if' the same way?
addTickLHsExprRHS :: LHsExpr Id -> TM (LHsExpr Id)
addTickLHsExprRHS e@(L pos e0) = do
d <- getDensity
case d of
TickForBreakPoints | HsLet{} <- e0 -> dont_tick_it
| otherwise -> tick_it
TickForCoverage -> tick_it
TickCallSites | isCallSite e0 -> tick_it
_other -> dont_tick_it
where
tick_it = allocTickBox (ExpBox False) False False pos $ addTickHsExpr e0
dont_tick_it = addTickLHsExprNever e
-- The inner expression of an evaluation context:
-- let binds in [], ( [] )
-- we never tick these if we're doing HPC, but otherwise
-- we treat it like an ordinary expression.
addTickLHsExprEvalInner :: LHsExpr Id -> TM (LHsExpr Id)
addTickLHsExprEvalInner e = do
d <- getDensity
case d of
TickForCoverage -> addTickLHsExprNever e
_otherwise -> addTickLHsExpr e
-- | A let body is treated differently from addTickLHsExprEvalInner
-- above with TickForBreakPoints, because for breakpoints we always
-- want to tick the body, even if it is not a redex. See test
-- break012. This gives the user the opportunity to inspect the
-- values of the let-bound variables.
addTickLHsExprLetBody :: LHsExpr Id -> TM (LHsExpr Id)
addTickLHsExprLetBody e@(L pos e0) = do
d <- getDensity
case d of
TickForBreakPoints | HsLet{} <- e0 -> dont_tick_it
| otherwise -> tick_it
_other -> addTickLHsExprEvalInner e
where
tick_it = allocTickBox (ExpBox False) False False pos $ addTickHsExpr e0
dont_tick_it = addTickLHsExprNever e
-- version of addTick that does not actually add a tick,
-- because the scope of this tick is completely subsumed by
-- another.
addTickLHsExprNever :: LHsExpr Id -> TM (LHsExpr Id)
addTickLHsExprNever (L pos e0) = do
e1 <- addTickHsExpr e0
return $ L pos e1
-- general heuristic: expressions which do not denote values are good
-- break points
isGoodBreakExpr :: HsExpr Id -> Bool
isGoodBreakExpr (HsApp {}) = True
isGoodBreakExpr (HsAppTypeOut {}) = True
isGoodBreakExpr (OpApp {}) = True
isGoodBreakExpr _other = False
isCallSite :: HsExpr Id -> Bool
isCallSite HsApp{} = True
isCallSite HsAppTypeOut{} = True
isCallSite OpApp{} = True
isCallSite _ = False
addTickLHsExprOptAlt :: Bool -> LHsExpr Id -> TM (LHsExpr Id)
addTickLHsExprOptAlt oneOfMany (L pos e0)
= ifDensity TickForCoverage
(allocTickBox (ExpBox oneOfMany) False False pos $ addTickHsExpr e0)
(addTickLHsExpr (L pos e0))
addBinTickLHsExpr :: (Bool -> BoxLabel) -> LHsExpr Id -> TM (LHsExpr Id)
addBinTickLHsExpr boxLabel (L pos e0)
= ifDensity TickForCoverage
(allocBinTickBox boxLabel pos $ addTickHsExpr e0)
(addTickLHsExpr (L pos e0))
-- -----------------------------------------------------------------------------
-- Decorate the body of an HsExpr with ticks.
-- (Whether to put a tick around the whole expression was already decided,
-- in the addTickLHsExpr family of functions.)
addTickHsExpr :: HsExpr Id -> TM (HsExpr Id)
addTickHsExpr e@(HsVar (L _ id)) = do freeVar id; return e
addTickHsExpr (HsUnboundVar {}) = panic "addTickHsExpr.HsUnboundVar"
addTickHsExpr e@(HsIPVar _) = return e
addTickHsExpr e@(HsOverLit _) = return e
addTickHsExpr e@(HsOverLabel _) = return e
addTickHsExpr e@(HsLit _) = return e
addTickHsExpr (HsLam matchgroup) = liftM HsLam (addTickMatchGroup True matchgroup)
addTickHsExpr (HsLamCase mgs) = liftM HsLamCase (addTickMatchGroup True mgs)
addTickHsExpr (HsApp e1 e2) = liftM2 HsApp (addTickLHsExprNever e1)
(addTickLHsExpr e2)
addTickHsExpr (HsAppTypeOut e ty) = liftM2 HsAppTypeOut (addTickLHsExprNever e)
(return ty)
addTickHsExpr (OpApp e1 e2 fix e3) =
liftM4 OpApp
(addTickLHsExpr e1)
(addTickLHsExprNever e2)
(return fix)
(addTickLHsExpr e3)
addTickHsExpr (NegApp e neg) =
liftM2 NegApp
(addTickLHsExpr e)
(addTickSyntaxExpr hpcSrcSpan neg)
addTickHsExpr (HsPar e) =
liftM HsPar (addTickLHsExprEvalInner e)
addTickHsExpr (SectionL e1 e2) =
liftM2 SectionL
(addTickLHsExpr e1)
(addTickLHsExprNever e2)
addTickHsExpr (SectionR e1 e2) =
liftM2 SectionR
(addTickLHsExprNever e1)
(addTickLHsExpr e2)
addTickHsExpr (ExplicitTuple es boxity) =
liftM2 ExplicitTuple
(mapM addTickTupArg es)
(return boxity)
addTickHsExpr (ExplicitSum tag arity e ty) = do
e' <- addTickLHsExpr e
return (ExplicitSum tag arity e' ty)
addTickHsExpr (HsCase e mgs) =
liftM2 HsCase
(addTickLHsExpr e) -- not an EvalInner; e might not necessarily
-- be evaluated.
(addTickMatchGroup False mgs)
addTickHsExpr (HsIf cnd e1 e2 e3) =
liftM3 (HsIf cnd)
(addBinTickLHsExpr (BinBox CondBinBox) e1)
(addTickLHsExprOptAlt True e2)
(addTickLHsExprOptAlt True e3)
addTickHsExpr (HsMultiIf ty alts)
= do { let isOneOfMany = case alts of [_] -> False; _ -> True
; alts' <- mapM (liftL $ addTickGRHS isOneOfMany False) alts
; return $ HsMultiIf ty alts' }
addTickHsExpr (HsLet (L l binds) e) =
bindLocals (collectLocalBinders binds) $
liftM2 (HsLet . L l)
(addTickHsLocalBinds binds) -- to think about: !patterns.
(addTickLHsExprLetBody e)
addTickHsExpr (HsDo cxt (L l stmts) srcloc)
= do { (stmts', _) <- addTickLStmts' forQual stmts (return ())
; return (HsDo cxt (L l stmts') srcloc) }
where
forQual = case cxt of
ListComp -> Just $ BinBox QualBinBox
_ -> Nothing
addTickHsExpr (ExplicitList ty wit es) =
liftM3 ExplicitList
(return ty)
(addTickWit wit)
(mapM (addTickLHsExpr) es)
where addTickWit Nothing = return Nothing
addTickWit (Just fln)
= do fln' <- addTickSyntaxExpr hpcSrcSpan fln
return (Just fln')
addTickHsExpr (ExplicitPArr ty es) =
liftM2 ExplicitPArr
(return ty)
(mapM (addTickLHsExpr) es)
addTickHsExpr (HsStatic fvs e) = HsStatic fvs <$> addTickLHsExpr e
addTickHsExpr expr@(RecordCon { rcon_flds = rec_binds })
= do { rec_binds' <- addTickHsRecordBinds rec_binds
; return (expr { rcon_flds = rec_binds' }) }
addTickHsExpr expr@(RecordUpd { rupd_expr = e, rupd_flds = flds })
= do { e' <- addTickLHsExpr e
; flds' <- mapM addTickHsRecField flds
; return (expr { rupd_expr = e', rupd_flds = flds' }) }
addTickHsExpr (ExprWithTySig e ty) =
liftM2 ExprWithTySig
(addTickLHsExprNever e) -- No need to tick the inner expression
-- for expressions with signatures
(return ty)
addTickHsExpr (ArithSeq ty wit arith_seq) =
liftM3 ArithSeq
(return ty)
(addTickWit wit)
(addTickArithSeqInfo arith_seq)
where addTickWit Nothing = return Nothing
addTickWit (Just fl) = do fl' <- addTickSyntaxExpr hpcSrcSpan fl
return (Just fl')
-- We might encounter existing ticks (multiple Coverage passes)
addTickHsExpr (HsTick t e) =
liftM (HsTick t) (addTickLHsExprNever e)
addTickHsExpr (HsBinTick t0 t1 e) =
liftM (HsBinTick t0 t1) (addTickLHsExprNever e)
addTickHsExpr (HsTickPragma _ _ _ (L pos e0)) = do
e2 <- allocTickBox (ExpBox False) False False pos $
addTickHsExpr e0
return $ unLoc e2
addTickHsExpr (PArrSeq ty arith_seq) =
liftM2 PArrSeq
(return ty)
(addTickArithSeqInfo arith_seq)
addTickHsExpr (HsSCC src nm e) =
liftM3 HsSCC
(return src)
(return nm)
(addTickLHsExpr e)
addTickHsExpr (HsCoreAnn src nm e) =
liftM3 HsCoreAnn
(return src)
(return nm)
(addTickLHsExpr e)
addTickHsExpr e@(HsBracket {}) = return e
addTickHsExpr e@(HsTcBracketOut {}) = return e
addTickHsExpr e@(HsRnBracketOut {}) = return e
addTickHsExpr e@(HsSpliceE {}) = return e
addTickHsExpr (HsProc pat cmdtop) =
liftM2 HsProc
(addTickLPat pat)
(liftL (addTickHsCmdTop) cmdtop)
addTickHsExpr (HsWrap w e) =
liftM2 HsWrap
(return w)
(addTickHsExpr e) -- Explicitly no tick on inside
addTickHsExpr (ExprWithTySigOut e ty) =
liftM2 ExprWithTySigOut
(addTickLHsExprNever e) -- No need to tick the inner expression
(return ty) -- for expressions with signatures
-- Others should never happen in expression content.
addTickHsExpr e = pprPanic "addTickHsExpr" (ppr e)
addTickTupArg :: LHsTupArg Id -> TM (LHsTupArg Id)
addTickTupArg (L l (Present e)) = do { e' <- addTickLHsExpr e
; return (L l (Present e')) }
addTickTupArg (L l (Missing ty)) = return (L l (Missing ty))
addTickMatchGroup :: Bool{-is lambda-} -> MatchGroup Id (LHsExpr Id) -> TM (MatchGroup Id (LHsExpr Id))
addTickMatchGroup is_lam mg@(MG { mg_alts = L l matches }) = do
let isOneOfMany = matchesOneOfMany matches
matches' <- mapM (liftL (addTickMatch isOneOfMany is_lam)) matches
return $ mg { mg_alts = L l matches' }
addTickMatch :: Bool -> Bool -> Match Id (LHsExpr Id) -> TM (Match Id (LHsExpr Id))
addTickMatch isOneOfMany isLambda (Match mf pats opSig gRHSs) =
bindLocals (collectPatsBinders pats) $ do
gRHSs' <- addTickGRHSs isOneOfMany isLambda gRHSs
return $ Match mf pats opSig gRHSs'
addTickGRHSs :: Bool -> Bool -> GRHSs Id (LHsExpr Id) -> TM (GRHSs Id (LHsExpr Id))
addTickGRHSs isOneOfMany isLambda (GRHSs guarded (L l local_binds)) = do
bindLocals binders $ do
local_binds' <- addTickHsLocalBinds local_binds
guarded' <- mapM (liftL (addTickGRHS isOneOfMany isLambda)) guarded
return $ GRHSs guarded' (L l local_binds')
where
binders = collectLocalBinders local_binds
addTickGRHS :: Bool -> Bool -> GRHS Id (LHsExpr Id) -> TM (GRHS Id (LHsExpr Id))
addTickGRHS isOneOfMany isLambda (GRHS stmts expr) = do
(stmts',expr') <- addTickLStmts' (Just $ BinBox $ GuardBinBox) stmts
(addTickGRHSBody isOneOfMany isLambda expr)
return $ GRHS stmts' expr'
addTickGRHSBody :: Bool -> Bool -> LHsExpr Id -> TM (LHsExpr Id)
addTickGRHSBody isOneOfMany isLambda expr@(L pos e0) = do
d <- getDensity
case d of
TickForCoverage -> addTickLHsExprOptAlt isOneOfMany expr
TickAllFunctions | isLambda ->
addPathEntry "\\" $
allocTickBox (ExpBox False) True{-count-} False{-not top-} pos $
addTickHsExpr e0
_otherwise ->
addTickLHsExprRHS expr
addTickLStmts :: (Maybe (Bool -> BoxLabel)) -> [ExprLStmt Id] -> TM [ExprLStmt Id]
addTickLStmts isGuard stmts = do
(stmts, _) <- addTickLStmts' isGuard stmts (return ())
return stmts
addTickLStmts' :: (Maybe (Bool -> BoxLabel)) -> [ExprLStmt Id] -> TM a
-> TM ([ExprLStmt Id], a)
addTickLStmts' isGuard lstmts res
= bindLocals (collectLStmtsBinders lstmts) $
do { lstmts' <- mapM (liftL (addTickStmt isGuard)) lstmts
; a <- res
; return (lstmts', a) }
addTickStmt :: (Maybe (Bool -> BoxLabel)) -> Stmt Id (LHsExpr Id) -> TM (Stmt Id (LHsExpr Id))
addTickStmt _isGuard (LastStmt e noret ret) = do
liftM3 LastStmt
(addTickLHsExpr e)
(pure noret)
(addTickSyntaxExpr hpcSrcSpan ret)
addTickStmt _isGuard (BindStmt pat e bind fail ty) = do
liftM5 BindStmt
(addTickLPat pat)
(addTickLHsExprRHS e)
(addTickSyntaxExpr hpcSrcSpan bind)
(addTickSyntaxExpr hpcSrcSpan fail)
(return ty)
addTickStmt isGuard (BodyStmt e bind' guard' ty) = do
liftM4 BodyStmt
(addTick isGuard e)
(addTickSyntaxExpr hpcSrcSpan bind')
(addTickSyntaxExpr hpcSrcSpan guard')
(return ty)
addTickStmt _isGuard (LetStmt (L l binds)) = do
liftM (LetStmt . L l)
(addTickHsLocalBinds binds)
addTickStmt isGuard (ParStmt pairs mzipExpr bindExpr ty) = do
liftM4 ParStmt
(mapM (addTickStmtAndBinders isGuard) pairs)
(unLoc <$> addTickLHsExpr (L hpcSrcSpan mzipExpr))
(addTickSyntaxExpr hpcSrcSpan bindExpr)
(return ty)
addTickStmt isGuard (ApplicativeStmt args mb_join body_ty) = do
args' <- mapM (addTickApplicativeArg isGuard) args
return (ApplicativeStmt args' mb_join body_ty)
addTickStmt isGuard stmt@(TransStmt { trS_stmts = stmts
, trS_by = by, trS_using = using
, trS_ret = returnExpr, trS_bind = bindExpr
, trS_fmap = liftMExpr }) = do
t_s <- addTickLStmts isGuard stmts
t_y <- fmapMaybeM addTickLHsExprRHS by
t_u <- addTickLHsExprRHS using
t_f <- addTickSyntaxExpr hpcSrcSpan returnExpr
t_b <- addTickSyntaxExpr hpcSrcSpan bindExpr
L _ t_m <- addTickLHsExpr (L hpcSrcSpan liftMExpr)
return $ stmt { trS_stmts = t_s, trS_by = t_y, trS_using = t_u
, trS_ret = t_f, trS_bind = t_b, trS_fmap = t_m }
addTickStmt isGuard stmt@(RecStmt {})
= do { stmts' <- addTickLStmts isGuard (recS_stmts stmt)
; ret' <- addTickSyntaxExpr hpcSrcSpan (recS_ret_fn stmt)
; mfix' <- addTickSyntaxExpr hpcSrcSpan (recS_mfix_fn stmt)
; bind' <- addTickSyntaxExpr hpcSrcSpan (recS_bind_fn stmt)
; return (stmt { recS_stmts = stmts', recS_ret_fn = ret'
, recS_mfix_fn = mfix', recS_bind_fn = bind' }) }
addTick :: Maybe (Bool -> BoxLabel) -> LHsExpr Id -> TM (LHsExpr Id)
addTick isGuard e | Just fn <- isGuard = addBinTickLHsExpr fn e
| otherwise = addTickLHsExprRHS e
addTickApplicativeArg
:: Maybe (Bool -> BoxLabel) -> (SyntaxExpr Id, ApplicativeArg Id Id)
-> TM (SyntaxExpr Id, ApplicativeArg Id Id)
addTickApplicativeArg isGuard (op, arg) =
liftM2 (,) (addTickSyntaxExpr hpcSrcSpan op) (addTickArg arg)
where
addTickArg (ApplicativeArgOne pat expr) =
ApplicativeArgOne <$> addTickLPat pat <*> addTickLHsExpr expr
addTickArg (ApplicativeArgMany stmts ret pat) =
ApplicativeArgMany
<$> addTickLStmts isGuard stmts
<*> (unLoc <$> addTickLHsExpr (L hpcSrcSpan ret))
<*> addTickLPat pat
addTickStmtAndBinders :: Maybe (Bool -> BoxLabel) -> ParStmtBlock Id Id
-> TM (ParStmtBlock Id Id)
addTickStmtAndBinders isGuard (ParStmtBlock stmts ids returnExpr) =
liftM3 ParStmtBlock
(addTickLStmts isGuard stmts)
(return ids)
(addTickSyntaxExpr hpcSrcSpan returnExpr)
addTickHsLocalBinds :: HsLocalBinds Id -> TM (HsLocalBinds Id)
addTickHsLocalBinds (HsValBinds binds) =
liftM HsValBinds
(addTickHsValBinds binds)
addTickHsLocalBinds (HsIPBinds binds) =
liftM HsIPBinds
(addTickHsIPBinds binds)
addTickHsLocalBinds (EmptyLocalBinds) = return EmptyLocalBinds
addTickHsValBinds :: HsValBindsLR Id a -> TM (HsValBindsLR Id b)
addTickHsValBinds (ValBindsOut binds sigs) =
liftM2 ValBindsOut
(mapM (\ (rec,binds') ->
liftM2 (,)
(return rec)
(addTickLHsBinds binds'))
binds)
(return sigs)
addTickHsValBinds _ = panic "addTickHsValBinds"
addTickHsIPBinds :: HsIPBinds Id -> TM (HsIPBinds Id)
addTickHsIPBinds (IPBinds ipbinds dictbinds) =
liftM2 IPBinds
(mapM (liftL (addTickIPBind)) ipbinds)
(return dictbinds)
addTickIPBind :: IPBind Id -> TM (IPBind Id)
addTickIPBind (IPBind nm e) =
liftM2 IPBind
(return nm)
(addTickLHsExpr e)
-- There is no location here, so we might need to use a context location??
addTickSyntaxExpr :: SrcSpan -> SyntaxExpr Id -> TM (SyntaxExpr Id)
addTickSyntaxExpr pos syn@(SyntaxExpr { syn_expr = x }) = do
L _ x' <- addTickLHsExpr (L pos x)
return $ syn { syn_expr = x' }
-- we do not walk into patterns.
addTickLPat :: LPat Id -> TM (LPat Id)
addTickLPat pat = return pat
addTickHsCmdTop :: HsCmdTop Id -> TM (HsCmdTop Id)
addTickHsCmdTop (HsCmdTop cmd tys ty syntaxtable) =
liftM4 HsCmdTop
(addTickLHsCmd cmd)
(return tys)
(return ty)
(return syntaxtable)
addTickLHsCmd :: LHsCmd Id -> TM (LHsCmd Id)
addTickLHsCmd (L pos c0) = do
c1 <- addTickHsCmd c0
return $ L pos c1
addTickHsCmd :: HsCmd Id -> TM (HsCmd Id)
addTickHsCmd (HsCmdLam matchgroup) =
liftM HsCmdLam (addTickCmdMatchGroup matchgroup)
addTickHsCmd (HsCmdApp c e) =
liftM2 HsCmdApp (addTickLHsCmd c) (addTickLHsExpr e)
{-
addTickHsCmd (OpApp e1 c2 fix c3) =
liftM4 OpApp
(addTickLHsExpr e1)
(addTickLHsCmd c2)
(return fix)
(addTickLHsCmd c3)
-}
addTickHsCmd (HsCmdPar e) = liftM HsCmdPar (addTickLHsCmd e)
addTickHsCmd (HsCmdCase e mgs) =
liftM2 HsCmdCase
(addTickLHsExpr e)
(addTickCmdMatchGroup mgs)
addTickHsCmd (HsCmdIf cnd e1 c2 c3) =
liftM3 (HsCmdIf cnd)
(addBinTickLHsExpr (BinBox CondBinBox) e1)
(addTickLHsCmd c2)
(addTickLHsCmd c3)
addTickHsCmd (HsCmdLet (L l binds) c) =
bindLocals (collectLocalBinders binds) $
liftM2 (HsCmdLet . L l)
(addTickHsLocalBinds binds) -- to think about: !patterns.
(addTickLHsCmd c)
addTickHsCmd (HsCmdDo (L l stmts) srcloc)
= do { (stmts', _) <- addTickLCmdStmts' stmts (return ())
; return (HsCmdDo (L l stmts') srcloc) }
addTickHsCmd (HsCmdArrApp e1 e2 ty1 arr_ty lr) =
liftM5 HsCmdArrApp
(addTickLHsExpr e1)
(addTickLHsExpr e2)
(return ty1)
(return arr_ty)
(return lr)
addTickHsCmd (HsCmdArrForm e fix cmdtop) =
liftM3 HsCmdArrForm
(addTickLHsExpr e)
(return fix)
(mapM (liftL (addTickHsCmdTop)) cmdtop)
addTickHsCmd (HsCmdWrap w cmd)
= liftM2 HsCmdWrap (return w) (addTickHsCmd cmd)
-- Others should never happen in a command context.
--addTickHsCmd e = pprPanic "addTickHsCmd" (ppr e)
addTickCmdMatchGroup :: MatchGroup Id (LHsCmd Id) -> TM (MatchGroup Id (LHsCmd Id))
addTickCmdMatchGroup mg@(MG { mg_alts = L l matches }) = do
matches' <- mapM (liftL addTickCmdMatch) matches
return $ mg { mg_alts = L l matches' }
addTickCmdMatch :: Match Id (LHsCmd Id) -> TM (Match Id (LHsCmd Id))
addTickCmdMatch (Match mf pats opSig gRHSs) =
bindLocals (collectPatsBinders pats) $ do
gRHSs' <- addTickCmdGRHSs gRHSs
return $ Match mf pats opSig gRHSs'
addTickCmdGRHSs :: GRHSs Id (LHsCmd Id) -> TM (GRHSs Id (LHsCmd Id))
addTickCmdGRHSs (GRHSs guarded (L l local_binds)) = do
bindLocals binders $ do
local_binds' <- addTickHsLocalBinds local_binds
guarded' <- mapM (liftL addTickCmdGRHS) guarded
return $ GRHSs guarded' (L l local_binds')
where
binders = collectLocalBinders local_binds
addTickCmdGRHS :: GRHS Id (LHsCmd Id) -> TM (GRHS Id (LHsCmd Id))
-- The *guards* are *not* Cmds, although the body is
-- C.f. addTickGRHS for the BinBox stuff
addTickCmdGRHS (GRHS stmts cmd)
= do { (stmts',expr') <- addTickLStmts' (Just $ BinBox $ GuardBinBox)
stmts (addTickLHsCmd cmd)
; return $ GRHS stmts' expr' }
addTickLCmdStmts :: [LStmt Id (LHsCmd Id)] -> TM [LStmt Id (LHsCmd Id)]
addTickLCmdStmts stmts = do
(stmts, _) <- addTickLCmdStmts' stmts (return ())
return stmts
addTickLCmdStmts' :: [LStmt Id (LHsCmd Id)] -> TM a -> TM ([LStmt Id (LHsCmd Id)], a)
addTickLCmdStmts' lstmts res
= bindLocals binders $ do
lstmts' <- mapM (liftL addTickCmdStmt) lstmts
a <- res
return (lstmts', a)
where
binders = collectLStmtsBinders lstmts
addTickCmdStmt :: Stmt Id (LHsCmd Id) -> TM (Stmt Id (LHsCmd Id))
addTickCmdStmt (BindStmt pat c bind fail ty) = do
liftM5 BindStmt
(addTickLPat pat)
(addTickLHsCmd c)
(return bind)
(return fail)
(return ty)
addTickCmdStmt (LastStmt c noret ret) = do
liftM3 LastStmt
(addTickLHsCmd c)
(pure noret)
(addTickSyntaxExpr hpcSrcSpan ret)
addTickCmdStmt (BodyStmt c bind' guard' ty) = do
liftM4 BodyStmt
(addTickLHsCmd c)
(addTickSyntaxExpr hpcSrcSpan bind')
(addTickSyntaxExpr hpcSrcSpan guard')
(return ty)
addTickCmdStmt (LetStmt (L l binds)) = do
liftM (LetStmt . L l)
(addTickHsLocalBinds binds)
addTickCmdStmt stmt@(RecStmt {})
= do { stmts' <- addTickLCmdStmts (recS_stmts stmt)
; ret' <- addTickSyntaxExpr hpcSrcSpan (recS_ret_fn stmt)
; mfix' <- addTickSyntaxExpr hpcSrcSpan (recS_mfix_fn stmt)
; bind' <- addTickSyntaxExpr hpcSrcSpan (recS_bind_fn stmt)
; return (stmt { recS_stmts = stmts', recS_ret_fn = ret'
, recS_mfix_fn = mfix', recS_bind_fn = bind' }) }
addTickCmdStmt ApplicativeStmt{} =
panic "ToDo: addTickCmdStmt ApplicativeLastStmt"
-- Others should never happen in a command context.
addTickCmdStmt stmt = pprPanic "addTickHsCmd" (ppr stmt)
addTickHsRecordBinds :: HsRecordBinds Id -> TM (HsRecordBinds Id)
addTickHsRecordBinds (HsRecFields fields dd)
= do { fields' <- mapM addTickHsRecField fields
; return (HsRecFields fields' dd) }
addTickHsRecField :: LHsRecField' id (LHsExpr Id) -> TM (LHsRecField' id (LHsExpr Id))
addTickHsRecField (L l (HsRecField id expr pun))
= do { expr' <- addTickLHsExpr expr
; return (L l (HsRecField id expr' pun)) }
addTickArithSeqInfo :: ArithSeqInfo Id -> TM (ArithSeqInfo Id)
addTickArithSeqInfo (From e1) =
liftM From
(addTickLHsExpr e1)
addTickArithSeqInfo (FromThen e1 e2) =
liftM2 FromThen
(addTickLHsExpr e1)
(addTickLHsExpr e2)
addTickArithSeqInfo (FromTo e1 e2) =
liftM2 FromTo
(addTickLHsExpr e1)
(addTickLHsExpr e2)
addTickArithSeqInfo (FromThenTo e1 e2 e3) =
liftM3 FromThenTo
(addTickLHsExpr e1)
(addTickLHsExpr e2)
(addTickLHsExpr e3)
liftL :: (Monad m) => (a -> m a) -> Located a -> m (Located a)
liftL f (L loc a) = do
a' <- f a
return $ L loc a'
data TickTransState = TT { tickBoxCount:: Int
, mixEntries :: [MixEntry_]
, uniqSupply :: UniqSupply
}
data TickTransEnv = TTE { fileName :: FastString
, density :: TickDensity
, tte_dflags :: DynFlags
, exports :: NameSet
, inlines :: VarSet
, declPath :: [String]
, inScope :: VarSet
, blackList :: Map SrcSpan ()
, this_mod :: Module
, tickishType :: TickishType
}
-- deriving Show
data TickishType = ProfNotes | HpcTicks | Breakpoints | SourceNotes
deriving (Eq)
coveragePasses :: DynFlags -> [TickishType]
coveragePasses dflags =
ifa (hscTarget dflags == HscInterpreted) Breakpoints $
ifa (gopt Opt_Hpc dflags) HpcTicks $
ifa (gopt Opt_SccProfilingOn dflags &&
profAuto dflags /= NoProfAuto) ProfNotes $
ifa (debugLevel dflags > 0) SourceNotes []
where ifa f x xs | f = x:xs
| otherwise = xs
-- | Tickishs that only make sense when their source code location
-- refers to the current file. This might not always be true due to
-- LINE pragmas in the code - which would confuse at least HPC.
tickSameFileOnly :: TickishType -> Bool
tickSameFileOnly HpcTicks = True
tickSameFileOnly _other = False
type FreeVars = OccEnv Id
noFVs :: FreeVars
noFVs = emptyOccEnv
-- Note [freevars]
-- For breakpoints we want to collect the free variables of an
-- expression for pinning on the HsTick. We don't want to collect
-- *all* free variables though: in particular there's no point pinning
-- on free variables that will otherwise be in scope at the GHCi
-- prompt, which means all top-level bindings. Unfortunately detecting
-- top-level bindings isn't easy (collectHsBindsBinders on the top-level
-- bindings doesn't do it), so we keep track of a set of "in-scope"
-- variables in addition to the free variables, and the former is used
-- to filter additions to the latter. This gives us complete control
-- over what free variables we track.
data TM a = TM { unTM :: TickTransEnv -> TickTransState -> (a,FreeVars,TickTransState) }
-- a combination of a state monad (TickTransState) and a writer
-- monad (FreeVars).
instance Functor TM where
fmap = liftM
instance Applicative TM where
pure a = TM $ \ _env st -> (a,noFVs,st)
(<*>) = ap
instance Monad TM where
(TM m) >>= k = TM $ \ env st ->
case m env st of
(r1,fv1,st1) ->
case unTM (k r1) env st1 of
(r2,fv2,st2) ->
(r2, fv1 `plusOccEnv` fv2, st2)
instance HasDynFlags TM where
getDynFlags = TM $ \ env st -> (tte_dflags env, noFVs, st)
instance MonadUnique TM where
getUniqueSupplyM = TM $ \_ st -> (uniqSupply st, noFVs, st)
getUniqueM = TM $ \_ st -> let (u, us') = takeUniqFromSupply (uniqSupply st)
in (u, noFVs, st { uniqSupply = us' })
getState :: TM TickTransState
getState = TM $ \ _ st -> (st, noFVs, st)
setState :: (TickTransState -> TickTransState) -> TM ()
setState f = TM $ \ _ st -> ((), noFVs, f st)
getEnv :: TM TickTransEnv
getEnv = TM $ \ env st -> (env, noFVs, st)
withEnv :: (TickTransEnv -> TickTransEnv) -> TM a -> TM a
withEnv f (TM m) = TM $ \ env st ->
case m (f env) st of
(a, fvs, st') -> (a, fvs, st')
getDensity :: TM TickDensity
getDensity = TM $ \env st -> (density env, noFVs, st)
ifDensity :: TickDensity -> TM a -> TM a -> TM a
ifDensity d th el = do d0 <- getDensity; if d == d0 then th else el
getFreeVars :: TM a -> TM (FreeVars, a)
getFreeVars (TM m)
= TM $ \ env st -> case m env st of (a, fv, st') -> ((fv,a), fv, st')
freeVar :: Id -> TM ()
freeVar id = TM $ \ env st ->
if id `elemVarSet` inScope env
then ((), unitOccEnv (nameOccName (idName id)) id, st)
else ((), noFVs, st)
addPathEntry :: String -> TM a -> TM a
addPathEntry nm = withEnv (\ env -> env { declPath = declPath env ++ [nm] })
getPathEntry :: TM [String]
getPathEntry = declPath `liftM` getEnv
getFileName :: TM FastString
getFileName = fileName `liftM` getEnv
isGoodSrcSpan' :: SrcSpan -> Bool
isGoodSrcSpan' pos@(RealSrcSpan _) = srcSpanStart pos /= srcSpanEnd pos
isGoodSrcSpan' (UnhelpfulSpan _) = False
isGoodTickSrcSpan :: SrcSpan -> TM Bool
isGoodTickSrcSpan pos = do
file_name <- getFileName
tickish <- tickishType `liftM` getEnv
let need_same_file = tickSameFileOnly tickish
same_file = Just file_name == srcSpanFileName_maybe pos
return (isGoodSrcSpan' pos && (not need_same_file || same_file))
ifGoodTickSrcSpan :: SrcSpan -> TM a -> TM a -> TM a
ifGoodTickSrcSpan pos then_code else_code = do
good <- isGoodTickSrcSpan pos
if good then then_code else else_code
bindLocals :: [Id] -> TM a -> TM a
bindLocals new_ids (TM m)
= TM $ \ env st ->
case m env{ inScope = inScope env `extendVarSetList` new_ids } st of
(r, fv, st') -> (r, fv `delListFromOccEnv` occs, st')
where occs = [ nameOccName (idName id) | id <- new_ids ]
isBlackListed :: SrcSpan -> TM Bool
isBlackListed pos = TM $ \ env st ->
case Map.lookup pos (blackList env) of
Nothing -> (False,noFVs,st)
Just () -> (True,noFVs,st)
-- the tick application inherits the source position of its
-- expression argument to support nested box allocations
allocTickBox :: BoxLabel -> Bool -> Bool -> SrcSpan -> TM (HsExpr Id)
-> TM (LHsExpr Id)
allocTickBox boxLabel countEntries topOnly pos m =
ifGoodTickSrcSpan pos (do
(fvs, e) <- getFreeVars m
env <- getEnv
tickish <- mkTickish boxLabel countEntries topOnly pos fvs (declPath env)
return (L pos (HsTick tickish (L pos e)))
) (do
e <- m
return (L pos e)
)
-- the tick application inherits the source position of its
-- expression argument to support nested box allocations
allocATickBox :: BoxLabel -> Bool -> Bool -> SrcSpan -> FreeVars
-> TM (Maybe (Tickish Id))
allocATickBox boxLabel countEntries topOnly pos fvs =
ifGoodTickSrcSpan pos (do
let
mydecl_path = case boxLabel of
TopLevelBox x -> x
LocalBox xs -> xs
_ -> panic "allocATickBox"
tickish <- mkTickish boxLabel countEntries topOnly pos fvs mydecl_path
return (Just tickish)
) (return Nothing)
mkTickish :: BoxLabel -> Bool -> Bool -> SrcSpan -> OccEnv Id -> [String]
-> TM (Tickish Id)
mkTickish boxLabel countEntries topOnly pos fvs decl_path = do
let ids = filter (not . isUnliftedType . idType) $ occEnvElts fvs
-- unlifted types cause two problems here:
-- * we can't bind them at the GHCi prompt
      -- (bindLocalsAtBreakpoint already filters them out),
-- * the simplifier might try to substitute a literal for
-- the Id, and we can't handle that.
me = (pos, decl_path, map (nameOccName.idName) ids, boxLabel)
cc_name | topOnly = head decl_path
| otherwise = concat (intersperse "." decl_path)
dflags <- getDynFlags
env <- getEnv
case tickishType env of
HpcTicks -> do
c <- liftM tickBoxCount getState
setState $ \st -> st { tickBoxCount = c + 1
, mixEntries = me : mixEntries st }
return $ HpcTick (this_mod env) c
ProfNotes -> do
ccUnique <- getUniqueM
let cc = mkUserCC (mkFastString cc_name) (this_mod env) pos ccUnique
count = countEntries && gopt Opt_ProfCountEntries dflags
return $ ProfNote cc count True{-scopes-}
Breakpoints -> do
c <- liftM tickBoxCount getState
setState $ \st -> st { tickBoxCount = c + 1
, mixEntries = me:mixEntries st }
return $ Breakpoint c ids
SourceNotes | RealSrcSpan pos' <- pos ->
return $ SourceNote pos' cc_name
_otherwise -> panic "mkTickish: bad source span!"
allocBinTickBox :: (Bool -> BoxLabel) -> SrcSpan -> TM (HsExpr Id)
-> TM (LHsExpr Id)
allocBinTickBox boxLabel pos m = do
env <- getEnv
case tickishType env of
HpcTicks -> do e <- liftM (L pos) m
ifGoodTickSrcSpan pos
(mkBinTickBoxHpc boxLabel pos e)
(return e)
_other -> allocTickBox (ExpBox False) False False pos m
mkBinTickBoxHpc :: (Bool -> BoxLabel) -> SrcSpan -> LHsExpr Id
-> TM (LHsExpr Id)
mkBinTickBoxHpc boxLabel pos e =
TM $ \ env st ->
let meT = (pos,declPath env, [],boxLabel True)
meF = (pos,declPath env, [],boxLabel False)
meE = (pos,declPath env, [],ExpBox False)
c = tickBoxCount st
mes = mixEntries st
in
( L pos $ HsTick (HpcTick (this_mod env) c) $ L pos $ HsBinTick (c+1) (c+2) e
-- notice that F and T are reversed,
-- because we are building the list in
-- reverse...
, noFVs
, st {tickBoxCount=c+3 , mixEntries=meF:meT:meE:mes}
)
mkHpcPos :: SrcSpan -> HpcPos
mkHpcPos pos@(RealSrcSpan s)
| isGoodSrcSpan' pos = toHpcPos (srcSpanStartLine s,
srcSpanStartCol s,
srcSpanEndLine s,
srcSpanEndCol s - 1)
-- the end column of a SrcSpan is one
-- greater than the last column of the
-- span (see SrcLoc), whereas HPC
-- expects the column range to be
-- inclusive, hence we subtract one above.
mkHpcPos _ = panic "bad source span; expected such spans to be filtered out"
hpcSrcSpan :: SrcSpan
hpcSrcSpan = mkGeneralSrcSpan (fsLit "Haskell Program Coverage internals")
matchesOneOfMany :: [LMatch Id body] -> Bool
matchesOneOfMany lmatches = sum (map matchCount lmatches) > 1
where
matchCount (L _ (Match _ _pats _ty (GRHSs grhss _binds))) = length grhss
type MixEntry_ = (SrcSpan, [String], [OccName], BoxLabel)
-- For the hash value, we hash everything: the file name,
-- the timestamp of the original source file, the tab stop,
-- and the mix entries. We cheat, and hash the show'd string.
-- This hash only has to be computed at Mix creation time,
-- and is for sanity checking only.
mixHash :: FilePath -> UTCTime -> Int -> [MixEntry] -> Int
mixHash file tm tabstop entries = fromIntegral $ hashString
(show $ Mix file tm 0 tabstop entries)
{-
************************************************************************
* *
* initialisation
* *
************************************************************************
Each module compiled with -fhpc declares an initialisation function of
the form `hpc_init_<module>()`, which is emitted into the _stub.c file
and annotated with __attribute__((constructor)) so that it gets
executed at startup time.
The function's purpose is to call hs_hpc_module to register this
module with the RTS, and it looks something like this:
static void hpc_init_Main(void) __attribute__((constructor));
static void hpc_init_Main(void)
{extern StgWord64 _hpc_tickboxes_Main_hpc[];
hs_hpc_module("Main",8,1150288664,_hpc_tickboxes_Main_hpc);}
-}
hpcInitCode :: Module -> HpcInfo -> SDoc
hpcInitCode _ (NoHpcInfo {}) = Outputable.empty
hpcInitCode this_mod (HpcInfo tickCount hashNo)
= vcat
[ text "static void hpc_init_" <> ppr this_mod
<> text "(void) __attribute__((constructor));"
, text "static void hpc_init_" <> ppr this_mod <> text "(void)"
, braces (vcat [
text "extern StgWord64 " <> tickboxes <>
text "[]" <> semi,
text "hs_hpc_module" <>
parens (hcat (punctuate comma [
doubleQuotes full_name_str,
int tickCount, -- really StgWord32
int hashNo, -- really StgWord32
tickboxes
])) <> semi
])
]
where
tickboxes = ppr (mkHpcTicksLabel $ this_mod)
module_name = hcat (map (text.charToC) $
bytesFS (moduleNameFS (Module.moduleName this_mod)))
package_name = hcat (map (text.charToC) $
bytesFS (unitIdFS (moduleUnitId this_mod)))
full_name_str
| moduleUnitId this_mod == mainUnitId
= module_name
| otherwise
= package_name <> char '/' <> module_name
| mettekou/ghc | compiler/deSugar/Coverage.hs | bsd-3-clause | 51,968 | 0 | 22 | 15,484 | 13,612 | 6,879 | 6,733 | 955 | 8 |
-----------------------------------------------------------------------------
-- |
-- Module : Plugins.Monitors.Thermal
-- Copyright : (c) Juraj Hercek
-- License : BSD-style (see LICENSE)
--
-- Maintainer : Juraj Hercek <[email protected]>
-- Stability : unstable
-- Portability : unportable
--
-- A thermal monitor for Xmobar
--
-----------------------------------------------------------------------------
module Plugins.Monitors.Thermal where
import Control.Monad (liftM)
import qualified Data.ByteString.Lazy.Char8 as B
import Plugins.Monitors.Common
import System.Posix.Files (fileExist)
-- | Default thermal configuration.
thermalConfig :: IO MConfig
thermalConfig = mkMConfig
"Thm: <temp>C" -- template
["temp"] -- available replacements
-- | Retrieves thermal information. Argument is name of thermal directory in
-- \/proc\/acpi\/thermal_zone. Returns the monitor string parsed according to
-- template (either default or user specified).
runThermal :: [String] -> Monitor String
runThermal args = do
let zone = head args
file = "/proc/acpi/thermal_zone/" ++ zone ++ "/temperature"
exists <- io $ fileExist file
if exists
then do number <- io $ liftM ((read :: String -> Int) . stringParser (1, 0)) (B.readFile file)
thermal <- showWithColors show number
parseTemplate [ thermal ]
else return $ "Thermal (" ++ zone ++ "): N/A"
| dragosboca/xmobar | src/Plugins/Monitors/Thermal.hs | bsd-3-clause | 1,453 | 0 | 16 | 288 | 240 | 137 | 103 | 19 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.License
-- Description : The License data type.
-- Copyright : Isaac Jones 2003-2005
-- Duncan Coutts 2008
-- License : BSD3
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- Package descriptions contain fields for specifying the name of a software
-- license and the name of the file containing the text of that license. While
-- package authors may choose any license they like, Cabal provides an
-- enumeration of a small set of common free and open source software licenses.
-- This is done so that Hackage can recognise licenses, so that tools can detect
-- <https://en.wikipedia.org/wiki/License_compatibility licensing conflicts>,
-- and to deter
-- <https://en.wikipedia.org/wiki/License_proliferation license proliferation>.
--
-- It is recommended that all package authors use the @license-file@ or
-- @license-files@ fields in their package descriptions. Further information
-- about these fields can be found in the
-- <http://www.haskell.org/cabal/users-guide/developing-packages.html#package-descriptions Cabal users guide>.
--
-- = Additional resources
--
-- The following websites provide information about free and open source
-- software licenses:
--
-- * <http://www.opensource.org The Open Source Initiative (OSI)>
--
-- * <https://www.fsf.org The Free Software Foundation (FSF)>
--
-- = Disclaimer
--
-- The descriptions of software licenses provided by this documentation are
-- intended for informational purposes only and in no way constitute legal
-- advice. Please read the text of the licenses and consult a lawyer for any
-- advice regarding software licensing.
module Distribution.License (
License(..),
knownLicenses,
) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Version
import Distribution.Text
import qualified Distribution.Compat.ReadP as Parse
import qualified Text.PrettyPrint as Disp
-- | Indicates the license under which a package's source code is released.
-- Versions of the licenses not listed here will be rejected by Hackage and
-- cause @cabal check@ to issue a warning.
data License =
-- TODO: * remove BSD4
-- | GNU General Public License,
-- <https://www.gnu.org/licenses/old-licenses/gpl-2.0.html version 2> or
-- <https://www.gnu.org/licenses/gpl.html version 3>.
GPL (Maybe Version)
-- | <https://www.gnu.org/licenses/agpl.html GNU Affero General Public License, version 3>.
| AGPL (Maybe Version)
-- | GNU Lesser General Public License,
-- <https://www.gnu.org/licenses/old-licenses/lgpl-2.1.html version 2.1> or
-- <https://www.gnu.org/licenses/lgpl.html version 3>.
| LGPL (Maybe Version)
-- | <http://www.opensource.org/licenses/bsd-license 2-clause BSD license>.
| BSD2
-- | <http://www.opensource.org/licenses/bsd-3-clause 3-clause BSD license>.
| BSD3
-- | <http://directory.fsf.org/wiki/License:BSD_4Clause 4-clause BSD license>.
-- This license has not been approved by the OSI and is incompatible with
-- the GNU GPL. It is provided for historical reasons and should be avoided.
| BSD4
-- | <http://www.opensource.org/licenses/MIT MIT license>.
| MIT
-- | <http://www.isc.org/downloads/software-support-policy/isc-license/ ISC license>
| ISC
-- | <https://www.mozilla.org/MPL/ Mozilla Public License, version 2.0>.
| MPL Version
-- | <https://www.apache.org/licenses/ Apache License, version 2.0>.
| Apache (Maybe Version)
-- | The author of a package disclaims any copyright to its source code and
-- dedicates it to the public domain. This is not a software license. Please
-- note that it is not possible to dedicate works to the public domain in
-- every jurisdiction, nor is a work that is in the public domain in one
-- jurisdiction necessarily in the public domain elsewhere.
| PublicDomain
-- | Explicitly 'All Rights Reserved', eg for proprietary software. The
-- package may not be legally modified or redistributed by anyone but the
-- rightsholder.
| AllRightsReserved
-- | No license specified which legally defaults to 'All Rights Reserved'.
-- The package may not be legally modified or redistributed by anyone but
-- the rightsholder.
| UnspecifiedLicense
-- | Any other software license.
| OtherLicense
-- | Indicates an erroneous license name.
| UnknownLicense String
deriving (Generic, Read, Show, Eq, Typeable, Data)
instance Binary License
-- | The list of all currently recognised licenses.
knownLicenses :: [License]
knownLicenses = [ GPL unversioned, GPL (version [2]), GPL (version [3])
, LGPL unversioned, LGPL (version [2, 1]), LGPL (version [3])
, AGPL unversioned, AGPL (version [3])
, BSD2, BSD3, MIT, ISC
, MPL (mkVersion [2, 0])
, Apache unversioned, Apache (version [2, 0])
, PublicDomain, AllRightsReserved, OtherLicense]
where
unversioned = Nothing
version = Just . mkVersion
instance Text License where
disp (GPL version) = Disp.text "GPL" <<>> dispOptVersion version
disp (LGPL version) = Disp.text "LGPL" <<>> dispOptVersion version
disp (AGPL version) = Disp.text "AGPL" <<>> dispOptVersion version
disp (MPL version) = Disp.text "MPL" <<>> dispVersion version
disp (Apache version) = Disp.text "Apache" <<>> dispOptVersion version
disp (UnknownLicense other) = Disp.text other
disp other = Disp.text (show other)
parse = do
name <- Parse.munch1 (\c -> isAlphaNum c && c /= '-')
version <- Parse.option Nothing (Parse.char '-' >> fmap Just parse)
return $! case (name, version :: Maybe Version) of
("GPL", _ ) -> GPL version
("LGPL", _ ) -> LGPL version
("AGPL", _ ) -> AGPL version
("BSD2", Nothing) -> BSD2
("BSD3", Nothing) -> BSD3
("BSD4", Nothing) -> BSD4
("ISC", Nothing) -> ISC
("MIT", Nothing) -> MIT
("MPL", Just version') -> MPL version'
("Apache", _ ) -> Apache version
("PublicDomain", Nothing) -> PublicDomain
("AllRightsReserved", Nothing) -> AllRightsReserved
("OtherLicense", Nothing) -> OtherLicense
_ -> UnknownLicense $ name ++
maybe "" (('-':) . display) version
dispOptVersion :: Maybe Version -> Disp.Doc
dispOptVersion Nothing = Disp.empty
dispOptVersion (Just v) = dispVersion v
dispVersion :: Version -> Disp.Doc
dispVersion v = Disp.char '-' <<>> disp v
| mydaum/cabal | Cabal/Distribution/License.hs | bsd-3-clause | 6,986 | 0 | 16 | 1,649 | 985 | 563 | 422 | 71 | 1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
module T14174a where
import Data.Kind
data TyFun :: Type -> Type -> Type
type a ~> b = TyFun a b -> Type
infixr 0 ~>
type family Apply (f :: k1 ~> k2) (x :: k1) :: k2
type a @@ b = Apply a b
infixl 9 @@
data FunArrow = (:->) | (:~>)
class FunType (arr :: FunArrow) where
type Fun (k1 :: Type) arr (k2 :: Type) :: Type
class FunType arr => AppType (arr :: FunArrow) where
type App k1 arr k2 (f :: Fun k1 arr k2) (x :: k1) :: k2
type FunApp arr = (FunType arr, AppType arr)
instance FunType (:->) where
type Fun k1 (:->) k2 = k1 -> k2
instance AppType (:->) where
type App k1 (:->) k2 (f :: k1 -> k2) x = f x
instance FunType (:~>) where
type Fun k1 (:~>) k2 = k1 ~> k2
instance AppType (:~>) where
type App k1 (:~>) k2 (f :: k1 ~> k2) x = f @@ x
infixr 0 -?>
type (-?>) (k1 :: Type) (k2 :: Type) (arr :: FunArrow) = Fun k1 arr k2
type family ElimBool (p :: Bool -> Type)
(z :: Bool)
(pFalse :: p False)
(pTrue :: p True)
:: p z where
-- Commenting out the line below makes the panic go away
ElimBool p z pFalse pTrue = ElimBoolPoly (:->) p z pFalse pTrue
type family ElimBoolPoly (arr :: FunArrow)
(p :: (Bool -?> Type) arr)
(z :: Bool)
(pFalse :: App Bool arr Type p False)
(pTrue :: App Bool arr Type p True)
:: App Bool arr Type p z
| sdiehl/ghc | testsuite/tests/polykinds/T14174a.hs | bsd-3-clause | 1,640 | 13 | 13 | 515 | 588 | 338 | 250 | -1 | -1 |