code stringlengths 5..1.03M | repo_name stringlengths 5..90 | path stringlengths 4..158 | license stringclasses 15 values | size int64 5..1.03M | n_ast_errors int64 0..53.9k | ast_max_depth int64 2..4.17k | n_whitespaces int64 0..365k | n_ast_nodes int64 3..317k | n_ast_terminals int64 1..171k | n_ast_nonterminals int64 1..146k | loc int64 -1..37.3k | cycloplexity int64 -1..1.31k
---|---|---|---|---|---|---|---|---|---|---|---|---
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.SQL.Databases.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Partially updates a resource containing information about a database
-- inside a Cloud SQL instance. This method supports patch semantics.
--
-- /See:/ <https://developers.google.com/cloud-sql/ Cloud SQL Admin API Reference> for @sql.databases.patch@.
module Network.Google.Resource.SQL.Databases.Patch
(
-- * REST Resource
DatabasesPatchResource
-- * Creating a Request
, databasesPatch
, DatabasesPatch
-- * Request Lenses
, dpXgafv
, dpUploadProtocol
, dpProject
, dpDatabase
, dpAccessToken
, dpUploadType
, dpPayload
, dpCallback
, dpInstance
) where
import Network.Google.Prelude
import Network.Google.SQLAdmin.Types
-- | A resource alias for @sql.databases.patch@ method which the
-- 'DatabasesPatch' request conforms to.
type DatabasesPatchResource =
"v1" :>
"projects" :>
Capture "project" Text :>
"instances" :>
Capture "instance" Text :>
"databases" :>
Capture "database" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] Database :>
Patch '[JSON] Operation
-- | Partially updates a resource containing information about a database
-- inside a Cloud SQL instance. This method supports patch semantics.
--
-- /See:/ 'databasesPatch' smart constructor.
data DatabasesPatch =
DatabasesPatch'
{ _dpXgafv :: !(Maybe Xgafv)
, _dpUploadProtocol :: !(Maybe Text)
, _dpProject :: !Text
, _dpDatabase :: !Text
, _dpAccessToken :: !(Maybe Text)
, _dpUploadType :: !(Maybe Text)
, _dpPayload :: !Database
, _dpCallback :: !(Maybe Text)
, _dpInstance :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'DatabasesPatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dpXgafv'
--
-- * 'dpUploadProtocol'
--
-- * 'dpProject'
--
-- * 'dpDatabase'
--
-- * 'dpAccessToken'
--
-- * 'dpUploadType'
--
-- * 'dpPayload'
--
-- * 'dpCallback'
--
-- * 'dpInstance'
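--
-- A minimal usage sketch (the project, instance, database and payload names
-- below are illustrative, and the lens operators (&) and (.~) are assumed to
-- be in scope, e.g. from lens or a re-exporting prelude):
--
-- > databasesPatch "my-project" "my-db" myPayload "my-instance"
-- >   & dpCallback .~ Just "myCallback"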
databasesPatch
:: Text -- ^ 'dpProject'
-> Text -- ^ 'dpDatabase'
-> Database -- ^ 'dpPayload'
-> Text -- ^ 'dpInstance'
-> DatabasesPatch
databasesPatch pDpProject_ pDpDatabase_ pDpPayload_ pDpInstance_ =
DatabasesPatch'
{ _dpXgafv = Nothing
, _dpUploadProtocol = Nothing
, _dpProject = pDpProject_
, _dpDatabase = pDpDatabase_
, _dpAccessToken = Nothing
, _dpUploadType = Nothing
, _dpPayload = pDpPayload_
, _dpCallback = Nothing
, _dpInstance = pDpInstance_
}
-- | V1 error format.
dpXgafv :: Lens' DatabasesPatch (Maybe Xgafv)
dpXgafv = lens _dpXgafv (\ s a -> s{_dpXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
dpUploadProtocol :: Lens' DatabasesPatch (Maybe Text)
dpUploadProtocol
= lens _dpUploadProtocol
(\ s a -> s{_dpUploadProtocol = a})
-- | Project ID of the project that contains the instance.
dpProject :: Lens' DatabasesPatch Text
dpProject
= lens _dpProject (\ s a -> s{_dpProject = a})
-- | Name of the database to be updated in the instance.
dpDatabase :: Lens' DatabasesPatch Text
dpDatabase
= lens _dpDatabase (\ s a -> s{_dpDatabase = a})
-- | OAuth access token.
dpAccessToken :: Lens' DatabasesPatch (Maybe Text)
dpAccessToken
= lens _dpAccessToken
(\ s a -> s{_dpAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
dpUploadType :: Lens' DatabasesPatch (Maybe Text)
dpUploadType
= lens _dpUploadType (\ s a -> s{_dpUploadType = a})
-- | Multipart request metadata.
dpPayload :: Lens' DatabasesPatch Database
dpPayload
= lens _dpPayload (\ s a -> s{_dpPayload = a})
-- | JSONP
dpCallback :: Lens' DatabasesPatch (Maybe Text)
dpCallback
= lens _dpCallback (\ s a -> s{_dpCallback = a})
-- | Database instance ID. This does not include the project ID.
dpInstance :: Lens' DatabasesPatch Text
dpInstance
= lens _dpInstance (\ s a -> s{_dpInstance = a})
instance GoogleRequest DatabasesPatch where
type Rs DatabasesPatch = Operation
type Scopes DatabasesPatch =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/sqlservice.admin"]
requestClient DatabasesPatch'{..}
= go _dpProject _dpInstance _dpDatabase _dpXgafv
_dpUploadProtocol
_dpAccessToken
_dpUploadType
_dpCallback
(Just AltJSON)
_dpPayload
sQLAdminService
where go
= buildClient (Proxy :: Proxy DatabasesPatchResource)
mempty
| brendanhay/gogol | gogol-sqladmin/gen/Network/Google/Resource/SQL/Databases/Patch.hs | mpl-2.0 | 5,820 | 0 | 21 | 1,478 | 941 | 547 | 394 | 136 | 1 |
{- |
Module : $Header$
Description : Property tests for timespans.
Copyright : (c) plaimi 2014
License : AGPL-3
Maintainer : [email protected]
-} module Tempuhs.Props.Timespan where
import Test.Framework
(
Test,
testGroup,
)
import Test.Framework.Providers.QuickCheck2
(
testProperty,
)
import Tempuhs.Props.Timespan.Laws
import Tempuhs.Props.Timespan.Props
import Tempuhs.Props.Instances ()
timespanLaws :: [Test]
timespanLaws = parentLaws
++ flexLaws
parentLaws :: [Test]
parentLaws = [testGroup "ParentLaws"
[testProperty notDescParentLaw notDescParentTest
,testProperty notSelfParentLaw notSelfParentTest]]
flexLaws :: [Test]
flexLaws = [testGroup "FlexLaws"
[testProperty isFlexibleLaw isFlexibleTest
,testProperty isFlexLaw isFlexTest
,testProperty beginMinLaw beginMinTest
,testProperty endMaxLaw endMaxTest]]
| plaimi/tempuhs-server | prop/Tempuhs/Props/Timespan.hs | agpl-3.0 | 950 | 0 | 8 | 216 | 165 | 98 | 67 | 24 | 1 |
{-# LANGUAGE DataKinds #-}
module Main where
-- Imports ---------------------------------------------------------------------
import Auth
import Relude
import Servant
import qualified Data.ByteString.Char8 as BS8
import qualified Data.Text as T
-- import qualified Network.HTTP.Types as W
import qualified Network.Wai as W
import qualified Network.Wai.Handler.Warp as W
import Control.Monad.IO.Class
-- Types -----------------------------------------------------------------------
type Api = "googauth" :> QueryParam "state" Text
:> QueryParam "code" Text
:> QueryParam "error" Text
:> Get Text
:<|> Get Text
authCode ∷ MonadIO m ⇒ Maybe Text → Maybe Text → Maybe Text → m Text
authCode (Just _) Nothing (Just msg) = return $ "auth error: " <> msg
authCode (Just _) (Just _) Nothing = return "auth success"
authCode _ _ _ = return "invalid auth"
frontPage ∷ MonadIO m ⇒ m Text
frontPage = return "front page"
apiServer ∷ W.Application
apiServer = serve (Proxy∷Proxy Api) $ authCode :<|> frontPage
-- Values ----------------------------------------------------------------------
main ∷ IO ()
main = do
[clid] <- map (ClientID . T.pack) <$> getArgs
let params = authExample & clientID .~ clid
BS8.putStrLn $ authURL params ^. uriBS
W.run 8080 apiServer
| bsummer4/gcal | src/Main.hs | agpl-3.0 | 1,422 | 0 | 12 | 329 | 356 | 188 | 168 | -1 | -1 |
module PrimeNumber where
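-- | Primality test by trial division up to (one past) the integer square root.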
isPrime :: Int -> Bool
isPrime k | k < 2 = False
isPrime 2 = True
isPrime 3 = True
isPrime k = null [ x | x <- [2 .. (1 + (floor $ sqrt $ fromIntegral k))], k `mod` x == 0 ]
| ice1000/OI-codes | codewars/101-200/is-prime-number.hs | agpl-3.0 | 193 | 0 | 14 | 47 | 100 | 54 | 46 | 6 | 1 |
module Braxton.A284435 (a284435) where
import Helpers.BraxtonHelper (enumerateSequences, SymmetricRelation(..), ReflexiveRelation(..))
a284435 n = length $ enumerateA284435 n n
enumerateA284435 = enumerateSequences Reflexive NonSymmetric product
-- (1,1,2,2,4,4,4,48,144,144...)
-- Counts A282193
| peterokagey/haskellOEIS | src/Braxton/A284435.hs | apache-2.0 | 298 | 0 | 6 | 29 | 66 | 39 | 27 | 4 | 1 |
module Main where
import Codec.Sarsi (Event (..), Level (..), Location (..), Message (..))
import Control.Concurrent.Async (async, cancel)
import Control.Concurrent.STM (atomically)
import Control.Concurrent.STM.TBQueue (newTBQueue, readTBQueue)
import Control.Concurrent.STM.TVar (TVar, modifyTVar', newTVar, readTVar, readTVarIO, stateTVar, writeTVar)
import Control.Monad (when)
import Data.Machine (ProcessT, asParts, auto, autoM, final, runT, runT_, scan, sinkPart_, (<~))
import Data.Machine.Fanout (fanout)
import Data.MessagePack (Object (..))
import qualified Data.MessagePack.RPC as RPC
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Vector (Vector)
import qualified Data.Vector as Vector
import NVIM.Client (CommandQueue, ask', mkConnection, send)
import NVIM.Command (Command (..))
import NVIM.QuickFix (toQuickFix)
import Sarsi (Topic (..), getBroker, getTopic, title)
import Sarsi.Consumer (consumeOrWait)
import System.Environment (getArgs)
import System.Exit (ExitCode (..), exitWith)
import System.IO (Handle, IOMode (WriteMode))
import qualified System.IO as IO
import System.IO.Machine (sinkIO, sourceIO)
data BuildStatus = Starting | Building | Done
deriving (Show, Eq)
data PluginAction = Focus | Next | Previous
deriving (Bounded, Show, Enum, Eq, Ord, Read)
pluginActions :: [PluginAction]
pluginActions = [minBound ..]
data PluginState = PluginState
{ buildStatus :: BuildStatus,
buildErrors :: Vector (Location, [Text]),
buildWarnings :: Vector (Location, [Text]),
focus :: Maybe (Level, Int),
buffer :: Object,
window :: Maybe Object
}
deriving (Show)
locationLast :: PluginState -> Maybe Location
locationLast s =
if Vector.null $ buildErrors s
then
if Vector.null $ buildWarnings s
then Nothing
else Just . fst $ Vector.last (buildWarnings s)
else Just . fst $ Vector.last (buildErrors s)
echo :: String -> Command
echo str = NvimCommand [ObjectStr . Text.pack $ concat ["echo \"", str, "\""]]
echom :: String -> Command
echom str = NvimCommand [ObjectStr . Text.pack $ concat ["echom \"", title, ": ", str, "\""]]
jumpTo :: Location -> [Command]
jumpTo loc =
(\x -> NvimCommand [ObjectStr . Text.pack $ x])
<$> [ concat ["drop +", show $ line loc, " ", Text.unpack $ filePath loc],
concat ["call cursor(", show $ line loc, ", ", show $ column loc, ")"],
"normal zz"
]
openLogFile :: Topic -> IO Handle
openLogFile (Topic _ fp _) = IO.openFile (concat [fp, "-nvim.log"]) WriteMode
parseAction :: Maybe Handle -> (Text, [Object]) -> IO [PluginAction]
parseAction _ (m, params) | m == method = return $ cmd =<< unpack =<< params
where
cmd (ObjectStr c) | Text.isPrefixOf c (Text.pack "cfirst") = [Focus]
cmd (ObjectStr c) | Text.isPrefixOf c (Text.pack "cnext") = [Next]
cmd (ObjectStr c) | Text.isPrefixOf c (Text.pack "cprevious") = [Previous]
cmd _ = []
method = Text.pack "CmdlineLeave"
unpack (ObjectArray xs) = Vector.toList xs
unpack _ = []
parseAction hLog x = do
putLogLn hLog $ concat ["unsupported notification: ", show x]
return []
parseArgs :: [String] -> Either String Bool
parseArgs [] = Right False
parseArgs ["--log"] = Right True
parseArgs _ = Left "usage: [--log]"
pluginStateInit :: Object -> PluginState
pluginStateInit b = PluginState Done Vector.empty Vector.empty Nothing b Nothing
putLogLn :: Maybe Handle -> String -> IO ()
putLogLn Nothing _ = return ()
putLogLn (Just h) s = IO.hPutStrLn h s >> IO.hFlush h
update :: Monoid a => Maybe Handle -> CommandQueue -> TVar PluginState -> Event -> IO a
update h q s' e = do
display e
case e of
(Start _) -> updateState (\s -> s {buildStatus = Starting})
(Finish _ _) -> do
emptyErrors <-
atomically $
stateTVar
s'
( \s -> case buildStatus s of
Building -> (Vector.null $ buildErrors s, s {buildStatus = Done})
_ -> (True, s {buildStatus = Done, buildErrors = Vector.empty, buildWarnings = Vector.empty})
)
when emptyErrors $ windowClose q s'
(Notify msg) ->
updateState
( \s ->
if buildStatus s /= Building
then s {buildStatus = Building, focus = Nothing, buildErrors = Vector.empty, buildWarnings = Vector.empty}
else s
)
>> updateMsg msg
trace h
return mempty
where
display (Start _) = nvim_ h q $ echom $ show e
display (Finish _ _) = nvim_ h q $ echom $ show e
display (Notify (Message loc lvl _)) = nvim_ h q $ echo $ concat [show loc, " ", show lvl]
trace Nothing = return ()
trace _ = do
s <- readTVarIO s'
putLogLn h $ show s
updateMsg msg = atomically $ modifyTVar' s' (f msg)
where
f x s = g x
where
g (Message loc Error txts) = s {buildErrors = Vector.snoc es (loc, txts)}
g (Message loc Warning txts) = s {buildWarnings = Vector.snoc ws (loc, txts)}
(es, ws) = case buildStatus s of
Starting -> (Vector.empty, Vector.empty)
_ -> (buildErrors s, buildWarnings s)
updateState f = atomically $ modifyTVar' s' f
-- TODO Wrap this in an appropriate transformer
nvim :: Maybe Handle -> CommandQueue -> Command -> IO (Maybe Object)
nvim hLog q cmd = do
r <- ask' q cmd
case r of
RPC.Success a -> return $ Just a
RPC.Error err -> do
putLogLn hLog $ show err
return Nothing
nvim_ :: Maybe Handle -> CommandQueue -> Command -> IO ()
nvim_ h q c = nvim h q c >> return ()
-- TODO Important: could these all be done in STM? How do we avoid unnecessary readTVarIO?
-- There must be a useful `Async + STM` atomic layer
bufferSetLines :: CommandQueue -> TVar PluginState -> [Text] -> IO ()
bufferSetLines q s' txts = do
s <- readTVarIO s'
let b = buffer s
(RPC.Success _) <- ask' q $ NvimBufSetLines b 0 64 False txts
return ()
windowClose :: CommandQueue -> TVar PluginState -> IO ()
windowClose q s' = do
s <- readTVarIO s'
case window s of
Nothing -> return ()
Just w -> do
-- Tolerate failure if window was closed manually by user
_ <- ask' q $ NvimWinClose w False
atomically . modifyTVar' s' $ \x -> x {window = Nothing}
return ()
bufferShow :: CommandQueue -> TVar PluginState -> Int -> IO ()
bufferShow q s' height = do
windowClose q s'
(RPC.Success (ObjectInt rows)) <- ask' q (NvimWinGetHeight $ ObjectInt 0)
(RPC.Success (ObjectInt cols)) <- ask' q (NvimWinGetWidth $ ObjectInt 0)
s <- readTVarIO s'
(RPC.Success w) <- ask' q $ openWin (buffer s) rows cols
atomically . modifyTVar' s' $ \x -> x {window = Just w}
return ()
where
openWin b rows cols =
NvimOpenWin
b
False
( ObjectMap $
( Vector.fromList
[ (ObjectStr $ Text.pack "style", ObjectStr $ Text.pack "minimal"),
(ObjectStr $ Text.pack "relative", ObjectStr $ Text.pack "win"),
(ObjectStr $ Text.pack "row", ObjectInt $ rows - height),
(ObjectStr $ Text.pack "col", ObjectInt 0),
(ObjectStr $ Text.pack "width", ObjectInt cols),
(ObjectStr $ Text.pack "height", ObjectInt height)
]
)
)
actionFocus :: Maybe Handle -> CommandQueue -> TVar PluginState -> Level -> Int -> IO ()
actionFocus hLog q s' lvl rank = do
s <- readTVarIO s'
active <- fixingIsActive hLog q s
let (loc, txts) = focusContent lvl rank s
if not active
then return ()
else do
_ <- traverse (nvim_ hLog q) $ jumpTo loc
bufferSetLines q s' txts
bufferShow q s' $ length txts
nvim_ hLog q $ echo $ concat [show lvl, ": ", show (rank + 1), "/", show . length $ messagesSelect s lvl]
return ()
actionMove :: Maybe Handle -> CommandQueue -> TVar PluginState -> (PluginState -> PluginState) -> IO ()
actionMove hLog q s' f = do
fcs <-
atomically $ do
s <- readTVar s'
let s'' = f s
writeTVar s' s''
return $ focus s''
case fcs of
Nothing -> return ()
Just (lvl, rank) -> actionFocus hLog q s' lvl rank
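-- Check (via "clist -1") whether the quickfix list still ends with the last
-- location reported by sarsi, i.e. whether sarsi's results are the active
-- quickfix list.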
fixingIsActive :: Maybe Handle -> CommandQueue -> PluginState -> IO Bool
fixingIsActive hLog q s = do
qfLast <- nvim hLog q $ NvimCommandOutput [ObjectStr . Text.pack $ "clist -1"]
case (qfLast, (locationString <$> (locationLast s))) of
(Just (ObjectStr ln), Just loc) -> return $ not (Text.null . snd $ Text.breakOn (Text.pack loc) ln)
_ -> return False
where
locationString :: Location -> String
locationString (Location fp c l) = concat [Text.unpack fp, ":", show l, " col ", show c]
focusContent :: Level -> Int -> PluginState -> (Location, [Text])
focusContent lvl rank s = Vector.unsafeIndex xs rank
where
xs = case lvl of
Warning -> buildWarnings s
Error -> buildErrors s
focusDefault :: PluginState -> Maybe (Level, Int)
focusDefault s = select (Vector.null $ buildErrors s) (Vector.null $ buildWarnings s)
where
select False _ = Just (Error, 0)
select True False = Just (Warning, 0)
select True True = Nothing
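-- Move the focus by i entries, wrapping around and switching between the
-- error and warning lists when one end is reached.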
focusMove :: Int -> PluginState -> PluginState
focusMove i s =
case focus s of
Nothing -> s {focus = focusDefault s}
Just (lvl, rank) -> s {focus = Just $ f lvl (rank + i)}
where
f lvl rank | rank < 0 = f (toggle lvl) ((length $ select lvl) + rank)
f lvl rank | rank >= (length $ select lvl) = f (toggle lvl) (rank - (length $ select lvl))
f lvl rank = (lvl, rank)
toggle lvl | Vector.null $ select (toggle' lvl) = lvl
toggle lvl = toggle' lvl
toggle' Warning = Error
toggle' Error = Warning
select = messagesSelect s
messagesSelect :: PluginState -> Level -> Vector (Location, [Text])
messagesSelect s Warning = buildWarnings s
messagesSelect s Error = buildErrors s
-- TODO How to make it shut down gracefully? Currently it's probably killed by nvim while blocking in `consumeOrWait`
main :: IO ()
main = do
args <- getArgs
case parseArgs args of
Left err -> do
putStrLn err
exitWith $ ExitFailure 1
Right logging -> do
IO.hSetBuffering IO.stdin IO.NoBuffering
IO.hSetBuffering IO.stdout IO.NoBuffering
b <- getBroker
t <- getTopic b "."
hLog <- if logging then Just <$> (openLogFile t) else return Nothing
qCmds <- atomically $ newTBQueue 8
qNotifs <- atomically $ newTBQueue 8
connClose <- mkConnection IO.stdin IO.stdout qCmds qNotifs (errHandler hLog)
nvim_ hLog qCmds $ NvimCommand [ObjectStr . Text.pack $ "au CmdlineLeave * call rpcnotify(g:sarsi, 'CmdlineLeave', [getcmdline()])"]
(Just buf) <- nvim hLog qCmds $ NvimCreateBuf False True
state <- atomically $ newTVar $ pluginStateInit buf
notifier <-
async . runT_ $
autoM (notify hLog qCmds state) <~ asParts <~ (autoM $ parseAction hLog) <~ (sourceIO . atomically $ readTBQueue qNotifs)
putLogLn hLog "ready"
_ <- consumeOrWait t (consumer hLog state qCmds)
cancel notifier
connClose
_ <- traverse IO.hClose hLog
return ()
where
errHandler hLog err = do
putLogLn hLog $ show err
notify hLog q s' Focus = do
s <- readTVarIO s'
case focus s of
Nothing ->
case focusDefault s of
Nothing -> return ()
Just (lvl, rank) -> do
atomically . modifyTVar' s' $ \x -> x {focus = Just (lvl, rank)}
actionFocus hLog q s' lvl rank
Just (lvl, rank) -> actionFocus hLog q s' lvl rank
notify hLog q s' Next = actionMove hLog q s' (focusMove 1)
notify hLog q s' Previous = actionMove hLog q s' (focusMove (-1))
consumer h s q Nothing src = consumer h s q (Just 0) src
consumer h s q (Just i) src = do
i' <- runT $ final <~ asParts <~ fanout [quickFixes, pluginUpdate] <~ src
return (Left $ head i')
where
quickFixes = auto (\x -> [x]) <~ sinkPart_ id (sinkIO (send q) <~ asParts) <~ toQuickFixes i
pluginUpdate = autoM (update h q s)
toQuickFixes :: Int -> ProcessT IO Event (Int, [Command])
toQuickFixes acc = scan f (acc, [])
where
f (i, _) event = toQuickFix i event
| aloiscochard/sarsi | sarsi-nvim/Main.hs | apache-2.0 | 12,234 | 0 | 23 | 3,125 | 4,738 | 2,392 | 2,346 | 272 | 10 |
import System.Environment
import Text.Read (readMaybe)
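-- | Sequence a pair of monadic actions into a single action returning a pair.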
joinTuple :: Monad m => (m a, m b) -> m (a, b)
joinTuple (a', b') = do
a <- a'
b <- b'
return (a, b)
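-- | Add two 'Maybe' numbers, yielding 'Nothing' if either argument is 'Nothing'.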
addMaybes :: Num a => Maybe a -> Maybe a -> Maybe a
addMaybes a b = uncurry (+) <$> joinTuple (a, b)
main :: IO ()
main = getArgs >>= \args ->
print $ foldl (\x -> (x `addMaybes`) . readMaybe) (Just 0) args
| rebeccaskinner/erl_m | examples/example_maybe.hs | apache-2.0 | 408 | 0 | 12 | 122 | 209 | 109 | 100 | 12 | 1 |
module Model where
import Prelude
import Yesod
import Data.Text (Text)
import Database.Persist.Quasi
import Data.Time.Clock
-- You can define all of your database entities in the entities file.
-- You can find more information on persistent and how to declare entities
-- at:
-- http://www.yesodweb.com/book/persistent/
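--
-- A sketch of what an entity block in config/models might look like; the
-- "Deck" entity and its fields are illustrative, not taken from this project:
--
--   Deck
--     name Text
--     createdAt UTCTime
--     deriving Show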
share [mkPersist sqlSettings, mkMigrate "migrateAll"]
$(persistFileWith lowerCaseSettings "config/models")
| madebyjeffrey/socrsite | Model.hs | bsd-2-clause | 433 | 0 | 8 | 58 | 65 | 38 | 27 | -1 | -1 |
{-# LANGUAGE DeriveTraversable #-}
module Data.Propagator.Supported where
import Control.Applicative
import Data.HashSet
import Data.Propagator.Class
import Data.Propagator.Name
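-- | A value together with the set of 'Name's that support it.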
data Supported a = Supported !(HashSet Name) a
deriving (Functor, Foldable, Traversable, Show)
instance Eq a => Eq (Supported a) where
Supported _ a == Supported _ b = a == b
instance Ord a => Ord (Supported a) where
Supported _ a `compare` Supported _ b = compare a b
instance Applicative Supported where
pure = Supported mempty
Supported xs a <* Supported ys _ = Supported (union xs ys) a
Supported xs _ *> Supported ys b = Supported (union xs ys) b
Supported xs f <*> Supported ys a = Supported (union xs ys) (f a)
instance Monad Supported where
return = Supported mempty
(>>) = (*>)
Supported xs a >>= f = case f a of
Supported ys b -> Supported (union xs ys) b
instance Propagated a => Propagated (Supported a) where
merge (Supported xs a) (Supported ys b) = case merge a b of
Change False c -> Change False (Supported xs c)
Change True c -> Change True (Supported (union xs ys) c)
Contradiction zs s -> Contradiction (zs `union` xs `union` ys) s
instance Num a => Num (Supported a) where
(+) = liftA2 (+)
(-) = liftA2 (-)
(*) = liftA2 (*)
abs = fmap abs
signum = fmap signum
negate = fmap negate
fromInteger = pure . fromInteger
instance Fractional a => Fractional (Supported a) where
(/) = liftA2 (/)
recip = fmap recip
fromRational = pure . fromRational
instance Floating a => Floating (Supported a) where
pi = pure pi
exp = fmap exp
log = fmap log
sqrt = fmap sqrt
logBase = liftA2 logBase
(**) = liftA2 (**)
sin = fmap sin
cos = fmap cos
tan = fmap tan
asin = fmap asin
acos = fmap acos
atan = fmap atan
sinh = fmap sinh
cosh = fmap cosh
tanh = fmap tanh
asinh = fmap asinh
acosh = fmap acosh
atanh = fmap atanh
| ekmett/propagators | src/Data/Propagator/Supported.hs | bsd-2-clause | 1,918 | 0 | 13 | 452 | 811 | 410 | 401 | 60 | 0 |
{-# LANGUAGE TemplateHaskell, BangPatterns #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for ganeti-htools.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.Query.Query (testQuery_Query) where
import Test.HUnit (Assertion, assertEqual)
import Test.QuickCheck hiding (Result)
import Test.QuickCheck.Monadic
import Data.Function (on)
import Data.List
import qualified Data.Map as Map
import Data.Maybe
import qualified Data.Set as Set
import Text.JSON (JSValue(..), showJSON)
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Test.Ganeti.Objects (genEmptyCluster)
import Ganeti.BasicTypes
import Ganeti.Errors
import Ganeti.JSON
import Ganeti.Objects
import Ganeti.Query.Filter
import qualified Ganeti.Query.Group as Group
import Ganeti.Query.Language
import qualified Ganeti.Query.Node as Node
import Ganeti.Query.Query
import qualified Ganeti.Query.Job as Job
import Ganeti.Utils (sepSplit)
{-# ANN module "HLint: ignore Use camelCase" #-}
-- * Helpers
-- | Checks if a list of field definitions contains unknown fields.
hasUnknownFields :: [FieldDefinition] -> Bool
hasUnknownFields = (QFTUnknown `notElem`) . map fdefKind
-- * Test cases
-- ** Node queries
-- | Tests that querying any existing fields, via either query or
-- queryFields, will not return unknown fields.
prop_queryNode_noUnknown :: Property
prop_queryNode_noUnknown =
forAll (choose (0, maxNodes) >>= genEmptyCluster) $ \cluster ->
forAll (elements (Map.keys Node.fieldsMap)) $ \field -> monadicIO $ do
QueryResult fdefs fdata <-
run (query cluster False (Query (ItemTypeOpCode QRNode)
[field] EmptyFilter)) >>= resultProp
QueryFieldsResult fdefs' <-
resultProp $ queryFields (QueryFields (ItemTypeOpCode QRNode) [field])
stop $ conjoin
[ printTestCase ("Got unknown fields via query (" ++
show fdefs ++ ")") (hasUnknownFields fdefs)
, printTestCase ("Got unknown result status via query (" ++
show fdata ++ ")")
(all (all ((/= RSUnknown) . rentryStatus)) fdata)
, printTestCase ("Got unknown fields via query fields (" ++
show fdefs'++ ")") (hasUnknownFields fdefs')
]
-- | Tests that an unknown field is returned as such.
prop_queryNode_Unknown :: Property
prop_queryNode_Unknown =
forAll (choose (0, maxNodes) >>= genEmptyCluster) $ \cluster ->
forAll (arbitrary `suchThat` (`notElem` Map.keys Node.fieldsMap))
$ \field -> monadicIO $ do
QueryResult fdefs fdata <-
run (query cluster False (Query (ItemTypeOpCode QRNode)
[field] EmptyFilter)) >>= resultProp
QueryFieldsResult fdefs' <-
resultProp $ queryFields (QueryFields (ItemTypeOpCode QRNode) [field])
stop $ conjoin
[ printTestCase ("Got known fields via query (" ++ show fdefs ++ ")")
(not $ hasUnknownFields fdefs)
, printTestCase ("Got /= ResultUnknown result status via query (" ++
show fdata ++ ")")
(all (all ((== RSUnknown) . rentryStatus)) fdata)
, printTestCase ("Got a Just in a result value (" ++
show fdata ++ ")")
(all (all (isNothing . rentryValue)) fdata)
, printTestCase ("Got known fields via query fields (" ++ show fdefs'
++ ")") (not $ hasUnknownFields fdefs')
]
-- | Checks that a result type conforms to a field definition.
checkResultType :: FieldDefinition -> ResultEntry -> Property
checkResultType _ (ResultEntry RSNormal Nothing) =
failTest "Nothing result in RSNormal field"
checkResultType _ (ResultEntry _ Nothing) = passTest
checkResultType fdef (ResultEntry RSNormal (Just v)) =
case (fdefKind fdef, v) of
(QFTText , JSString {}) -> passTest
(QFTBool , JSBool {}) -> passTest
(QFTNumber , JSRational {}) -> passTest
(QFTTimestamp , JSRational {}) -> passTest
(QFTUnit , JSRational {}) -> passTest
(QFTOther , _) -> passTest -- meh, QFT not precise...
(kind, _) -> failTest $ "Type mismatch, field definition says " ++
show kind ++ " but returned value is " ++ show v ++
" for field '" ++ fdefName fdef ++ "'"
checkResultType _ (ResultEntry r (Just _)) =
failTest $ "Just result in " ++ show r ++ " field"
-- | Tests that querying any existing fields, the following three
-- properties hold: RSNormal corresponds to a Just value, any other
-- value corresponds to Nothing, and for a RSNormal and value field,
-- the type of the value corresponds to the type of the field as
-- declared in the FieldDefinition.
prop_queryNode_types :: Property
prop_queryNode_types =
forAll (choose (0, maxNodes)) $ \numnodes ->
forAll (genEmptyCluster numnodes) $ \cfg ->
forAll (elements (Map.keys Node.fieldsMap)) $ \field -> monadicIO $ do
QueryResult fdefs fdata <-
run (query cfg False (Query (ItemTypeOpCode QRNode)
[field] EmptyFilter)) >>= resultProp
stop $ conjoin
[ printTestCase ("Inconsistent result entries (" ++ show fdata ++ ")")
(conjoin $ map (conjoin . zipWith checkResultType fdefs) fdata)
, printTestCase "Wrong field definitions length"
(length fdefs ==? 1)
, printTestCase "Wrong field result rows length"
(all ((== 1) . length) fdata)
, printTestCase "Wrong number of result rows"
(length fdata ==? numnodes)
]
-- | Test that queryFields with empty fields list returns all node fields.
case_queryNode_allfields :: Assertion
case_queryNode_allfields = do
fdefs <- case queryFields (QueryFields (ItemTypeOpCode QRNode) []) of
Bad msg -> fail $ "Error in query all fields: " ++
formatError msg
Ok (QueryFieldsResult v) -> return v
let field_sort = compare `on` fdefName
assertEqual "Mismatch in all fields list"
(sortBy field_sort . map (\(f, _, _) -> f) $ Map.elems Node.fieldsMap)
(sortBy field_sort fdefs)
-- | Check if cluster node names are unique (first elems).
areNodeNamesSane :: ConfigData -> Bool
areNodeNamesSane cfg =
let fqdns = map nodeName . Map.elems . fromContainer $ configNodes cfg
names = map (head . sepSplit '.') fqdns
in length names == length (nub names)
-- | Check that the nodes reported by a name filter are sane.
prop_queryNode_filter :: Property
prop_queryNode_filter =
forAll (choose (1, maxNodes)) $ \nodes ->
forAll (genEmptyCluster nodes `suchThat`
areNodeNamesSane) $ \cluster -> monadicIO $ do
let node_list = map nodeName . Map.elems . fromContainer $
configNodes cluster
count <- pick $ choose (1, nodes)
fqdn_set <- pick . genSetHelper node_list $ Just count
let fqdns = Set.elems fqdn_set
names = map (head . sepSplit '.') fqdns
flt = makeSimpleFilter "name" $ map Left names
QueryResult _ fdata <-
run (query cluster False (Query (ItemTypeOpCode QRNode)
["name"] flt)) >>= resultProp
stop $ conjoin
[ printTestCase "Invalid node names" $
map (map rentryValue) fdata ==? map (\f -> [Just (showJSON f)]) fqdns
]
-- ** Group queries
prop_queryGroup_noUnknown :: Property
prop_queryGroup_noUnknown =
forAll (choose (0, maxNodes) >>= genEmptyCluster) $ \cluster ->
forAll (elements (Map.keys Group.fieldsMap)) $ \field -> monadicIO $ do
QueryResult fdefs fdata <-
run (query cluster False (Query (ItemTypeOpCode QRGroup)
[field] EmptyFilter)) >>=
resultProp
QueryFieldsResult fdefs' <-
resultProp $ queryFields (QueryFields (ItemTypeOpCode QRGroup) [field])
stop $ conjoin
[ printTestCase ("Got unknown fields via query (" ++ show fdefs ++ ")")
(hasUnknownFields fdefs)
, printTestCase ("Got unknown result status via query (" ++
show fdata ++ ")")
(all (all ((/= RSUnknown) . rentryStatus)) fdata)
, printTestCase ("Got unknown fields via query fields (" ++ show fdefs'
++ ")") (hasUnknownFields fdefs')
]
prop_queryGroup_Unknown :: Property
prop_queryGroup_Unknown =
forAll (choose (0, maxNodes) >>= genEmptyCluster) $ \cluster ->
forAll (arbitrary `suchThat` (`notElem` Map.keys Group.fieldsMap))
$ \field -> monadicIO $ do
QueryResult fdefs fdata <-
run (query cluster False (Query (ItemTypeOpCode QRGroup)
[field] EmptyFilter)) >>= resultProp
QueryFieldsResult fdefs' <-
resultProp $ queryFields (QueryFields (ItemTypeOpCode QRGroup) [field])
stop $ conjoin
[ printTestCase ("Got known fields via query (" ++ show fdefs ++ ")")
(not $ hasUnknownFields fdefs)
, printTestCase ("Got /= ResultUnknown result status via query (" ++
show fdata ++ ")")
(all (all ((== RSUnknown) . rentryStatus)) fdata)
, printTestCase ("Got a Just in a result value (" ++
show fdata ++ ")")
(all (all (isNothing . rentryValue)) fdata)
, printTestCase ("Got known fields via query fields (" ++ show fdefs'
++ ")") (not $ hasUnknownFields fdefs')
]
prop_queryGroup_types :: Property
prop_queryGroup_types =
forAll (choose (0, maxNodes)) $ \numnodes ->
forAll (genEmptyCluster numnodes) $ \cfg ->
forAll (elements (Map.keys Group.fieldsMap)) $ \field -> monadicIO $ do
QueryResult fdefs fdata <-
run (query cfg False (Query (ItemTypeOpCode QRGroup)
[field] EmptyFilter)) >>= resultProp
stop $ conjoin
[ printTestCase ("Inconsistent result entries (" ++ show fdata ++ ")")
(conjoin $ map (conjoin . zipWith checkResultType fdefs) fdata)
, printTestCase "Wrong field definitions length" (length fdefs ==? 1)
, printTestCase "Wrong field result rows length"
(all ((== 1) . length) fdata)
]
case_queryGroup_allfields :: Assertion
case_queryGroup_allfields = do
fdefs <- case queryFields (QueryFields (ItemTypeOpCode QRGroup) []) of
Bad msg -> fail $ "Error in query all fields: " ++
formatError msg
Ok (QueryFieldsResult v) -> return v
let field_sort = compare `on` fdefName
assertEqual "Mismatch in all fields list"
(sortBy field_sort . map (\(f, _, _) -> f) $ Map.elems Group.fieldsMap)
(sortBy field_sort fdefs)
-- | Check that the node count reported by a group list is sane.
--
-- FIXME: also verify the node list, etc.
prop_queryGroup_nodeCount :: Property
prop_queryGroup_nodeCount =
forAll (choose (0, maxNodes)) $ \nodes ->
forAll (genEmptyCluster nodes) $ \cluster -> monadicIO $
do
QueryResult _ fdata <-
run (query cluster False (Query (ItemTypeOpCode QRGroup)
["node_cnt"] EmptyFilter)) >>= resultProp
stop $ conjoin
[ printTestCase "Invalid node count" $
map (map rentryValue) fdata ==? [[Just (showJSON nodes)]]
]
-- ** Job queries
-- | Tests that querying any existing fields, via either query or
-- queryFields, will not return unknown fields. This uses 'undefined'
-- for config, as job queries shouldn't use the configuration, and an
-- explicit filter as otherwise non-live queries wouldn't return any
-- result rows.
prop_queryJob_noUnknown :: Property
prop_queryJob_noUnknown =
forAll (listOf (arbitrary::Gen (Positive Integer))) $ \ids ->
forAll (elements (Map.keys Job.fieldsMap)) $ \field -> monadicIO $ do
let qtype = ItemTypeLuxi QRJob
flt = makeSimpleFilter (nameField qtype) $
map (\(Positive i) -> Right i) ids
QueryResult fdefs fdata <-
run (query undefined False (Query qtype [field] flt)) >>= resultProp
QueryFieldsResult fdefs' <-
resultProp $ queryFields (QueryFields qtype [field])
stop $ conjoin
[ printTestCase ("Got unknown fields via query (" ++
show fdefs ++ ")") (hasUnknownFields fdefs)
, printTestCase ("Got unknown result status via query (" ++
show fdata ++ ")")
(all (all ((/= RSUnknown) . rentryStatus)) fdata)
, printTestCase ("Got unknown fields via query fields (" ++
show fdefs'++ ")") (hasUnknownFields fdefs')
]
-- | Tests that an unknown field is returned as such.
prop_queryJob_Unknown :: Property
prop_queryJob_Unknown =
forAll (listOf (arbitrary::Gen (Positive Integer))) $ \ids ->
forAll (arbitrary `suchThat` (`notElem` Map.keys Job.fieldsMap))
$ \field -> monadicIO $ do
let qtype = ItemTypeLuxi QRJob
flt = makeSimpleFilter (nameField qtype) $
map (\(Positive i) -> Right i) ids
QueryResult fdefs fdata <-
run (query undefined False (Query qtype [field] flt)) >>= resultProp
QueryFieldsResult fdefs' <-
resultProp $ queryFields (QueryFields qtype [field])
stop $ conjoin
[ printTestCase ("Got known fields via query (" ++ show fdefs ++ ")")
(not $ hasUnknownFields fdefs)
, printTestCase ("Got /= ResultUnknown result status via query (" ++
show fdata ++ ")")
(all (all ((== RSUnknown) . rentryStatus)) fdata)
, printTestCase ("Got a Just in a result value (" ++
show fdata ++ ")")
(all (all (isNothing . rentryValue)) fdata)
, printTestCase ("Got known fields via query fields (" ++ show fdefs'
++ ")") (not $ hasUnknownFields fdefs')
]
-- ** Misc other tests
-- | Tests that requested names checking behaves as expected.
prop_getRequestedNames :: Property
prop_getRequestedNames =
forAll genName $ \node1 ->
let chk = getRequestedNames . Query (ItemTypeOpCode QRNode) []
q_node1 = QuotedString node1
eq_name = EQFilter "name"
eq_node1 = eq_name q_node1
in conjoin [ printTestCase "empty filter" $ chk EmptyFilter ==? []
, printTestCase "and filter" $ chk (AndFilter [eq_node1]) ==? []
, printTestCase "simple equality" $ chk eq_node1 ==? [node1]
, printTestCase "non-name field" $
chk (EQFilter "foo" q_node1) ==? []
, printTestCase "non-simple filter" $
chk (OrFilter [ eq_node1 , LTFilter "foo" q_node1]) ==? []
]
testSuite "Query/Query"
[ 'prop_queryNode_noUnknown
, 'prop_queryNode_Unknown
, 'prop_queryNode_types
, 'prop_queryNode_filter
, 'case_queryNode_allfields
, 'prop_queryGroup_noUnknown
, 'prop_queryGroup_Unknown
, 'prop_queryGroup_types
, 'case_queryGroup_allfields
, 'prop_queryGroup_nodeCount
, 'prop_queryJob_noUnknown
, 'prop_queryJob_Unknown
, 'prop_getRequestedNames
]
| apyrgio/snf-ganeti | test/hs/Test/Ganeti/Query/Query.hs | bsd-2-clause | 16,213 | 0 | 23 | 4,036 | 3,963 | 2,056 | 1,907 | 278 | 7 |
{-# LANGUAGE LiberalTypeSynonyms, UnboxedTuples, ScopedTypeVariables, Rank2Types #-}
module Data.TrieMap.TrieKey.Projection (MapMaybe, MapEither, Project(..), mapMaybeM, mapEitherM, both, both') where
import Data.TrieMap.Sized
import Data.TrieMap.TrieKey.Subset
type MapMaybe f a b = f a -> Maybe (f b)
type MapEither f a b c = f a -> (# Maybe (f b), Maybe (f c) #)
type Id a = a
class Project f where
mapMaybe :: Sized b => MapMaybe Id a b -> f a -> f b
mapEither :: (Sized b, Sized c) => MapEither Id a b c -> f a -> (# f b, f c #)
mapEither f a = (# mapMaybe f1 a, mapMaybe f2 a #) where
f1 a = case f a of
(# b, _ #) -> b
f2 a = case f a of
(# _, c #) -> c
mapMaybe (f :: MapMaybe Id a b) a = case mapEither g a of
(# fb, _ #) -> fb
where g :: MapEither Id a b (Elem a)
g a = (# f a, Nothing #)
instance Project Maybe where
mapMaybe f m = m >>= f
mapEither _ Nothing = (# Nothing, Nothing #)
mapEither f (Just a) = f a
mapMaybeM :: (Sized b, Project f, Nullable f) => MapMaybe Id a b -> MapMaybe f a b
mapMaybeM f a = guardNull (mapMaybe f a)
mapEitherM :: (Sized b, Sized c, Project f, Nullable f) => MapEither Id a b c -> MapEither f a b c
mapEitherM f a = case mapEither f a of
(# b, c #) -> (# guardNull b, guardNull c #)
both :: (Sized b, Sized c) => (forall x . Sized x => f x -> f' x) -> (a -> (# f b, f c #)) -> a -> (# f' b, f' c #)
both g f a = case f a of
(# x, y #) -> (# g x, g y #)
both' :: (b -> b') -> (c -> c') -> (a -> (# b, c #)) -> a -> (# b', c' #)
both' g1 g2 f a = case f a of
(# x, y #) -> (# g1 x, g2 y #) | lowasser/TrieMap | Data/TrieMap/TrieKey/Projection.hs | bsd-3-clause | 1,594 | 1 | 13 | 426 | 815 | 419 | 396 | 34 | 1 |
-- Copyright (c) 2017-2018, Travis Bemann
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- o Redistributions of source code must retain the above copyright notice, this
-- list of conditions and the following disclaimer.
--
-- o Redistributions in binary form must reproduce the above copyright notice,
-- this list of conditions and the following disclaimer in the documentation
-- and/or other materials provided with the distribution.
--
-- o Neither the name of the copyright holder nor the names of its
-- contributors may be used to endorse or promote products derived from
-- this software without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-- ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
-- LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-- CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-- SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-- INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-- CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-- ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-- POSSIBILITY OF SUCH DAMAGE.
{-# LANGUAGE OverloadedStrings, OverloadedLists #-}
module Network.IRC.Client.Amphibian.Log
(Response(..),
Error(..),
Log,
newLog,
startLog,
stopLog,
loadLog,
writeLog,
readLog,
getLogLoaded,
getLogRunning)
where
import Network.IRC.Client.Amphibian.Types
import Network.IRC.Client.Amphibian.Utility
import qualified Data.Text as T
import qualified Data.ByteString as B
import qualified Data.Sequence as S
import qualified Network.Socket as NS
import Data.Functor ((<$>))
import Data.Sequence ((|>))
import Data.Foldable (toList)
import System.IO (stderr,
openFile,
hClose,
hFlush,
IOMode(..),
Handle)
import Data.Text.IO (hPutStr,
readFile)
import Text.Printf (printf)
import Control.Concurrent.Async (Async,
async,
cancel)
import Control.Concurrent.STM (STM,
atomically,
orElse,
retry,
TVar,
newTVar,
writeTVar,
readTVar)
import Control.Concurrent.STM.TQueue (TQueue,
newTQueue,
writeTQueue,
readTQueue)
import Control.Concurrent.STM.TMVar (TMVar,
newEmptyTMVar,
putTMVar)
import Control.Exception (catch,
IOException,
SomeException)
import System.Environment.XDG.BaseDir (getUserDataDir)
import System.FilePath.Posix ((</>))
import System.Directory (createDirectoryIfMissing)
import Prelude hiding (readFile)
-- | Log state type
data LogData = LogData
{ logHandle :: Maybe Handle,
logText :: S.Seq T.Text,
logLoaded :: Bool }
-- | Chunk size
chunkSize :: Int
chunkSize = 4096
-- | Create a new log.
newLog :: Int -> STM Log
newLog initialMaxLines = do
running <- newTVar False
actions <- newTQueue
return Log { logRunning = running,
logActions = actions,
logInitialMaxLines = initialMaxLines }
-- | Start a log.
startLog :: Log -> IO (Either Error ())
startLog log = do
alreadyRunning <- atomically $ do
running <- readTVar $ logRunning log
if not running
then do writeTVar (logRunning log) True
return False
else return True
if not alreadyRunning
then do let state = LogData { logHandle = Nothing,
logText = S.empty,
logLoaded = False }
async $ do runLog log state
return $ Right ()
else return . Left $ Error "log already started"
-- | Stop a log.
stopLog :: Log -> STM (Response ())
stopLog log = do
running <- readTVar $ logRunning log
response <- newEmptyTMVar
let response' = Response response
if not running
then putTMVar response $ Right ()
else writeTQueue (logActions log) $ StopLog response'
return response'
-- | Load log.
loadLog :: Log -> NS.HostName -> NS.PortNumber -> B.ByteString ->
STM (Response ())
loadLog log hostname port nickOrName = do
running <- readTVar $ logRunning log
response <- newEmptyTMVar
let response' = Response response
if not running
then putTMVar response . Left $ Error "log not started"
else writeTQueue (logActions log) $
LoadLog hostname port nickOrName response'
return response'
-- | Write to a log.
writeLog :: Log -> T.Text -> STM (Response ())
writeLog log text = do
running <- readTVar $ logRunning log
response <- newEmptyTMVar
let response' = Response response
if not running
then putTMVar response . Left $ Error "log not started"
else writeTQueue (logActions log) $ WriteLog text response'
return response'
-- | Read from a log.
readLog :: Log -> STM (Response T.Text)
readLog log = do
running <- readTVar $ logRunning log
response <- newEmptyTMVar
let response' = Response response
if not running
then putTMVar response . Left $ Error "log not started"
else writeTQueue (logActions log) $ ReadLog response'
return response'
-- | Get whether a log is loaded.
getLogLoaded :: Log -> STM (Response Bool)
getLogLoaded log = do
running <- readTVar $ logRunning log
response <- newEmptyTMVar
let response' = Response response
if not running
then putTMVar response . Left $ Error "log not started"
else writeTQueue (logActions log) $ GetLogLoaded response'
return response'
-- | Get whether a log is running.
getLogRunning :: Log -> STM Bool
getLogRunning = readTVar . logRunning
-- | Run log.
runLog :: Log -> LogData -> IO ()
runLog outer log = do
action <- atomically . readTQueue $ logActions outer
case action of
LoadLog hostname port nickOrName (Response response) -> do
log <- handleLoadLog log hostname port nickOrName $
logInitialMaxLines outer
atomically . putTMVar response $ Right ()
runLog outer log
WriteLog text response -> do
log <- handleWriteLog log text response
runLog outer log
ReadLog response -> do
log <- handleReadLog log response
runLog outer log
GetLogLoaded response -> do
log <- handleGetLogLoaded log response
runLog outer log
StopLog response ->
handleStopLog log outer response
-- | Load log from file.
handleLoadLog :: LogData -> NS.HostName -> NS.PortNumber -> B.ByteString ->
Int -> IO LogData
handleLoadLog log hostname port nickOrName initialMaxLines =
if not $ logLoaded log
then loadLog' `catch` (\e -> return $ const log (e :: IOException))
else return log
where loadLog' = do
logDir <- getUserDataDir $ "amphibian" </> "log" </> hostname
createDirectoryIfMissing True logDir
let filePath = logDir </> (T.unpack . ourDecodeUtf8 $ nickOrName)
text <- readFile filePath `catch`
(\e -> return $ const "" (e :: SomeException))
let text' = shortenText text initialMaxLines
handle <- openFile filePath AppendMode
return $ log { logText = logText log |> text',
logHandle = Just handle,
logLoaded = True }
shortenText text initialMaxLines =
shortenText' (reverse $ T.chunksOf chunkSize text) initialMaxLines
[""] ""
shortenText' chunks count parts section =
if count > 0
then
let (prev, part) = T.breakOnEnd "\n" section
in if prev /= ""
then shortenText' chunks (count - 1) (part : parts)
(T.dropEnd 1 prev)
else
case chunks of
chunk : rest -> shortenText' rest count parts $
T.append chunk part
[] -> T.intercalate "\n" $ part : parts
else T.intercalate "\n" parts
-- | Handle write log.
handleWriteLog :: LogData -> T.Text -> Response () -> IO LogData
handleWriteLog log text (Response response) = do
case logHandle log of
Just handle -> do
hPutStr handle text
hFlush handle
Nothing -> return ()
atomically . putTMVar response $ Right ()
return $ log { logText = logText log |> text }
-- | Handle read log.
handleReadLog :: LogData -> Response T.Text -> IO LogData
handleReadLog log (Response response) = do
let text = T.concat . toList $ logText log
atomically . putTMVar response $ Right text
return $ log { logText = S.singleton text }
-- | Get whether the log is loaded.
handleGetLogLoaded :: LogData -> Response Bool -> IO LogData
handleGetLogLoaded log (Response response) = do
atomically . putTMVar response . Right $ logLoaded log
return log
-- | Handle stop log.
handleStopLog :: LogData -> Log -> Response () -> IO ()
handleStopLog log outer (Response response) = do
case logHandle log of
Just handle -> hClose handle
Nothing -> return ()
atomically $ do
writeTVar (logRunning outer) False
putTMVar response $ Right ()
| tabemann/amphibian | src/Network/IRC/Client/Amphibian/Log.hs | bsd-3-clause | 9,765 | 0 | 17 | 2,816 | 2,336 | 1,192 | 1,144 | 209 | 5 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -Wall #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-} -- TEMP
-- {-# OPTIONS_GHC -fno-warn-unused-binds #-} -- TEMP
----------------------------------------------------------------------
-- |
-- Module : ReificationRules.Exp
-- Copyright : (c) 2016 Conal Elliott
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Lambda expressions
----------------------------------------------------------------------
-- Whether to sugar during show, including 'let'
#define Sugared
module ReificationRules.Exp where
-- TODO: Explicit exports
import Control.Arrow (first)
import Data.Maybe (fromMaybe,catMaybes,listToMaybe)
import Data.Char (isDigit)
import System.IO.Unsafe (unsafePerformIO) -- experiment
import qualified Data.Map as M
-- import Debug.Trace
-- transformers
import Control.Monad.Trans.Reader
import Control.Monad.Trans.State
import Data.Proof.EQ
import ReificationRules.Misc (Unop,Unit,(:*),Eq1'(..),(===?),Evalable(..),PrimBasics(..))
import ReificationRules.ShowUtils
-- | Variable names
type Name = String
-- | Typed variable. Phantom
data V a = V Name
instance Show (V a) where
showsPrec _ (V n) = showString n
varName :: V a -> Name
varName (V name) = name
instance Eq1' V where
V a ==== V b = a == b
infixr 1 :$
infixr 8 :@
-- | Binding patterns
data Pat :: * -> * where
UnitPat :: Pat Unit
VarPat :: V a -> Pat a
(:$) :: Pat a -> Pat b -> Pat (a :* b)
(:@) :: Pat a -> Pat a -> Pat a
-- NOTE: ":@" is named to suggest "as patterns", but is more general ("and patterns").
-- TODO: Rename UnitPat and VarPat to PUnit and PVar
instance Show (Pat a) where
showsPrec _ UnitPat = showString "()"
showsPrec p (VarPat v) = showsPrec p v
showsPrec p (a :$ b) = showsPair p a b
showsPrec p (a :@ b) = showsOp2 False "@" (8,AssocRight) p a b
infixl 9 :^
-- | Lambda expressions
data E :: (* -> *) -> (* -> *) where
Var :: V a -> E p a
ConstE :: p a -> E p a
(:^) :: E p (a -> b) -> E p a -> E p b
Lam :: Pat a -> E p b -> E p (a -> b)
-- letE :: Pat a -> E p a -> E p b -> E p b
-- letE q rhs body = (Lam q body) :^ rhs
letPair :: Name -> Name -> E p c -> E p (a :* b -> c)
letPair a b = Lam (VarPat (V a) :$ VarPat (V b))
{--------------------------------------------------------------------
Show
--------------------------------------------------------------------}
intercalateShows :: Foldable f => ShowS -> f ShowS -> ShowS
intercalateShows gap = foldr1 (\ g f -> g . gap . f)
instance (HasOpInfo prim, Show' prim, Eq1' prim, PrimBasics prim) => Show (E prim a) where
#ifdef Sugared
-- showsPrec p (Either (Lam q a) (Lam r b) :^ ab) =
-- showParen (p > 0) $
-- showString "case " . shows ab . showString " of { "
-- . shows q . showString " -> " . shows a . showString " ; "
-- . shows r . showString " -> " . shows b . showString " } "
showsPrec p e@(Lam {} :^ _) = -- beta multi-redex as "let"
showParen (p > 0) $
showString "let " . shBinds binds . showString " in " . body
where
(binds,body) = collect e
collect :: E prim b -> ([ShowS],ShowS)
collect (Lam q e' :^ rhs) =
first ((shows q . showString " = " . shows rhs) :) (collect e')
collect e' = ([],shows e')
shBinds [b] = b
shBinds bs = showString "{ "
. intercalateShows (showString "; ") bs
. showString " }"
showsPrec p (ConstE ((==== pairP) -> True) :^ u :^ v)
= showsPair p u v
#endif
showsPrec p (ConstE prim :^ u :^ v) | Just (OpInfo op fixity) <- opInfo prim =
showsOp2 False op fixity p u v
showsPrec _ (Var (V n)) = showString n
showsPrec p (ConstE c) = showsPrec' p c
showsPrec p (u :^ v) = showsApp p u v
showsPrec p e@(Lam {}) = showParen (p > 0) $
showString "\\ " . intercalateShows (showString " ") pats
. showString " -> " . body
where
(pats,body) = collect e
where
-- Collect shown patterns and body
collect :: E prim b -> ([ShowS],ShowS)
collect (Lam q e') = first (shows q :) (collect e')
collect e' = ([],shows e')
-- showsPrec p (Either f g) = showsOp2' "|||" (2,AssocRight) p f g
-- showsPrec p (Loop h) = showsApp1 "loop" p h
-- showsPrec p (CoerceE e) = showsApp1 "coerce" p e
-- TODO: Multi-line pretty printer with indentation
{--------------------------------------------------------------------
Evaluation
--------------------------------------------------------------------}
evalE :: (HasOpInfo p, Show' p, Evalable p) => -- , Eq1' p, PrimBasics p
E p a -> a
evalE e = -- trace ("evalE: " ++ show e) $
eval' e [] -- provide empty environment
-- Expression evaluation requires a binding environment. In other words,
-- expressions evaluate to a function from environments.
-- | Single variable binding
data Bind = forall a. Bind (V a) a
-- | Variable environment
type Env = [Bind]
extendEnv :: Pat b -> b -> (Env -> Env)
extendEnv UnitPat () = id
extendEnv (VarPat vb) b = (Bind vb b :)
extendEnv (p :$ q) (a,b) = extendEnv q b . extendEnv p a
extendEnv (p :@ q) b = extendEnv q b . extendEnv p b
-- extendEnv ZeroPat Zero = id
-- extendEnv (SuccPat q) (Succ m) = extendEnv q m
-- TODO: Rewrite extendEnv so that it examines the pattern just once,
-- independently from the value.
lookupVar :: forall a. V a -> Env -> Maybe a
lookupVar va = listToMaybe . catMaybes . map check
where
check :: Bind -> Maybe a
check (Bind vb b) | Just Refl <- va ===? vb = Just b
| otherwise = Nothing
eval' :: (HasOpInfo p, Show' p, Evalable p) =>
E p a -> Env -> a
eval' (Var v) env = fromMaybe (error $ "eval': unbound variable: " ++ show v) $
lookupVar v env
eval' (ConstE p) _ = eval p
eval' (u :^ v) env = (eval' u env) (eval' v env)
eval' (Lam p e) env = \ x -> eval' e (extendEnv p x env)
-- eval' (Either f g) env = eval' f env `either` eval' g env
-- eval' (Loop h) env = loop (eval' h env)
-- eval' (CoerceE e) env = coerce (eval' e env)
-- TODO: Rework so that eval' can work independently of env. Will save repeated
-- evals.
{--------------------------------------------------------------------
Special expressions
--------------------------------------------------------------------}
reifyE :: a -> E p a
reifyE _ = error "reifyE: Oops -- not eliminated."
{-# NOINLINE reifyE #-} -- to give reify/eval rules a chance
{--------------------------------------------------------------------
Clean up variable names
--------------------------------------------------------------------}
-- Max numeric suffix used per simple name
type UsedNames = M.Map Name Int
-- Renaming substitution
type Renamer = M.Map Name Name
type RenameEnv = (UsedNames,Renamer)
type RenameR = Reader RenameEnv
type RenameS = State RenameEnv
-- Huh?
huh :: a -> a
huh = unsafePerformIO . return
-- Without huh, renameVars calls get removed by the compiler in GHC 7.10.3 and 8.1.
-- Demand changes from <S,U> to <L,U>.
--
-- TODO: Understand what's going on here.
renameVars :: forall p a. (Show' p, HasOpInfo p, Eq1' p, PrimBasics p) => Unop (E p a)
renameVars e0 = huh $
runReader (renameExp e0) mempty
where
renameExp :: E p b -> RenameR (E p b)
-- renameExp e | trace ("renameExp: " ++ show e) False = undefined
renameExp (Var (V nm)) =
do (_,renamer) <- ask
let nm' = fromMaybe (error ("RR.Exp.rename: free variable " ++ show nm
++ " in expression " ++ show e0))
(M.lookup nm renamer)
return (Var (V nm'))
renameExp (ConstE p) = return (ConstE p)
renameExp (u :^ v) = (:^) <$> renameExp u <*> renameExp v
renameExp (Lam pat body) =
do env <- ask
let (pat',env') = runState (renamePat pat) env
body' <- local (const env') (renameExp body)
return $ -- trace ("rename lam env = " ++ show env) $
-- trace ("rename lam env' = " ++ show env') $
Lam pat' body'
renamePat :: Pat b -> RenameS (Pat b)
-- renamePat p | trace ("renamePat: " ++ show p) False = undefined
renamePat UnitPat = return UnitPat
renamePat (VarPat (V name)) =
do (used,renamer) <- get
let base = stripName name
(mbN,used') = M.insertLookupWithKey (const (+)) base 1 used
name' = maybe base ((base ++) . show) mbN
renamer' = M.insert name name' renamer
put (used',renamer')
return $ VarPat (V name')
renamePat (u :$ v) = (:$) <$> renamePat u <*> renamePat v
renamePat (u :@ v) = (:@) <$> renamePat u <*> renamePat v
{-# NOINLINE renameVars #-}
-- Names look like foo_suff. Drop the suffix and then any trailing digits.
-- Keep consistent with fqVarName in Plugin.
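-- For example (illustrative): stripName "foo12_x4y5" == "foo".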
stripName :: Unop Name
stripName name = reverse
. dropWhile isDigit
. tail'
. dropWhile (/= '_')
. reverse
$ name
where
tail' [] = error ("stripName: missing suffix in " ++ show name)
tail' (_:cs) = cs
| conal/reification-rules | src/ReificationRules/Exp.hs | bsd-3-clause | 9,559 | 0 | 19 | 2,554 | 2,622 | 1,382 | 1,240 | -1 | -1 |
{-# LANGUAGE OverloadedStrings, DeriveDataTypeable #-}
module Text.XML.ToJSON
(
{-| This library provides a way to convert XML to JSON.
Furthermore, by combining it with aeson's parsing facility, it provides a way to parse XML into Haskell data types.
-}
xmlToJSON
, parseXML
, JSONParseError(JSONParseError)
-- * utils
, tokensToJSON
, elementToJSON
, tokensToElement
, tokenToBuilder
) where
import Control.Monad (when, liftM)
import Control.Arrow (second)
import Control.Exception (Exception)
import Data.Typeable (Typeable)
import qualified Data.Text as T
import qualified Data.ByteString.Lazy as L
import Data.Conduit (($=), ($$), MonadThrow(monadThrow))
import qualified Data.Conduit.List as C
import qualified Data.HashMap.Strict as HM
import qualified Data.Vector as V
import Text.HTML.TagStream
import qualified Text.HTML.TagStream.Text as T
import Text.XML.ToJSON.Builder
import Data.Aeson (Value(..), Object, FromJSON, fromJSON, Result(Error, Success))
-- | Convert tagstream-conduit `Token' to xml element `Builder'
tokenToBuilder :: T.Token -> Builder
tokenToBuilder (TagOpen s as selfClose) = do
beginElement s
addAttrs as
when selfClose endElement
tokenToBuilder (TagClose _) = endElement -- FIXME should match tag name?
tokenToBuilder (Text s) = addValue s
tokenToBuilder _ = return ()
-- |Convert an XML `Element' to an aeson `Value'.
--
-- XML attributes and text values are converted to the special object attributes @__attributes@ and @__values@.
elementToJSON :: Element -> Value
elementToJSON (Element as vs cs) =
if null as && null cs
then
String (T.concat vs)
else
Object $ HM.fromListWith mergeObject
$ attrs
++ values
++ map (second elementToJSON) cs
where
attrs = if null as
then []
else [("__attributes", Object (attrsToObject as))]
values = if null vs
then []
else [("__values", Array (V.fromList (map String vs)))]
attrsToObject :: [(T.Text, T.Text)] -> Object
attrsToObject = HM.fromList . map (second String)
mergeObject :: Value -> Value -> Value
mergeObject v (Array arr) = Array (V.snoc arr v)
mergeObject v1 v2 = Array (V.fromList [v2, v1])
-- | Consume a list of `T.Token' to build an `Element'
tokensToElement :: [T.Token] -> Element
tokensToElement ts = runBuilder (mapM_ tokenToBuilder ts)
-- |Convert a list of tagstream-conduit `T.Token's to an aeson `Value'; a combination of `tokensToElement' and `elementToJSON'.
tokensToJSON :: [T.Token] -> Value
tokensToJSON = elementToJSON . tokensToElement
newtype JSONParseError = JSONParseError String
deriving (Typeable, Show)
instance Exception JSONParseError
-- | Parse XML into a Haskell data type using aeson's `FromJSON' instance.
parseXML :: (MonadThrow m, FromJSON a) => L.ByteString -> m a
parseXML s = xmlToJSON s >>= convert
where
convert v =
case fromJSON v of
Error err -> monadThrow (JSONParseError err)
Success a -> return a
-- | Parse a lazy XML `ByteString' into an aeson `Value'.
xmlToJSON :: MonadThrow m => L.ByteString -> m Value
xmlToJSON s = liftM tokensToJSON $ C.sourceList (L.toChunks s) $= T.tokenStreamBS $$ C.consume
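-- A minimal usage sketch, added for illustration (not part of the original
-- API). 'xmlToJSON' runs in any 'MonadThrow'; IO is used here, and both the
-- sample input and the expected result shape are assumptions, not tested
-- output.
xmlToJSONExample :: IO Value
xmlToJSONExample =
    -- Expected to yield an Object keyed by "person", whose "name" child
    -- collapses to a String since it has no attributes or sub-elements.
    xmlToJSON "<person><name>alice</name></person>"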
| yihuang/xml2json | Text/XML/ToJSON.hs | bsd-3-clause | 3,268 | 0 | 14 | 710 | 821 | 456 | 365 | 69 | 5 |
--------------------------------------------------------------------
-- |
-- Module : Text.Feed.Types
-- Copyright : (c) Galois, Inc. 2008
-- License : BSD3
--
-- Maintainer: Sigbjorn Finne <[email protected]>
-- Stability : provisional
-- Portability:
--
--------------------------------------------------------------------
module Text.Feed.Types where
import Text.RSS.Syntax as RSS
import Text.Atom.Feed as Atom
import Text.RSS1.Syntax as RSS1
import Text.XML as XML
import Data.Text (Text)
-- | The abstract type of feed documents. The internal representation
-- is whatever feed variant type the document was imported as, or has
-- since been translated to.
data Feed
= AtomFeed Atom.Feed
| RSSFeed RSS.RSS
| RSS1Feed RSS1.Feed
    -- if we're unable to correctly interpret the well-formed XML as a feed,
-- keep it as an untyped document.
| XMLFeed XML.Element
deriving (Show)
-- | The abstract type of feed items. Like the 'Text.Feed.Types.Feed' type, the
-- representation of a value is as one of the different RSS item\/entry
-- variants.
data Item
= AtomItem Atom.Entry
| RSSItem RSS.RSSItem
| RSS1Item RSS1.Item
| XMLItem XML.Element
deriving (Show)
-- | The kinds of feed documents supported.
data FeedKind
= AtomKind
| RSSKind (Maybe Text) -- Nothing => default version (2.0)
| RDFKind (Maybe Text) -- Nothing => default version (1.0)
deriving (Eq, Show)
| haskell-pkg-janitors/feed | Text/Feed/Types.hs | bsd-3-clause | 1,397 | 0 | 8 | 241 | 188 | 122 | 66 | 23 | 0 |
{-# LANGUAGE OverloadedStrings, RecordWildCards #-}
module DB.Deck.Queries where
import Prelude hiding (id)
import Control.Monad.IO.Class (liftIO)
import Data.Functor ((<$>))
import Data.Maybe (listToMaybe)
import Data.Text.Lazy (Text)
import Database.PostgreSQL.Simple
import Debug.Trace
import Web.Scotty
import DB.Deck.Model
import DB.CardSimple.Model
allDecks :: Connection -> ActionM [Deck]
allDecks conn = liftIO $ query_ conn "SELECT * FROM deck WHERE deleted = false"
singleDeck :: Connection -> Int -> ActionM (Maybe Deck)
singleDeck conn deckId = liftIO $ listToMaybe <$> query conn singleQuery (Only deckId)
where
singleQuery = "SELECT * FROM deck WHERE id = ? and deleted = false"
allDecksPaged :: Connection -> Int -> Int -> ActionM [Deck]
allDecksPaged conn limit offset = liftIO $ query conn pagedQuery (limit, offset)
where
pagedQuery = "SELECT * FROM deck WHERE deleted = false LIMIT ? OFFSET ?"
deckChildCardSimple :: Connection -> Int -> ActionM [CardSimple]
deckChildCardSimple conn deckId = liftIO $ query conn cardQuery (Only deckId)
where
cardQuery = "SELECT * FROM card_simple WHERE deck_id = ? AND deleted = false"
createDeck :: Connection -> Deck -> ActionM (Maybe Deck)
createDeck conn (Deck {..}) = liftIO $ listToMaybe <$> query conn createQuery (name, description, created_by)
where
createQuery = "INSERT INTO deck (name, description, created_by) VALUES (?, ?, ?) RETURNING *"
updateDeck :: Connection -> Deck -> ActionM (Maybe Deck)
updateDeck conn (Deck {..}) = do newD <- liftIO $ listToMaybe <$> query conn updateQuery (name, description, id)
trace (tStr newD) $ return newD
where
updateQuery = "UPDATE deck SET name = ?, description = ? WHERE id = ? RETURNING *"
tStr d = "Updated deck to: " ++ show d
deleteDeckAndCards :: Connection -> Int -> ActionM Bool
deleteDeckAndCards conn deckId = do
let deleteDeckQuery = "UPDATE deck SET deleted = true WHERE id = ?"
deleteCardQuery = "UPDATE card_simple SET deleted = true WHERE deck_id = ?"
_ <- liftIO $ execute conn deleteCardQuery (Only deckId)
numRows <- liftIO $ execute conn deleteDeckQuery (Only deckId)
  return (numRows == 1)
| ppseafield/backend-flashcard | src/DB/Deck/Queries.hs | bsd-3-clause | 2,331 | 0 | 11 | 521 | 586 | 309 | 277 | 38 | 2 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeOperators #-}
module TypeFamilies where
import Opaleye.Internal.TypeFamilies
data (:~) a b where
Eq :: (:~) a a
-- If it compiles, it works
tests :: ()
tests = ()
where _ = Eq :: a :~ (Pure :<$> Id :<| a :<| b)
_ = Eq :: a :~ (Id :<| a)
_ = Eq :: (a -> a) :~ (((->) :<$> Id :<*> Id) :<| a)
_ = Eq :: (a -> b)
:~ (((->) :<$> Pure a :<*> Pure b) :<| c)
_ = Eq :: Maybe a :~ ((Maybe :<$> Pure a) :<| b)
_ = Eq :: Maybe a :~ ((Maybe :<$> Id) :<| a)
_ = Eq :: a :~ ((Pure a) :<| b)
| WraithM/haskell-opaleye | Test/TypeFamilies.hs | bsd-3-clause | 603 | 0 | 13 | 211 | 274 | 154 | 120 | 16 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
module Main
(
main
) where
import LaTeXGrapher.Math
import LaTeXGrapher.Data.Function
import LaTeXGrapher.Data.Context
import LaTeXGrapher.Data.Markup
import LaTeXGrapher.Parser
import LaTeXGrapher.Parser.Expression
import LaTeXGrapher.Output
import LaTeXGrapher.Plot
import LaTeXGrapher.Repl
import LaTeXGrapher.Diagrams
import System.Console.CmdArgs.Implicit hiding (args,name)
import Data.List
import Data.List.Split
import System.IO
import System.Environment
data GrapherOpts = GrapherOpts
{ width :: Int
, height :: Int
, output :: FilePath
, input :: FilePath
}
deriving (Show, Data, Typeable)
grapherOpts :: String -> GrapherOpts
grapherOpts prog = GrapherOpts
{ width = 400
&= typ "INT"
&= help "Desired width of the output image (default 400)"
, height = 400
&= typ "INT"
&= help "Desired height of the output image (default 400)"
, output = def
&= typFile
&= help "Output file"
, input = def
&= argPos 0
}
&= summary "Command-line LaTeXGrapher generation."
&= program prog
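-- An illustrative invocation (the binary and file names are hypothetical),
-- based on the cmdArgs declarations above:
--
-- > latexgrapher input.lg --width=800 --height=600 --output=figure.eps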
stressContext :: GrapherOpts -> Context -> IO ()
stressContext o c = case es of
Just es -> stressContext' c es
Nothing -> putStrLn "Eval error in context."
where
fs = plotFunctions c
ns = map name fs
es = sequence . map (lookupF c) $ ns
stressContext' c es = putStrLn $ show (sum vs)
where
vs = [ e [x] | e <- es, x <- xs ]
xs = [(fromInteger x) / 100.0 | x <- [-1000..1000]] :: [Double]
diagramsContext :: GrapherOpts -> Context -> IO ()
diagramsContext o c = h ps
where ps = plot c
mm = minMax c
h (Left e) = print e
h (Right ps') = ppPlot ps' >> plotDiagrams mm ps' (output o) (fromIntegral $ width o) (fromIntegral $ height o)
ppPlot = mapM_ (putStrLn . pp)
where
pp p@PlotPoint{} = show p
pp t@Tick{} = show t
pp (Plot ss s e l) = concat [ "Plot\n", ppSeg ss, show (s, e, l) ]
ppSeg (PolyLine ss) = unlines $ map show ss
putStrings :: [String] -> IO ()
putStrings = mapM_ putStrLn
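-- 'buildFilter' drops the options via 'const' and chains 'plot' with 'h' in
-- the function monad ((->) Context), i.e. (plot >>= h) ctx = h (plot ctx) ctx.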
buildFilter :: ([Plot] -> MinMax -> [String]) -> GrapherOpts -> Context -> IO ()
buildFilter f = const (plot >>= h)
where
h (Left e) _ = print e
h (Right ps) c = putStrings $ f ps (minMax c)
runProgram o f = do
handle <- openFile (input o) ReadMode
c <- hGetContents handle
case parseGrapher c of
Left e -> putStrLn "Error parsing input: " >> print e
Right c -> f o c
main = do
prog <- getProgName
args <- getArgs
opts <- cmdArgs (grapherOpts prog)
chooseContext opts
chooseContext :: GrapherOpts -> IO ()
chooseContext opts =
case splitOn "." (output opts) of
[""] -> putStrLn "No output file given."
["repl"] -> runProgram opts (const replContext)
ps | last ps `elem` ["eps"] -> do
let f = case last ps of
_ -> diagramsContext
runProgram opts f
| otherwise -> putStrLn $ "Unknown file type: " ++ last ps | fryguybob/LaTeXGrapher | src/Main.hs | bsd-3-clause | 3,183 | 0 | 16 | 942 | 1,089 | 555 | 534 | 88 | 3 |
module Language.GDL.Query
( query, qextract
, instantiate
) where
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as B
import qualified Data.Map as M
import Data.Maybe
import Data.STRef
import Control.Monad
import Control.Monad.ST
import Language.GDL.Syntax
import Language.GDL.Unify
qextract :: Query -> (Query -> Maybe a) -> Database -> [a]
qextract q ex db = catMaybes $ map (ex . flip instantiate q) $ query db q
query :: Database -> Query -> [Substitution]
query db q = runST $ do
counter <- newSTRef (0 :: Integer)
qeval' counter db q [M.empty]
qeval' :: STRef s Integer -> Database -> Query -> [Substitution]
-> ST s [Substitution]
qeval' _ _ Pass frames = return frames
qeval' counter db (Query struct) frames = fmap concat . mapM applied $ frames
where applied frame = fmap concat . mapM (apply counter db frame struct) $
cheat struct
cheat (Atom s) = db M.! s
cheat (Compound ((Atom s):_)) = db M.! s
cheat _ = error "cheat in qeval'"
qeval' counter db (And conjuncts) frames =
foldM (flip $ qeval' counter db) frames conjuncts
qeval' counter db (Or disjuncts) frames =
fmap concat . mapM (\child -> qeval' counter db child frames) $ disjuncts
qeval' counter db (Not child) frames = do
frames' <- mapM (\frame -> qeval' counter db child [frame]) frames
return [frame | (frame, []) <- zip frames frames']
qeval' _counter _db (Distinct c1 c2) frames = return $ filter different frames
where different f = inst f c1 /= inst f c2
apply :: STRef s Integer -> Database -> Substitution -> Term -> Clause
-> ST s [Substitution]
apply counter db frame struct clause = do
num <- readSTRef counter
modifySTRef counter (+1)
apply' counter db frame struct
(rewriteClause (B.pack (show num ++ "#")) clause)
apply' :: STRef s Integer -> Database -> Substitution -> Term -> Clause
-> ST s [Substitution]
apply' counter db frame struct (conclusion, body) =
case unify frame struct conclusion of
Just frame' -> qeval' counter db body [frame']
Nothing -> return []
rewriteClause :: ByteString -> Clause -> Clause
rewriteClause p (concl, bod) = (rewrite p concl, rewriteQ p bod)
rewriteQ :: ByteString -> Query -> Query
rewriteQ p (Query t) = Query $ rewrite p t
rewriteQ p (And cs) = And $ map (rewriteQ p) cs
rewriteQ p (Or cs) = Or $ map (rewriteQ p) cs
rewriteQ p (Distinct c1 c2) = Distinct (rewrite p c1) (rewrite p c2)
rewriteQ p (Not c) = Not $ rewriteQ p c
rewriteQ _ Pass = Pass
rewrite :: ByteString -> Term -> Term
rewrite _ x@(Atom _) = x
rewrite p (Var i) = Var (p `B.append` i)
rewrite _ (AntiVar i) = AntiVar i
rewrite p (Compound cs) = Compound $ map (rewrite p) cs
rewrite _ Wild = Wild
instantiate :: Substitution -> Query -> Query
instantiate _ Pass = Pass
instantiate sub (Query q) = Query $ inst sub q
instantiate sub (And cs) = And $ map (instantiate sub) cs
instantiate sub (Or cs) = Or $ map (instantiate sub) cs
instantiate sub (Not c) = Not $ instantiate sub c
instantiate sub (Distinct t1 t2) = Distinct (inst sub t1) (inst sub t2)
inst :: Substitution -> Term -> Term
inst _ x@(Atom _) = x
inst sub (Var i) = case M.lookup i sub of
Just v -> inst sub v
Nothing -> error $ "Cannot instantiate variable " ++ B.unpack i
inst _ (AntiVar i) = error $ "Cannot instantiate anti-variable " ++ B.unpack i
inst sub (Compound cs) = Compound $ map (inst sub) cs
inst _ Wild = error "Cannot instantiate wildcard"
| ian-ross/ggp | Language/GDL/Query.hs | bsd-3-clause | 3,501 | 0 | 14 | 755 | 1,530 | 768 | 762 | 79 | 3 |
-- | The gists API as described at <http://developer.github.com/v3/gists/>.
module Github.Gists (
gists
,gists'
,gist
,gist'
,module Github.Data
) where
import Github.Data
import Github.Private
-- | The list of all gists created by the user, optionally using authentication credentials.
--
-- > gists' (Just ("github-username", "github-password")) "mike-burns"
gists' :: Maybe BasicAuth -> String -> IO (Either Error [Gist])
gists' auth userName = githubGet' auth ["users", userName, "gists"]
-- | The list of all public gists created by the user.
--
-- > gists "mike-burns"
gists :: String -> IO (Either Error [Gist])
gists = gists' Nothing
-- | A specific gist, given its id, with authentication credentials
--
-- > gist' (Just ("github-username", "github-password")) "225074"
gist' :: Maybe BasicAuth -> String -> IO (Either Error Gist)
gist' auth gistId = githubGet' auth ["gists", gistId]
-- | A specific gist, given its id.
--
-- > gist "225074"
gist :: String -> IO (Either Error Gist)
gist = gist' Nothing
| kfish/github | Github/Gists.hs | bsd-3-clause | 976 | 0 | 10 | 163 | 210 | 118 | 92 | 16 | 1 |
{-# LANGUAGE CPP, NondecreasingIndentation, ScopedTypeVariables #-}
-- -----------------------------------------------------------------------------
--
-- (c) The University of Glasgow, 2005-2012
--
-- The GHC API
--
-- -----------------------------------------------------------------------------
module ETA.Main.GHC (
-- * Initialisation
defaultErrorHandler,
defaultCleanupHandler,
prettyPrintGhcErrors,
-- * GHC Monad
Ghc, GhcT, GhcMonad(..), HscEnv,
runGhc, runGhcT, initGhcMonad,
gcatch, gbracket, gfinally,
printException,
handleSourceError,
needsTemplateHaskell,
-- * Flags and settings
DynFlags(..), GeneralFlag(..), Severity(..), HscTarget(..), gopt,
GhcMode(..), GhcLink(..), defaultObjectTarget,
parseDynamicFlags,
getSessionDynFlags, setSessionDynFlags,
getProgramDynFlags, setProgramDynFlags,
getInteractiveDynFlags, setInteractiveDynFlags,
parseStaticFlags,
-- * Targets
Target(..), TargetId(..), Phase,
setTargets,
getTargets,
addTarget,
removeTarget,
guessTarget,
-- * Loading\/compiling the program
depanal,
load, LoadHowMuch(..), InteractiveImport(..),
SuccessFlag(..), succeeded, failed,
defaultWarnErrLogger, WarnErrLogger,
workingDirectoryChanged,
parseModule, typecheckModule, desugarModule, loadModule,
ParsedModule(..), TypecheckedModule(..), DesugaredModule(..),
TypecheckedSource, ParsedSource, RenamedSource, -- ditto
TypecheckedMod, ParsedMod,
moduleInfo, renamedSource, typecheckedSource,
parsedSource, coreModule,
-- ** Compiling to Core
CoreModule(..),
compileToCoreModule, compileToCoreSimplified,
-- * Inspecting the module structure of the program
ModuleGraph, ModSummary(..), ms_mod_name, ModLocation(..),
getModSummary,
getModuleGraph,
isLoaded,
topSortModuleGraph,
-- * Inspecting modules
ModuleInfo,
getModuleInfo,
modInfoTyThings,
modInfoTopLevelScope,
modInfoExports,
modInfoInstances,
modInfoIsExportedName,
modInfoLookupName,
modInfoIface,
modInfoSafe,
lookupGlobalName,
findGlobalAnns,
mkPrintUnqualifiedForModule,
ModIface(..),
SafeHaskellMode(..),
-- * Querying the environment
-- packageDbModules,
-- * Printing
PrintUnqualified, alwaysQualify,
-- * Interactive evaluation
getBindings, getInsts, getPrintUnqual,
findModule, lookupModule,
#ifdef GHCI
isModuleTrusted,
moduleTrustReqs,
setContext, getContext,
getNamesInScope,
getRdrNamesInScope,
getGRE,
moduleIsInterpreted,
getInfo,
exprType,
typeKind,
parseName,
RunResult(..),
runStmt, runStmtWithLocation, runDecls, runDeclsWithLocation,
runTcInteractive, -- Desired by some clients (Trac #8878)
parseImportDecl, SingleStep(..),
resume,
Resume(resumeStmt, resumeThreadId, resumeBreakInfo, resumeSpan,
resumeHistory, resumeHistoryIx),
History(historyBreakInfo, historyEnclosingDecls),
GHC.getHistorySpan, getHistoryModule,
getResumeContext,
abandon, abandonAll,
InteractiveEval.back,
InteractiveEval.forward,
showModule,
isModuleInterpreted,
InteractiveEval.compileExpr, HValue, dynCompileExpr,
GHC.obtainTermFromId, GHC.obtainTermFromVal, reconstructType,
modInfoModBreaks,
ModBreaks(..), BreakIndex,
BreakInfo(breakInfo_number, breakInfo_module),
BreakArray, setBreakOn, setBreakOff, getBreak,
#endif
lookupName,
#ifdef GHCI
-- ** EXPERIMENTAL
setGHCiMonad,
#endif
-- * Abstract syntax elements
-- ** Packages
UnitId,
-- ** Modules
Module, mkModule, pprModule, moduleName, moduleUnitId,
ModuleName, mkModuleName, moduleNameString,
-- ** Names
Name,
isExternalName, nameModule, pprParenSymName, nameSrcSpan,
NamedThing(..),
RdrName(Qual,Unqual),
-- ** Identifiers
Id, idType,
isImplicitId, isDeadBinder,
isExportedId, isLocalId, isGlobalId,
isRecordSelector,
isPrimOpId, isFCallId, isClassOpId_maybe,
isDataConWorkId, idDataCon,
isBottomingId, isDictonaryId,
recordSelectorFieldLabel,
-- ** Type constructors
TyCon,
tyConTyVars, tyConDataCons, tyConArity,
isClassTyCon, isTypeSynonymTyCon, isTypeFamilyTyCon, isNewTyCon,
isPrimTyCon, isFunTyCon,
isFamilyTyCon, isOpenFamilyTyCon, isOpenTypeFamilyTyCon,
tyConClass_maybe,
synTyConRhs_maybe, synTyConDefn_maybe, synTyConResKind,
-- ** Type variables
TyVar,
alphaTyVars,
-- ** Data constructors
DataCon,
dataConSig, dataConType, dataConTyCon, dataConFieldLabels,
dataConIsInfix, isVanillaDataCon, dataConUserType,
dataConSrcBangs,
StrictnessMark(..), isMarkedStrict,
-- ** Classes
Class,
classMethods, classSCTheta, classTvsFds, classATs,
pprFundeps,
-- ** Instances
ClsInst,
instanceDFunId,
pprInstance, pprInstanceHdr,
pprFamInst,
FamInst,
-- ** Types and Kinds
Type, splitForAllTys, funResultTy,
pprParendType, pprTypeApp,
Kind,
PredType,
ThetaType, pprForAll, pprThetaArrowTy,
-- ** Entities
TyThing(..),
-- ** Syntax
module HsSyn, -- ToDo: remove extraneous bits
-- ** Fixities
FixityDirection(..),
defaultFixity, maxPrecedence,
negateFixity,
compareFixity,
-- ** Source locations
SrcLoc(..), RealSrcLoc,
mkSrcLoc, noSrcLoc,
srcLocFile, srcLocLine, srcLocCol,
SrcSpan(..), RealSrcSpan,
mkSrcSpan, srcLocSpan, isGoodSrcSpan, noSrcSpan,
srcSpanStart, srcSpanEnd,
srcSpanFile,
srcSpanStartLine, srcSpanEndLine,
srcSpanStartCol, srcSpanEndCol,
-- ** Located
GenLocated(..), Located,
-- *** Constructing Located
noLoc, mkGeneralLocated,
-- *** Deconstructing Located
getLoc, unLoc,
-- *** Combining and comparing Located values
eqLocated, cmpLocated, combineLocs, addCLoc,
leftmost_smallest, leftmost_largest, rightmost,
spans, isSubspanOf,
-- * Exceptions
GhcException(..), showGhcException,
-- * Token stream manipulations
Token,
getTokenStream, getRichTokenStream,
showRichTokenStream, addSourceToTokens,
-- * Pure interface to the parser
parser,
-- * API Annotations
ApiAnns,AnnKeywordId(..),AnnotationComment(..),
getAnnotation, getAndRemoveAnnotation,
getAnnotationComments, getAndRemoveAnnotationComments,
-- * Miscellaneous
--sessionHscEnv,
cyclicModuleErr,
-- * Telemetry
startMetrics,
endMetrics
) where
{-
ToDo:
* inline bits of HscMain here to simplify layering: hscTcExpr, hscStmt.
* what StaticFlags should we expose, if any?
-}
#ifdef GHCI
import ETA.Interactive.ByteCodeInstr
import ETA.Main.BreakArray
import InteractiveEval
import TcRnDriver ( runTcInteractive )
#endif
import ETA.Main.PprTyThing ( pprFamInst )
import ETA.Main.HscMain
import ETA.Main.GhcMake
import ETA.Main.DriverPipeline ( compileOne' )
import ETA.Main.GhcMonad
import ETA.TypeCheck.TcRnMonad ( finalSafeMode )
import ETA.TypeCheck.TcRnTypes
import ETA.Main.Packages
import ETA.BasicTypes.NameSet
import ETA.BasicTypes.RdrName
import qualified ETA.HsSyn.HsSyn as HsSyn -- hack as we want to reexport the whole module
import ETA.HsSyn.HsSyn
import ETA.Types.Type hiding( typeKind )
import ETA.Types.Kind ( synTyConResKind )
import ETA.TypeCheck.TcType hiding( typeKind )
import ETA.BasicTypes.Id
import ETA.Prelude.TysPrim ( alphaTyVars )
import ETA.Types.TyCon
import ETA.Types.Class
import ETA.BasicTypes.DataCon
import ETA.BasicTypes.Name hiding ( varName )
import ETA.BasicTypes.Avail
import ETA.Types.InstEnv
import ETA.Types.FamInstEnv ( FamInst )
import ETA.BasicTypes.SrcLoc
import ETA.Core.CoreSyn
import ETA.Main.TidyPgm
import ETA.Main.DriverPhases ( Phase(..), isHaskellSrcFilename )
import ETA.Main.Finder
import ETA.Main.HscTypes
import ETA.Main.DynFlags
import ETA.Main.StaticFlags
import ETA.Main.SysTools
import ETA.Main.Annotations
import ETA.BasicTypes.Module
import ETA.Utils.UniqFM
import ETA.Utils.Metrics
import ETA.Utils.Json
import ETA.Utils.Panic
-- import ETA.Utils.Platform
import ETA.Utils.Bag ( unitBag )
import ETA.Main.ErrUtils
import ETA.Utils.MonadUtils
import ETA.Utils.Util
import ETA.Utils.StringBuffer
import ETA.Utils.Outputable
import ETA.BasicTypes.BasicTypes
import ETA.Utils.Maybes ( expectJust )
import ETA.Utils.FastString
import qualified ETA.Parser.Parser as Parser
import ETA.Parser.Lexer
import ETA.Parser.ApiAnnotation
import System.Directory ( doesFileExist )
import Data.Maybe
import Data.List ( find )
import Data.Time
import Data.Typeable ( Typeable )
import Data.Word ( Word8 )
import Control.Monad
import System.Exit ( exitWith, ExitCode(..) )
import ETA.Utils.Exception
import Data.IORef
import System.FilePath
import System.Directory
import System.IO
import Prelude hiding (init)
-- %************************************************************************
-- %* *
-- Initialisation: exception handlers
-- %* *
-- %************************************************************************
-- | Install some default exception handlers and run the inner computation.
-- Unless you want to handle exceptions yourself, you should wrap this around
-- the top level of your program. The default handlers output the error
-- message(s) to stderr and exit cleanly.
defaultErrorHandler :: (ExceptionMonad m, MonadIO m)
=> FatalMessager -> FlushOut -> m a -> m a
defaultErrorHandler fm (FlushOut flushOut) inner =
-- top-level exception handler: any unrecognised exception is a compiler bug.
ghandle (\exception -> liftIO $ do
endMetrics
flushOut
case fromException exception of
-- an IO exception probably isn't our fault, so don't panic
Just (ioe :: IOException) ->
fatalErrorMsg'' fm (show ioe)
_ -> case fromException exception of
Just UserInterrupt ->
-- Important to let this one propagate out so our
-- calling process knows we were interrupted by ^C
liftIO $ throwIO UserInterrupt
Just StackOverflow ->
fatalErrorMsg'' fm "stack overflow: use +RTS -K<size> to increase it"
_ -> case fromException exception of
Just (ex :: ExitCode) -> liftIO $ throwIO ex
_ ->
fatalErrorMsg'' fm
(show (Panic (show exception)))
exitWith (ExitFailure 1)
) $
-- error messages propagated as exceptions
handleGhcException
(\ge -> liftIO $ do
flushOut
case ge of
PhaseFailed _ code -> exitWith code
Signal _ -> exitWith (ExitFailure 1)
_ -> do fatalErrorMsg'' fm (show ge)
exitWith (ExitFailure 1)
) $
inner
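-- A minimal usage sketch (illustrative only), wrapping the top level of a
-- client program; it assumes the stock 'defaultFatalMessager' and
-- 'defaultFlushOut' from DynFlags:
--
-- > main :: IO ()
-- > main = defaultErrorHandler defaultFatalMessager defaultFlushOut $
-- >          runGhc (Just libdir) $ ... -- libdir e.g. from the ghc-paths package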
-- | Install a default cleanup handler to remove temporary files deposited by
-- a GHC run. This is separate from 'defaultErrorHandler', because you might
-- want to override the error handling, but still get the ordinary cleanup
-- behaviour.
defaultCleanupHandler :: (ExceptionMonad m, MonadIO m) =>
DynFlags -> m a -> m a
defaultCleanupHandler dflags inner =
-- make sure we clean up after ourselves
inner `gfinally`
(liftIO $ do
cleanTempFiles dflags
cleanTempDirs dflags
)
-- exceptions will be blocked while we clean the temporary files,
-- so there shouldn't be any difficulty if we receive further
-- signals.
-- %************************************************************************
-- %* *
-- The Ghc Monad
-- %* *
-- %************************************************************************
-- | Run function for the 'Ghc' monad.
--
-- It initialises the GHC session and warnings via 'initGhcMonad'. Each call
-- to this function will create a new session which should not be shared among
-- several threads.
--
-- Any errors not handled inside the 'Ghc' action are propagated as IO
-- exceptions.
runGhc :: Maybe FilePath -- ^ See argument to 'initGhcMonad'.
-> Ghc a -- ^ The action to perform.
-> IO a
runGhc mb_top_dir ghc = do
ref <- newIORef (panic "empty session")
let session = Session ref
flip unGhc session $ do
initGhcMonad mb_top_dir
ghc
-- XXX: unregister interrupt handlers here?
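-- A minimal session sketch (illustrative only; the target name is made up):
--
-- > runGhc (Just libdir) $ do
-- >   dflags <- getSessionDynFlags
-- >   _ <- setSessionDynFlags dflags
-- >   t <- guessTarget "Main.hs" Nothing
-- >   setTargets [t]
-- >   load LoadAllTargets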
-- | Run function for 'GhcT' monad transformer.
--
-- It initialises the GHC session and warnings via 'initGhcMonad'. Each call
-- to this function will create a new session which should not be shared among
-- several threads.
runGhcT :: (ExceptionMonad m, Functor m, MonadIO m) =>
Maybe FilePath -- ^ See argument to 'initGhcMonad'.
-> GhcT m a -- ^ The action to perform.
-> m a
runGhcT mb_top_dir ghct = do
ref <- liftIO $ newIORef (panic "empty session")
let session = Session ref
flip unGhcT session $ do
initGhcMonad mb_top_dir
ghct
-- | Initialise a GHC session.
--
-- If you implement a custom 'GhcMonad' you must call this function in the
-- monad run function. It will initialise the session variable and clear all
-- warnings.
--
-- The first argument should point to the directory where GHC's library files
-- reside. More precisely, this should be the output of @ghc --print-libdir@
-- of the version of GHC the module using this API is compiled with. For
-- portability, you should use the @ghc-paths@ package, available at
-- <http://hackage.haskell.org/package/ghc-paths>.
initGhcMonad :: GhcMonad m => Maybe FilePath -> m ()
initGhcMonad mb_top_dir
= do { env <- liftIO $
do { installSignalHandlers -- catch ^C
; initStaticOpts
; mySettings <- initSysTools mb_top_dir
; dflags <- initDynFlags (defaultDynFlags mySettings)
; setUnsafeGlobalDynFlags dflags
-- c.f. DynFlags.parseDynamicFlagsFull, which
-- creates DynFlags and sets the UnsafeGlobalDynFlags
; newHscEnv dflags }
; setSession env }
-- %************************************************************************
-- %* *
-- Flags & settings
-- %* *
-- %************************************************************************
-- $DynFlags
--
-- The GHC session maintains two sets of 'DynFlags':
--
-- * The "interactive" @DynFlags@, which are used for everything
-- related to interactive evaluation, including 'runStmt',
-- 'runDecls', 'exprType', 'lookupName' and so on (everything
-- under \"Interactive evaluation\" in this module).
--
-- * The "program" @DynFlags@, which are used when loading
-- whole modules with 'load'
--
-- 'setInteractiveDynFlags', 'getInteractiveDynFlags' work with the
-- interactive @DynFlags@.
--
-- 'setProgramDynFlags', 'getProgramDynFlags' work with the
-- program @DynFlags@.
--
-- 'setSessionDynFlags' sets both @DynFlags@, and 'getSessionDynFlags'
-- retrieves the program @DynFlags@ (for backwards compatibility).
-- | Updates both the interactive and program DynFlags in a Session.
-- This also reads the package database (unless it has already been
-- read), and prepares the compiler's knowledge about packages. It can
-- be called again to load new packages: just add new package flags to
-- (packageFlags dflags).
--
-- Returns a list of new packages that may need to be linked in using
-- the dynamic linker (see 'linkPackages') as a result of new package
-- flags. If you are not doing linking or doing static linking, you
-- can ignore the list of packages returned.
--
setSessionDynFlags :: GhcMonad m => DynFlags -> m [InstalledUnitId]
setSessionDynFlags dflags = do
dflags' <- checkNewDynFlags dflags
(dflags'', preload) <- liftIO $ initPackages dflags'
modifySession $ \h -> h{ hsc_dflags = dflags''
, hsc_IC = (hsc_IC h){ ic_dflags = dflags'' } }
invalidateModSummaryCache
return preload
-- | Sets the program 'DynFlags'.
setProgramDynFlags :: GhcMonad m => DynFlags -> m [InstalledUnitId]
setProgramDynFlags dflags = do
dflags' <- checkNewDynFlags dflags
(dflags'', preload) <- liftIO $ initPackages dflags'
modifySession $ \h -> h{ hsc_dflags = dflags'' }
invalidateModSummaryCache
return preload
-- When changing the DynFlags, we want the changes to apply to future
-- loads, but without completely discarding the program. But the
-- DynFlags are cached in each ModSummary in the hsc_mod_graph, so
-- after a change to DynFlags, the changes would apply to new modules
-- but not existing modules; this seems undesirable.
--
-- Furthermore, the GHC API client might expect that changing
-- log_action would affect future compilation messages, but for those
-- modules we have cached ModSummaries for, we'll continue to use the
-- old log_action. This is definitely wrong (#7478).
--
-- Hence, we invalidate the ModSummary cache after changing the
-- DynFlags. We do this by tweaking the date on each ModSummary, so
-- that the next downsweep will think that all the files have changed
-- and preprocess them again. This won't necessarily cause everything
-- to be recompiled, because by the time we check whether we need to
-- recompile a module, we'll have re-summarised the module and have a
-- correct ModSummary.
--
invalidateModSummaryCache :: GhcMonad m => m ()
invalidateModSummaryCache =
modifySession $ \h -> h { hsc_mod_graph = map inval (hsc_mod_graph h) }
where
inval ms = ms { ms_hs_date = addUTCTime (-1) (ms_hs_date ms) }
-- | Returns the program 'DynFlags'.
getProgramDynFlags :: GhcMonad m => m DynFlags
getProgramDynFlags = getSessionDynFlags
-- | Set the 'DynFlags' used to evaluate interactive expressions.
-- Note: this cannot be used for changes to packages. Use
-- 'setSessionDynFlags', or 'setProgramDynFlags' and then copy the
-- 'pkgState' into the interactive @DynFlags@.
setInteractiveDynFlags :: GhcMonad m => DynFlags -> m ()
setInteractiveDynFlags dflags = do
dflags' <- checkNewDynFlags dflags
modifySession $ \h -> h{ hsc_IC = (hsc_IC h) { ic_dflags = dflags' }}
-- | Get the 'DynFlags' used to evaluate interactive expressions.
getInteractiveDynFlags :: GhcMonad m => m DynFlags
getInteractiveDynFlags = withSession $ \h -> return (ic_dflags (hsc_IC h))
parseDynamicFlags :: MonadIO m =>
DynFlags -> [Located String]
-> m (DynFlags, [Located String], [Located String])
parseDynamicFlags = parseDynamicFlagsCmdLine
-- | Checks the set of new DynFlags for possibly erroneous option
-- combinations when invoking 'setSessionDynFlags' and friends, and if
-- found, returns a fixed copy (if possible).
checkNewDynFlags :: MonadIO m => DynFlags -> m DynFlags
checkNewDynFlags dflags = do
-- See Note [DynFlags consistency]
let (dflags', warnings) = makeDynFlagsConsistent dflags
liftIO $ handleFlagWarnings dflags warnings
return dflags'
-- %************************************************************************
-- %* *
-- Setting, getting, and modifying the targets
-- %* *
-- %************************************************************************
-- ToDo: think about relative vs. absolute file paths. And what
-- happens when the current directory changes.
-- | Sets the targets for this session. Each target may be a module name
-- or a filename. The targets correspond to the set of root modules for
-- the program\/library. Unloading the current program is achieved by
-- setting the current set of targets to be empty, followed by 'load'.
setTargets :: GhcMonad m => [Target] -> m ()
setTargets targets = modifySession (\h -> h{ hsc_targets = targets })
-- | Returns the current set of targets
getTargets :: GhcMonad m => m [Target]
getTargets = withSession (return . hsc_targets)
-- | Add another target.
addTarget :: GhcMonad m => Target -> m ()
addTarget target
= modifySession (\h -> h{ hsc_targets = target : hsc_targets h })
-- | Remove a target
removeTarget :: GhcMonad m => TargetId -> m ()
removeTarget target_id
= modifySession (\h -> h{ hsc_targets = filter (hsc_targets h) })
where
filter targets = [ t | t@(Target id _ _) <- targets, id /= target_id ]
-- | Attempts to guess what Target a string refers to. This function
-- implements the @--make@/GHCi command-line syntax for filenames:
--
-- - if the string looks like a Haskell source filename, then interpret it
-- as such
--
-- - if adding a .hs or .lhs suffix yields the name of an existing file,
-- then use that
--
-- - otherwise interpret the string as a module name
--
guessTarget :: GhcMonad m => String -> Maybe Phase -> m Target
guessTarget str (Just phase)
= return (Target (TargetFile str (Just phase)) True Nothing)
guessTarget str Nothing
| isHaskellSrcFilename file
= return (target (TargetFile file Nothing))
| otherwise
= do exists <- liftIO $ doesFileExist hs_file
if exists
then return (target (TargetFile hs_file Nothing))
else do
exists <- liftIO $ doesFileExist lhs_file
if exists
then return (target (TargetFile lhs_file Nothing))
else do
if looksLikeModuleName file
then return (target (TargetModule (mkModuleName file)))
else do
dflags <- getDynFlags
liftIO $ throwGhcExceptionIO
(ProgramError (showSDoc dflags $
text "target" <+> quotes (text file) <+>
text "is not a module name or a source file"))
where
(file,obj_allowed)
| '*':rest <- str = (rest, False)
| otherwise = (str, True)
hs_file = file <.> "hs"
lhs_file = file <.> "lhs"
target tid = Target tid obj_allowed Nothing
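-- For example (illustrative):
--
-- > guessTarget "Foo.hs" Nothing -- interpreted as a file target
-- > guessTarget "Data.List" Nothing -- interpreted as a module target,
-- > -- assuming no "Data.List.hs" or "Data.List.lhs" file exists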
-- | Inform GHC that the working directory has changed. GHC will flush
-- its cache of module locations, since it may no longer be valid.
--
-- Note: Before changing the working directory make sure all threads running
-- in the same session have stopped. If you change the working directory,
-- you should also unload the current program (set targets to empty,
-- followed by load).
workingDirectoryChanged :: GhcMonad m => m ()
workingDirectoryChanged = withSession $ (liftIO . flushFinderCaches)
-- %************************************************************************
-- %* *
-- Running phases one at a time
-- %* *
-- %************************************************************************
class ParsedMod m where
modSummary :: m -> ModSummary
parsedSource :: m -> ParsedSource
class ParsedMod m => TypecheckedMod m where
renamedSource :: m -> Maybe RenamedSource
typecheckedSource :: m -> TypecheckedSource
moduleInfo :: m -> ModuleInfo
tm_internals :: m -> (TcGblEnv, ModDetails)
-- ToDo: improvements that could be made here:
-- if the module succeeded renaming but not typechecking,
-- we can still get back the GlobalRdrEnv and exports, so
-- perhaps the ModuleInfo should be split up into separate
-- fields.
class TypecheckedMod m => DesugaredMod m where
coreModule :: m -> ModGuts
-- | The result of successful parsing.
data ParsedModule =
ParsedModule { pm_mod_summary :: ModSummary
, pm_parsed_source :: ParsedSource
, pm_extra_src_files :: [FilePath]
, pm_annotations :: ApiAnns }
-- See Note [Api annotations] in ApiAnnotation.hs
instance ParsedMod ParsedModule where
modSummary m = pm_mod_summary m
parsedSource m = pm_parsed_source m
-- | The result of successful typechecking. It also contains the parser
-- result.
data TypecheckedModule =
TypecheckedModule { tm_parsed_module :: ParsedModule
, tm_renamed_source :: Maybe RenamedSource
, tm_typechecked_source :: TypecheckedSource
, tm_checked_module_info :: ModuleInfo
, tm_internals_ :: (TcGblEnv, ModDetails)
}
instance ParsedMod TypecheckedModule where
modSummary m = modSummary (tm_parsed_module m)
parsedSource m = parsedSource (tm_parsed_module m)
instance TypecheckedMod TypecheckedModule where
renamedSource m = tm_renamed_source m
typecheckedSource m = tm_typechecked_source m
moduleInfo m = tm_checked_module_info m
tm_internals m = tm_internals_ m
-- | The result of successful desugaring (i.e., translation to core). Also
-- contains all the information of a typechecked module.
data DesugaredModule =
DesugaredModule { dm_typechecked_module :: TypecheckedModule
, dm_core_module :: ModGuts
}
instance ParsedMod DesugaredModule where
modSummary m = modSummary (dm_typechecked_module m)
parsedSource m = parsedSource (dm_typechecked_module m)
instance TypecheckedMod DesugaredModule where
renamedSource m = renamedSource (dm_typechecked_module m)
typecheckedSource m = typecheckedSource (dm_typechecked_module m)
moduleInfo m = moduleInfo (dm_typechecked_module m)
tm_internals m = tm_internals_ (dm_typechecked_module m)
instance DesugaredMod DesugaredModule where
coreModule m = dm_core_module m
type ParsedSource = Located (HsModule RdrName)
type RenamedSource = (HsGroup Name, [LImportDecl Name], Maybe [LIE Name],
Maybe LHsDocString)
type TypecheckedSource = LHsBinds Id
-- NOTE:
-- - things that aren't in the output of the typechecker right now:
-- - the export list
-- - the imports
-- - type signatures
-- - type/data/newtype declarations
-- - class declarations
-- - instances
-- - extra things in the typechecker's output:
-- - default methods are turned into top-level decls.
-- - dictionary bindings
-- | Return the 'ModSummary' of a module with the given name.
--
-- The module must be part of the module graph (see 'hsc_mod_graph' and
-- 'ModuleGraph'). If this is not the case, this function will throw a
-- 'GhcApiError'.
--
-- This function ignores boot modules and requires that there is only one
-- non-boot module with the given name.
getModSummary :: GhcMonad m => ModuleName -> m ModSummary
getModSummary mod = do
mg <- liftM hsc_mod_graph getSession
case [ ms | ms <- mg, ms_mod_name ms == mod, not (isBootSummary ms) ] of
[] -> do dflags <- getDynFlags
liftIO $ throwIO $ mkApiErr dflags (text "Module not part of module graph")
[ms] -> return ms
multiple -> do dflags <- getDynFlags
liftIO $ throwIO $ mkApiErr dflags (text "getModSummary is ambiguous: " <+> ppr multiple)
-- | Parse a module.
--
-- Throws a 'SourceError' on parse error.
parseModule :: GhcMonad m => ModSummary -> m ParsedModule
parseModule ms = do
hsc_env <- getSession
let hsc_env_tmp = hsc_env { hsc_dflags = ms_hspp_opts ms }
hpm <- liftIO $ hscParse hsc_env_tmp ms
return (ParsedModule ms (hpm_module hpm) (hpm_src_files hpm)
(hpm_annotations hpm))
-- See Note [Api annotations] in ApiAnnotation.hs
-- | Typecheck and rename a parsed module.
--
-- Throws a 'SourceError' if either fails.
typecheckModule :: GhcMonad m => ParsedModule -> m TypecheckedModule
typecheckModule pmod = do
let ms = modSummary pmod
hsc_env <- getSession
let hsc_env_tmp = hsc_env { hsc_dflags = ms_hspp_opts ms }
(tc_gbl_env, rn_info)
<- liftIO $ hscTypecheckRename hsc_env_tmp ms $
HsParsedModule { hpm_module = parsedSource pmod,
hpm_src_files = pm_extra_src_files pmod,
hpm_annotations = pm_annotations pmod }
details <- liftIO $ makeSimpleDetails hsc_env_tmp tc_gbl_env
safe <- liftIO $ finalSafeMode (ms_hspp_opts ms) tc_gbl_env
return $
TypecheckedModule {
tm_internals_ = (tc_gbl_env, details),
tm_parsed_module = pmod,
tm_renamed_source = rn_info,
tm_typechecked_source = tcg_binds tc_gbl_env,
tm_checked_module_info =
ModuleInfo {
minf_type_env = md_types details,
minf_exports = availsToNameSet $ md_exports details,
minf_rdr_env = Just (tcg_rdr_env tc_gbl_env),
minf_instances = md_insts details,
minf_iface = Nothing,
minf_safe = safe
#ifdef GHCI
,minf_modBreaks = emptyModBreaks
#endif
}}
-- | Desugar a typechecked module.
desugarModule :: GhcMonad m => TypecheckedModule -> m DesugaredModule
desugarModule tcm = do
let ms = modSummary tcm
let (tcg, _) = tm_internals tcm
hsc_env <- getSession
let hsc_env_tmp = hsc_env { hsc_dflags = ms_hspp_opts ms }
guts <- liftIO $ hscDesugar hsc_env_tmp ms tcg
return $
DesugaredModule {
dm_typechecked_module = tcm,
dm_core_module = guts
}
-- | Load a module. Input doesn't need to be desugared.
--
-- A module must be loaded before dependent modules can be typechecked. This
-- always includes generating a 'ModIface' and, depending on the
-- 'DynFlags.hscTarget', may also include code generation.
--
-- This function will always cause recompilation and will always overwrite
-- previous compilation results (potentially files on disk).
--
loadModule :: (TypecheckedMod mod, GhcMonad m) => mod -> m mod
loadModule tcm = do
let ms = modSummary tcm
let mod = ms_mod_name ms
let loc = ms_location ms
let (tcg, _details) = tm_internals tcm
mb_linkable <- case ms_obj_date ms of
Just t | t > ms_hs_date ms -> do
l <- liftIO $ findObjectLinkable (ms_mod ms)
(ml_obj_file loc) t
return (Just l)
_otherwise -> return Nothing
let source_modified | isNothing mb_linkable = SourceModified
| otherwise = SourceUnmodified
-- we can't determine stability here
-- compile doesn't change the session
hsc_env <- getSession
mod_info <- liftIO $ compileOne' (Just tcg) Nothing
hsc_env ms 1 1 Nothing mb_linkable
source_modified
modifySession $ \e -> e{ hsc_HPT = addToUFM (hsc_HPT e) mod mod_info }
return tcm
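-- A sketch of the per-module pipeline (illustrative only; it assumes the
-- module "Foo" is already in the module graph, e.g. after 'load'):
--
-- > ms <- getModSummary (mkModuleName "Foo")
-- > p  <- parseModule ms
-- > t  <- typecheckModule p
-- > d  <- desugarModule t
-- > let core = coreModule d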
-- %************************************************************************
-- %* *
-- Dealing with Core
-- %* *
-- %************************************************************************
-- | A CoreModule consists of just the fields of a 'ModGuts' that are needed for
-- the 'GHC.compileToCoreModule' interface.
data CoreModule
= CoreModule {
-- | Module name
cm_module :: !Module,
-- | Type environment for types declared in this module
cm_types :: !TypeEnv,
-- | Declarations
cm_binds :: CoreProgram,
-- | Safe Haskell mode
cm_safe :: SafeHaskellMode
}
instance Outputable CoreModule where
ppr (CoreModule {cm_module = mn, cm_types = te, cm_binds = cb,
cm_safe = sf})
= text "%module" <+> ppr mn <+> parens (ppr sf) <+> ppr te
$$ vcat (map ppr cb)
-- | This is the way to get access to the Core bindings corresponding
-- to a module. 'compileToCoreModule' parses, typechecks, and
-- desugars the module, then returns the resulting Core module (consisting of
-- the module name, type declarations, and function declarations) if
-- successful.
compileToCoreModule :: GhcMonad m => FilePath -> m CoreModule
compileToCoreModule = compileCore False
-- | Like compileToCoreModule, but invokes the simplifier, so
-- as to return simplified and tidied Core.
compileToCoreSimplified :: GhcMonad m => FilePath -> m CoreModule
compileToCoreSimplified = compileCore True
compileCore :: GhcMonad m => Bool -> FilePath -> m CoreModule
compileCore simplify fn = do
-- First, set the target to the desired filename
target <- guessTarget fn Nothing
addTarget target
_ <- load LoadAllTargets
-- Then find dependencies
modGraph <- depanal [] True
case find ((== fn) . msHsFilePath) modGraph of
Just modSummary -> do
-- Now we have the module name;
-- parse, typecheck and desugar the module
mod_guts <- coreModule `fmap`
-- TODO: space leaky: call hsc* directly?
(desugarModule =<< typecheckModule =<< parseModule modSummary)
liftM (gutsToCoreModule (mg_safe_haskell mod_guts)) $
if simplify
then do
-- If simplify is true: simplify (hscSimplify), then tidy
-- (tidyProgram).
hsc_env <- getSession
simpl_guts <- liftIO $ hscSimplify hsc_env mod_guts
tidy_guts <- liftIO $ tidyProgram hsc_env simpl_guts
return $ Left tidy_guts
else
return $ Right mod_guts
Nothing -> panic "compileToCoreModule: target FilePath not found in\
                   \ module dependency graph"
where -- two versions, based on whether we simplify (thus run tidyProgram,
        -- which returns a (CgGuts, ModDetails) pair) or not (in which case
        -- we just have a ModGuts).
gutsToCoreModule :: SafeHaskellMode
-> Either (CgGuts, ModDetails) ModGuts
-> CoreModule
gutsToCoreModule safe_mode (Left (cg, md)) = CoreModule {
cm_module = cg_module cg,
cm_types = md_types md,
cm_binds = cg_binds cg,
cm_safe = safe_mode
}
gutsToCoreModule safe_mode (Right mg) = CoreModule {
cm_module = mg_module mg,
cm_types = typeEnvFromEntities (bindersOfBinds (mg_binds mg))
(mg_tcs mg)
(mg_fam_insts mg),
cm_binds = mg_binds mg,
cm_safe = safe_mode
}
-- %************************************************************************
-- %* *
-- Inspecting the session
-- %* *
-- %************************************************************************
-- | Get the module dependency graph.
getModuleGraph :: GhcMonad m => m ModuleGraph -- ToDo: DiGraph ModSummary
getModuleGraph = liftM hsc_mod_graph getSession
-- | Determines whether a set of modules requires Template Haskell.
--
-- Note that if the session's 'DynFlags' enabled Template Haskell when
-- 'depanal' was called, then each module in the returned module graph will
-- have Template Haskell enabled whether it is actually needed or not.
needsTemplateHaskell :: ModuleGraph -> Bool
needsTemplateHaskell ms =
any (xopt Opt_TemplateHaskell . ms_hspp_opts) ms
-- | Return @True@ <==> module is loaded.
isLoaded :: GhcMonad m => ModuleName -> m Bool
isLoaded m = withSession $ \hsc_env ->
return $! isJust (lookupUFM (hsc_HPT hsc_env) m)
-- | Return the bindings for the current interactive session.
getBindings :: GhcMonad m => m [TyThing]
getBindings = withSession $ \hsc_env ->
return $ icInScopeTTs $ hsc_IC hsc_env
-- | Return the instances for the current interactive session.
getInsts :: GhcMonad m => m ([ClsInst], [FamInst])
getInsts = withSession $ \hsc_env ->
return $ ic_instances (hsc_IC hsc_env)
getPrintUnqual :: GhcMonad m => m PrintUnqualified
getPrintUnqual = withSession $ \hsc_env ->
return (icPrintUnqual (hsc_dflags hsc_env) (hsc_IC hsc_env))
-- | Container for information about a 'Module'.
data ModuleInfo = ModuleInfo {
minf_type_env :: TypeEnv,
minf_exports :: NameSet, -- ToDo, [AvailInfo] like ModDetails?
minf_rdr_env :: Maybe GlobalRdrEnv, -- Nothing for a compiled/package mod
minf_instances :: [ClsInst],
minf_iface :: Maybe ModIface,
minf_safe :: SafeHaskellMode
#ifdef GHCI
,minf_modBreaks :: ModBreaks
#endif
}
-- We don't want HomeModInfo here, because a ModuleInfo applies
-- to package modules too.
-- | Request information about a loaded 'Module'
getModuleInfo :: GhcMonad m => Module -> m (Maybe ModuleInfo) -- XXX: Maybe X
getModuleInfo mdl = withSession $ \hsc_env -> do
let mg = hsc_mod_graph hsc_env
if mdl `elem` map ms_mod mg
then liftIO $ getHomeModuleInfo hsc_env mdl
else do
{- if isHomeModule (hsc_dflags hsc_env) mdl
then return Nothing
else -} liftIO $ getPackageModuleInfo hsc_env mdl
-- ToDo: we don't understand what the following comment means.
-- (SDM, 19/7/2011)
-- getPackageModuleInfo will attempt to find the interface, so
-- we don't want to call it for a home module, just in case there
-- was a problem loading the module and the interface doesn't
-- exist... hence the isHomeModule test here. (ToDo: reinstate)
getPackageModuleInfo :: HscEnv -> Module -> IO (Maybe ModuleInfo)
#ifdef GHCI
getPackageModuleInfo hsc_env mdl
= do eps <- hscEPS hsc_env
iface <- hscGetModuleInterface hsc_env mdl
let
avails = mi_exports iface
names = availsToNameSet avails
pte = eps_PTE eps
tys = [ ty | name <- concatMap availNames avails,
Just ty <- [lookupTypeEnv pte name] ]
--
return (Just (ModuleInfo {
minf_type_env = mkTypeEnv tys,
minf_exports = names,
minf_rdr_env = Just $! availsToGlobalRdrEnv (moduleName mdl) avails,
minf_instances = error "getModuleInfo: instances for package module unimplemented",
minf_iface = Just iface,
minf_safe = getSafeMode $ mi_trust iface,
minf_modBreaks = emptyModBreaks
}))
#else
-- bogusly different for non-GHCI (ToDo)
getPackageModuleInfo _hsc_env _mdl = do
return Nothing
#endif
getHomeModuleInfo :: HscEnv -> Module -> IO (Maybe ModuleInfo)
getHomeModuleInfo hsc_env mdl =
case lookupUFM (hsc_HPT hsc_env) (moduleName mdl) of
Nothing -> return Nothing
Just hmi -> do
let details = hm_details hmi
iface = hm_iface hmi
return (Just (ModuleInfo {
minf_type_env = md_types details,
minf_exports = availsToNameSet (md_exports details),
minf_rdr_env = mi_globals $! hm_iface hmi,
minf_instances = md_insts details,
minf_iface = Just iface,
minf_safe = getSafeMode $ mi_trust iface
#ifdef GHCI
,minf_modBreaks = getModBreaks hmi
#endif
}))
-- | The list of top-level entities defined in a module
modInfoTyThings :: ModuleInfo -> [TyThing]
modInfoTyThings minf = typeEnvElts (minf_type_env minf)
modInfoTopLevelScope :: ModuleInfo -> Maybe [Name]
modInfoTopLevelScope minf
= fmap (map gre_name . globalRdrEnvElts) (minf_rdr_env minf)
modInfoExports :: ModuleInfo -> [Name]
modInfoExports minf = nameSetElems $! minf_exports minf
-- | Returns the instances defined by the specified module.
-- Warning: currently unimplemented for package modules.
modInfoInstances :: ModuleInfo -> [ClsInst]
modInfoInstances = minf_instances
modInfoIsExportedName :: ModuleInfo -> Name -> Bool
modInfoIsExportedName minf name = elemNameSet name (minf_exports minf)
mkPrintUnqualifiedForModule :: GhcMonad m =>
ModuleInfo
-> m (Maybe PrintUnqualified) -- XXX: returns a Maybe X
mkPrintUnqualifiedForModule minf = withSession $ \hsc_env -> do
return (fmap (mkPrintUnqualified (hsc_dflags hsc_env)) (minf_rdr_env minf))
modInfoLookupName :: GhcMonad m =>
ModuleInfo -> Name
-> m (Maybe TyThing) -- XXX: returns a Maybe X
modInfoLookupName minf name = withSession $ \hsc_env -> do
case lookupTypeEnv (minf_type_env minf) name of
Just tyThing -> return (Just tyThing)
Nothing -> do
eps <- liftIO $ readIORef (hsc_EPS hsc_env)
return $! lookupType (hsc_dflags hsc_env)
(hsc_HPT hsc_env) (eps_PTE eps) name
modInfoIface :: ModuleInfo -> Maybe ModIface
modInfoIface = minf_iface
-- | Retrieve the module's Safe Haskell mode.
modInfoSafe :: ModuleInfo -> SafeHaskellMode
modInfoSafe = minf_safe
#ifdef GHCI
modInfoModBreaks :: ModuleInfo -> ModBreaks
modInfoModBreaks = minf_modBreaks
#endif
isDictonaryId :: Id -> Bool
isDictonaryId id
= case tcSplitSigmaTy (idType id) of { (_tvs, _theta, tau) -> isDictTy tau }
-- | Looks up a global name: that is, any top-level name in any
-- visible module. Unlike 'lookupName', lookupGlobalName does not use
-- the interactive context, and therefore does not require a preceding
-- 'setContext'.
lookupGlobalName :: GhcMonad m => Name -> m (Maybe TyThing)
lookupGlobalName name = withSession $ \hsc_env -> do
liftIO $ lookupTypeHscEnv hsc_env name
findGlobalAnns :: (GhcMonad m, Typeable a) => ([Word8] -> a) -> AnnTarget Name -> m [a]
findGlobalAnns deserialize target = withSession $ \hsc_env -> do
ann_env <- liftIO $ prepareAnnotations hsc_env Nothing
return (findAnns deserialize ann_env target)
#ifdef GHCI
-- | get the GlobalRdrEnv for a session
getGRE :: GhcMonad m => m GlobalRdrEnv
getGRE = withSession $ \hsc_env-> return $ ic_rn_gbl_env (hsc_IC hsc_env)
#endif
-- -----------------------------------------------------------------------------
{- ToDo: Move the primary logic here to compiler/main/Packages.lhs
-- | Return all /external/ modules available in the package database.
-- Modules from the current session (i.e., from the 'HomePackageTable') are
-- not included. This includes module names which are reexported by packages.
packageDbModules :: GhcMonad m =>
Bool -- ^ Only consider exposed packages.
-> m [Module]
packageDbModules only_exposed = do
dflags <- getSessionDynFlags
let pkgs = eltsUFM (pkgIdMap (pkgState dflags))
return $
[ mkModule pid modname
| p <- pkgs
, not only_exposed || exposed p
, let pid = packageConfigId p
, modname <- exposedModules p
++ map exportName (reexportedModules p) ]
-}
-- -----------------------------------------------------------------------------
-- Misc exported utils
dataConType :: DataCon -> Type
dataConType dc = idType (dataConWrapId dc)
-- | print a 'NamedThing', adding parentheses if the name is an operator.
pprParenSymName :: NamedThing a => a -> SDoc
pprParenSymName a = parenSymOcc (getOccName a) (ppr (getName a))
-- ----------------------------------------------------------------------------
#if 0
-- ToDo:
-- - Data and Typeable instances for HsSyn.
-- ToDo: check for small transformations that happen to the syntax in
-- the typechecker (eg. -e ==> negate e, perhaps for fromIntegral)
-- ToDo: maybe use TH syntax instead of IfaceSyn? There's already a way
-- to get from TyCons, Ids etc. to TH syntax (reify).
-- :browse will use either lm_toplev or inspect lm_interface, depending
-- on whether the module is interpreted or not.
#endif
-- Extract the filename, stringbuffer content and dynflags associated with a module
--
-- XXX: Explain pre-conditions
getModuleSourceAndFlags :: GhcMonad m => Module -> m (String, StringBuffer, DynFlags)
getModuleSourceAndFlags mod = do
m <- getModSummary (moduleName mod)
case ml_hs_file $ ms_location m of
Nothing -> do dflags <- getDynFlags
liftIO $ throwIO $ mkApiErr dflags (text "No source available for module " <+> ppr mod)
Just sourceFile -> do
source <- liftIO $ hGetStringBuffer sourceFile
return (sourceFile, source, ms_hspp_opts m)
-- | Return module source as token stream, including comments.
--
-- The module must be in the module graph and its source must be available.
-- Throws a 'HscTypes.SourceError' on parse error.
getTokenStream :: GhcMonad m => Module -> m [Located Token]
getTokenStream mod = do
(sourceFile, source, flags) <- getModuleSourceAndFlags mod
let startLoc = mkRealSrcLoc (mkFastString sourceFile) 1 1
case lexTokenStream source startLoc flags of
POk _ ts -> return ts
PFailed span err ->
do dflags <- getDynFlags
liftIO $ throwIO $ mkSrcErr (unitBag $ mkPlainErrMsg dflags span err)
-- | Give even more information on the source than 'getTokenStream'
-- This function allows reconstructing the source completely with
-- 'showRichTokenStream'.
getRichTokenStream :: GhcMonad m => Module -> m [(Located Token, String)]
getRichTokenStream mod = do
(sourceFile, source, flags) <- getModuleSourceAndFlags mod
let startLoc = mkRealSrcLoc (mkFastString sourceFile) 1 1
case lexTokenStream source startLoc flags of
POk _ ts -> return $ addSourceToTokens startLoc source ts
PFailed span err ->
do dflags <- getDynFlags
liftIO $ throwIO $ mkSrcErr (unitBag $ mkPlainErrMsg dflags span err)
-- | Given a source location and a StringBuffer corresponding to this
-- location, return a rich token stream with the source associated to the
-- tokens.
addSourceToTokens :: RealSrcLoc -> StringBuffer -> [Located Token]
-> [(Located Token, String)]
addSourceToTokens _ _ [] = []
addSourceToTokens loc buf (t@(L span _) : ts)
= case span of
UnhelpfulSpan _ -> (t,"") : addSourceToTokens loc buf ts
RealSrcSpan s -> (t,str) : addSourceToTokens newLoc newBuf ts
where
(newLoc, newBuf, str) = go "" loc buf
start = realSrcSpanStart s
end = realSrcSpanEnd s
go acc loc buf | loc < start = go acc nLoc nBuf
| start <= loc && loc < end = go (ch:acc) nLoc nBuf
| otherwise = (loc, buf, reverse acc)
where (ch, nBuf) = nextChar buf
nLoc = advanceSrcLoc loc ch
-- | Take a rich token stream such as produced from 'getRichTokenStream' and
-- return source code almost identical to the original code (except for
-- insignificant whitespace).
showRichTokenStream :: [(Located Token, String)] -> String
showRichTokenStream ts = go startLoc ts ""
where sourceFile = getFile $ map (getLoc . fst) ts
getFile [] = panic "showRichTokenStream: No source file found"
getFile (UnhelpfulSpan _ : xs) = getFile xs
getFile (RealSrcSpan s : _) = srcSpanFile s
startLoc = mkRealSrcLoc sourceFile 1 1
go _ [] = id
go loc ((L span _, str):ts)
= case span of
UnhelpfulSpan _ -> go loc ts
RealSrcSpan s
| locLine == tokLine -> ((replicate (tokCol - locCol) ' ') ++)
. (str ++)
. go tokEnd ts
| otherwise -> ((replicate (tokLine - locLine) '\n') ++)
. ((replicate (tokCol - 1) ' ') ++)
. (str ++)
. go tokEnd ts
where (locLine, locCol) = (srcLocLine loc, srcLocCol loc)
(tokLine, tokCol) = (srcSpanStartLine s, srcSpanStartCol s)
tokEnd = realSrcSpanEnd s
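-- A round-trip sketch (illustrative only; the module must be in the module
-- graph with its source available):
--
-- > ts <- getRichTokenStream =<< findModule (mkModuleName "Foo") Nothing
-- > liftIO $ putStr (showRichTokenStream ts)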
-- -----------------------------------------------------------------------------
-- Interactive evaluation
-- | Takes a 'ModuleName' and possibly a 'UnitId', and consults the
-- filesystem and package database to find the corresponding 'Module',
-- using the algorithm that is used for an @import@ declaration.
findModule :: GhcMonad m => ModuleName -> Maybe FastString -> m Module
findModule mod_name maybe_pkg = withSession $ \hsc_env -> do
let
dflags = hsc_dflags hsc_env
this_pkg = thisPackage dflags
--
case maybe_pkg of
Just pkg | fsToUnitId pkg /= this_pkg && pkg /= fsLit "this" -> liftIO $ do
res <- findImportedModule hsc_env mod_name maybe_pkg
case res of
Found _ m -> return m
err -> throwOneError $ noModError dflags noSrcSpan mod_name err
_otherwise -> do
home <- lookupLoadedHomeModule mod_name
case home of
Just m -> return m
Nothing -> liftIO $ do
res <- findImportedModule hsc_env mod_name maybe_pkg
case res of
Found loc m | moduleUnitId m /= this_pkg -> return m
| otherwise -> modNotLoadedError dflags m loc
err -> throwOneError $ noModError dflags noSrcSpan mod_name err
modNotLoadedError :: DynFlags -> Module -> ModLocation -> IO a
modNotLoadedError dflags m loc = throwGhcExceptionIO $ CmdLineError $ showSDoc dflags $
text "module is not loaded:" <+>
quotes (ppr (moduleName m)) <+>
parens (text (expectJust "modNotLoadedError" (ml_hs_file loc)))
-- | Like 'findModule', but differs slightly when the module refers to
-- a source file, and the file has not been loaded via 'load'. In
-- this case, 'findModule' will throw an error (module not loaded),
-- but 'lookupModule' will check to see whether the module can also be
-- found in a package, and if so, that package 'Module' will be
-- returned. If not, the usual module-not-found error will be thrown.
--
lookupModule :: GhcMonad m => ModuleName -> Maybe FastString -> m Module
lookupModule mod_name (Just pkg) = findModule mod_name (Just pkg)
lookupModule mod_name Nothing = withSession $ \hsc_env -> do
home <- lookupLoadedHomeModule mod_name
case home of
Just m -> return m
Nothing -> liftIO $ do
res <- findExposedPackageModule hsc_env mod_name Nothing
case res of
Found _ m -> return m
err -> throwOneError $ noModError (hsc_dflags hsc_env) noSrcSpan mod_name err
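-- A usage sketch (illustrative only; 'mkModuleName' comes from the GHC API):
--
-- > mod <- lookupModule (mkModuleName "Data.List") Nothing
--
-- This prefers a module loaded into the home package table and otherwise
-- falls back to the exposed packages, as described above.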
lookupLoadedHomeModule :: GhcMonad m => ModuleName -> m (Maybe Module)
lookupLoadedHomeModule mod_name = withSession $ \hsc_env ->
case lookupUFM (hsc_HPT hsc_env) mod_name of
Just mod_info -> return (Just (mi_module (hm_iface mod_info)))
_not_a_home_module -> return Nothing
#ifdef GHCI
-- | Check that a module is safe to import (according to Safe Haskell).
--
-- We return True to indicate the import is safe and False otherwise,
-- although in the False case an error may be thrown first.
isModuleTrusted :: GhcMonad m => Module -> m Bool
isModuleTrusted m = withSession $ \hsc_env ->
liftIO $ hscCheckSafe hsc_env m noSrcSpan
-- | Return if a module is trusted and the pkgs it depends on to be trusted.
moduleTrustReqs :: GhcMonad m => Module -> m (Bool, [UnitId])
moduleTrustReqs m = withSession $ \hsc_env ->
liftIO $ hscGetSafe hsc_env m noSrcSpan
-- | EXPERIMENTAL: DO NOT USE.
--
-- Set the monad GHCi lifts user statements into.
--
-- Checks that a type (in string form) is an instance of the
-- @GHC.GHCi.GHCiSandboxIO@ type class. Sets it to be the GHCi monad if it is,
-- throws an error otherwise.
{-# WARNING setGHCiMonad "This is experimental! Don't use." #-}
setGHCiMonad :: GhcMonad m => String -> m ()
setGHCiMonad name = withSession $ \hsc_env -> do
ty <- liftIO $ hscIsGHCiMonad hsc_env name
modifySession $ \s ->
let ic = (hsc_IC s) { ic_monad = ty }
in s { hsc_IC = ic }
getHistorySpan :: GhcMonad m => History -> m SrcSpan
getHistorySpan h = withSession $ \hsc_env ->
return $ InteractiveEval.getHistorySpan hsc_env h
obtainTermFromVal :: GhcMonad m => Int -> Bool -> Type -> a -> m Term
obtainTermFromVal bound force ty a = withSession $ \hsc_env ->
liftIO $ InteractiveEval.obtainTermFromVal hsc_env bound force ty a
obtainTermFromId :: GhcMonad m => Int -> Bool -> Id -> m Term
obtainTermFromId bound force id = withSession $ \hsc_env ->
liftIO $ InteractiveEval.obtainTermFromId hsc_env bound force id
#endif
-- | Returns the 'TyThing' for a 'Name'. The 'Name' may refer to any
-- entity known to GHC, including 'Name's defined using 'runStmt'.
lookupName :: GhcMonad m => Name -> m (Maybe TyThing)
lookupName name =
withSession $ \hsc_env ->
liftIO $ hscTcRcLookupName hsc_env name
-- -----------------------------------------------------------------------------
-- Pure API
-- | A pure interface to the module parser.
--
parser :: String -- ^ Haskell module source text (full Unicode is supported)
-> DynFlags -- ^ the flags
-> FilePath -- ^ the filename (for source locations)
-> Either ErrorMessages (WarningMessages, Located (HsModule RdrName))
parser str dflags filename =
let
loc = mkRealSrcLoc (mkFastString filename) 1 1
buf = stringToStringBuffer str
in
case unP Parser.parseModule (mkPState dflags buf loc) of
PFailed span err ->
Left (unitBag (mkPlainErrMsg dflags span err))
POk pst rdr_module ->
let (warns,_) = getMessages pst in
Right (warns, rdr_module)
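-- A usage sketch (illustrative only; @dflags@ is assumed to come from a GHC
-- session, e.g. via 'getSessionDynFlags'):
--
-- > case parser "module M where\nx = 1\n" dflags "M.hs" of
-- >   Left errs           -> {- report the error bag -} return ()
-- >   Right (warns, mod') -> {- use the parsed module -} return ()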
-- Telemetry API
eventsLog :: IO FilePath
eventsLog = fmap (</> "metrics" </> "events.log") $ findTopDir Nothing
startMetrics :: Mode -> IO ()
startMetrics mode = do
startTime <- liftIO $ getCurrentTime
modifyIORef (metrics unsafeGlobalDynFlags)
(const (Just $ emptyMetrics { metStartTime = startTime
, metMode = mode }))
endMetrics :: IO ()
endMetrics = do
endTime <- getCurrentTime
modifyIORef (metrics unsafeGlobalDynFlags)
(\may -> fmap (\m -> m { metEndTime = endTime }) may)
saveMetrics
saveMetrics :: IO ()
saveMetrics = do
mMetrics <- readIORef $ metrics unsafeGlobalDynFlags
case mMetrics of
Just metrics -> do
fp <- eventsLog
exists <- doesFileExist fp
let payload = (showSDocUnsafe . renderJSON $ json metrics) ++ "\n"
if exists
then appendFile fp payload
else do
createDirectoryIfMissing True $ takeDirectory fp
writeFile fp payload
Nothing -> return ()
| pparkkin/eta | compiler/ETA/Main/GHC.hs | bsd-3-clause | 56,273 | 4 | 28 | 15,184 | 10,181 | 5,512 | 4,669 | -1 | -1 |
module Algebra.Structures.UFD
( module Algebra.Structures.IntegrallyClosedDomain
, UFD(..)
) where
import Algebra.Structures.IntegrallyClosedDomain
class IntegrallyClosedDomain a => UFD a
| Alex128/abstract-math | src/Algebra/Structures/UFD.hs | bsd-3-clause | 204 | 0 | 6 | 33 | 43 | 26 | 17 | -1 | -1 |
module Data.Bool.Kleisli (
allM,
anyM,
orM,
andM,
kleisify,
(<.>)
) where
import Control.Monad
import Control.Monad.Trans.Maybe
import Data.Maybe
boolToMaybe :: Monad m => Bool -> m (Maybe ())
boolToMaybe True = return $ Just ()
boolToMaybe False = return Nothing
infixr 4 <.>
(<.>) :: (Applicative m) => (b -> c) -> (a -> m b) -> (a -> m c)
(<.>) f g = (<*>) (pure f) . ($) g
kleisify :: Monad m => (a -> b) -> a -> m b
kleisify f = return . ($) f
helper bs a f = fmap (f . isJust) . runMaybeT . mapM_ (MaybeT . ($ a) . (<=<) (boolToMaybe . f)) $ bs
allM :: (Functor m, Monad m) => [a -> m Bool] -> a -> m Bool
allM bs a = helper bs a id
anyM :: (Functor m, Monad m) => [a -> m Bool] -> a -> m Bool
anyM bs a = helper bs a not
andM :: (Functor m, Monad m) => (a -> m Bool) -> (a -> m Bool) -> a -> m Bool
andM m1 m2 = allM [m1, m2]
orM :: (Functor m, Monad m) => (a -> m Bool) -> (a -> m Bool) -> a -> m Bool
orM m1 m2 = anyM [m1, m2]
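-- Usage sketch (illustrative only, run here in IO; any Functor/Monad works):
--
-- > allM [return . even, return . (> 0)] 4   -- evaluates to True
-- > anyM [return . odd,  return . (< 0)] 4   -- evaluates to False
--
-- Both short-circuit: once the result is decided, later predicates are not run.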
| AnkalagonBlack/bool-kleisli | src/Data/Bool/Kleisli.hs | bsd-3-clause | 906 | 0 | 12 | 219 | 534 | 283 | 251 | 26 | 1 |
module RefacConDef (subConstantDef) where
import PrettyPrint
import PosSyntax
import AbstractIO
import Maybe
import TypedIds
import UniqueNames hiding (srcLoc)
import PNT
import TiPNT
import List
import RefacUtils
import PFE0 (findFile)
import MUtils (( # ))
-- folding against a constant definition
subConstantDef args
= do let fileName = args!!0
beginRow = read (args!!1)::Int
beginCol = read (args!!2)::Int
endRow = read (args!!3)::Int
endCol = read (args!!4)::Int
AbstractIO.putStrLn "subConstantDef"
(inscps, exps, mod, tokList) <- parseSourceFile fileName
let (pnt,subExp) = findDefNameAndExp tokList (beginRow, beginCol) (endRow, endCol) mod
if isPatBind pnt
       then do
           -- constant folding... (placeholder: the original body is elided
           -- here; we fail explicitly rather than leave an empty 'do' block,
           -- which would not parse)
           error "subConstantDef: constant folding is not implemented"
else do
-- function folding
let exp = locToExp (beginRow, beginCol) (endRow, endCol) tokList mod
(mod', ((tokList', m) , _)) <- runStateT (doSubstitution pnt exp mod) ((tokList,False),( 0, 0))
writeRefactoredFiles False [((fileName, m), (tokList', mod'))]
findDefNameAndExp toks beginPos endPos t
= fromMaybe (defaultPNT, defaultExp) (applyTU (once_tdTU (failTU `adhocTU` inMatch
`adhocTU` inPat)) t) --CAN NOT USE 'once_tdTU' here.
where --The selected sub-expression is in the rhs of a match
inMatch (match@(HsMatch loc1 pnt pats rhs ds)::HsMatchP)
| locToExp beginPos endPos toks rhs /= defaultExp
= Just (pnt, locToExp beginPos endPos toks rhs)
inMatch _ = Nothing
--The selected sub-expression is in the rhs of a pattern-binding
inPat (pat@(Dec (HsPatBind loc1 ps rhs ds))::HsDeclP)
| locToExp beginPos endPos toks rhs /= defaultExp
= if isSimplePatBind pat
then Just (patToPNT ps, locToExp beginPos endPos toks rhs)
else error "A complex pattern binding can not be generalised!"
inPat _ = Nothing
doSubstitution p e = applyTP (stop_tdTP (failTP `adhocTP` subExp))
where
subExp exp@((Exp _)::HsExpP)
| sameOccurrence exp e == False = if toRelativeLocs e == toRelativeLocs exp then
update exp (createFunc p) exp
else
mzero
| otherwise = mzero
createFunc pat =
(Exp (HsId (HsVar pat)))
| forste/haReFork | refactorer/RefacConDef.hs | bsd-3-clause | 2,849 | 0 | 14 | 1,146 | 726 | 391 | 335 | -1 | -1 |
import Control.Applicative
import qualified Data.ByteString as BS
import Data.MessagePack
main = do
sb <- newSimpleBuffer
pc <- newPacker sb
pack pc [1,2,3::Int]
pack pc True
pack pc "hoge"
bs <- simpleBufferData sb
os <- unpackObjectsFromString bs
mapM_ print os
| tanakh/hsmsgpack | test/Stream.hs | bsd-3-clause | 284 | 0 | 8 | 59 | 105 | 50 | 55 | 12 | 1 |
module Style.ParserSpec (spec) where
import qualified Data.Map as M
import Test.Hspec
import Style.Color
import Style.Types
import Style.Parser
style :: [(PropKey, PropVal)] -> Style
style = Style . M.fromAscList
testRed s = parseStyle ("background-color:" ++ s) `shouldBe` style [(BackgroundColor, red)]
spec :: Spec
spec = do
describe "parseStyle" $ do
describe "colors" $ do
mapM_ (\(name, val) -> it name $ testRed val) [
("can parse color names", "red")
, ("can parse three-digit hex codes", "#f00")
, ("can parse six-digit hex codes", "#ff0000")
, ("can parse rgb() values", "rgb(255,0,0)")
, ("can parse rgb() values clipped above", "rgb(300,0,0)")
, ("can parse rgb() values clipped below", "rgb(255,-10,0)")
, ("can parse rgb() values with percentages", "rgb(110%, 0%, 0%)")
]
it "can't parse rgb() values with mixed units" $ do
parseStyle "background-color:rgb(0,100%,7)" `shouldBe` style []
it "can parse pixels" $ do
parseStyle "width: 500px" `shouldBe` style [(Width, NumUnit 500 Px)]
it "can parse percentages" $ do
parseStyle "width: 100%" `shouldBe` style [(Width, NumUnit 100 Percent)]
| forestbelton/orb | test/Style/ParserSpec.hs | bsd-3-clause | 1,317 | 0 | 18 | 374 | 334 | 184 | 150 | 27 | 1 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
module Data.XML.Pickle.Basic where
import Control.Applicative
import Control.Monad (ap)
import qualified Control.Category as Cat
import Control.Exception (Exception)
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Typeable
import Data.XML.Types
data PU t a = PU
{ unpickleTree :: t -> UnpickleResult t a
, pickleTree :: a -> t
}
data UnpickleResult t a = UnpickleError UnpickleError
| NoResult Text -- ^ Not found, description of element
| Result a (Maybe t) -- ^ Result and remainder. The
-- remainder is wrapped in @Maybe@
-- to avoid a @Monoid@ constraint
-- on @t@
--
-- /Invariant/: When @t@ is a
-- @Monoid@, the empty remainder
-- should always be @Nothing@
-- instead of @Just mempty@
deriving (Functor, Show)
instance Applicative (UnpickleResult t) where
pure = return
(<*>) = ap
instance Monad (UnpickleResult t) where
return x = Result x Nothing
Result x r >>= f = case f x of
Result y r' -> Result y (if isJust r then r else r')
y -> y
UnpickleError e >>= _ = UnpickleError e
NoResult e >>= _ = NoResult e
data UnpickleError = ErrorMessage Text
| TraceStep (Text, Text) UnpickleError
| Variants [UnpickleError]
deriving (Show, Typeable)
instance Exception UnpickleError
infixl 6 <++>
(<++>) :: (Text, Text) -> UnpickleError -> UnpickleError
(<++>) = TraceStep
mapUnpickleError :: (UnpickleError -> UnpickleError)
-> UnpickleResult t a
-> UnpickleResult t a
mapUnpickleError f (UnpickleError e) = UnpickleError $ f e
mapUnpickleError _ x = x
missing :: String -> UnpickleError
missing e = upe $ "Entity not found: " ++ e
missingE :: String -> UnpickleResult t a
missingE = UnpickleError . missing
upe :: String -> UnpickleError
upe e = ErrorMessage (Text.pack e)
showTr :: (Text, Text) -> String
showTr (name, "") = Text.unpack name
showTr (name, extra) = concat [Text.unpack name , " (", Text.unpack extra, ")"]
printUPE :: UnpickleError -> [String]
printUPE (ErrorMessage m) = [Text.unpack m]
printUPE (Variants vs) = concat
. zipWith (:) (map (\x -> show x ++ ")") [(1 :: Int)..])
. map (map ( " " ++))
$ (printUPE <$> vs)
printUPE (TraceStep t es) =
let (n, es') = collapsSteps t es
in ("-> " ++ showTr t
++ if n > 0
then (" [x" ++ show (n+1) ++"]" )
else "")
: printUPE es'
where
collapsSteps t (TraceStep t' ns) | t == t'
= let (n, ns') = collapsSteps t ns
in (n+1, ns')
collapsSteps _ es = (0, es)
ppUnpickleError :: UnpickleError -> String
ppUnpickleError e = "Error while unpickling:\n"
++ unlines (map (" " ++) (printUPE e))
leftoverE :: String -> UnpickleResult t a
leftoverE l = UnpickleError . upe $ "Leftover Entities" ++ if null l then "" else
": " ++ l
child :: Show a => PU a b -> a -> UnpickleResult t b
child xp v = case unpickleTree xp v of
UnpickleError e -> UnpickleError e
NoResult e -> missingE $ Text.unpack e
Result _ (Just es) -> leftoverE $ show es
Result r Nothing -> Result r Nothing
child' :: PU t a -> t -> UnpickleResult t1 a
child' xp v = case unpickleTree xp v of
UnpickleError e -> UnpickleError e
NoResult e -> missingE $ Text.unpack e
Result _ (Just _es) -> leftoverE ""
Result r Nothing -> Result r Nothing
leftover :: Maybe t -> UnpickleResult t ()
leftover = Result ()
remList :: [t] -> Maybe [t]
remList [] = Nothing
remList xs = Just xs
mapError :: (UnpickleError -> UnpickleError) -> PU t a -> PU t a
mapError f xp = PU { unpickleTree = mapUnpickleError f . unpickleTree xp
, pickleTree = pickleTree xp
}
infixl 6 <++.>
(<++.>) :: (Text, Text) -> UnpickleResult t a -> UnpickleResult t a
(<++.>) s = mapUnpickleError (s <++>)
infixr 0 <?>
-- | Override the last backtrace level in the error report.
(<?>) :: (Text, Text) -> PU t a -> PU t a
(<?>) tr = mapError (swapStack tr)
where
swapStack ns (TraceStep _s e) = TraceStep ns e
swapStack _ns e = error $ "Can't replace non-trace step: " ++ show e
(<??>) :: Text -> PU t a -> PU t a
(<??>) tr = mapError (swapStack tr)
where
swapStack ns (TraceStep (_,s) e) = TraceStep (ns,s) e
swapStack _ns e = error $ "Can't replace non-trace step: " ++ show e
infixr 1 <?+>
-- | Add a back trace level to the error report.
(<?+>) :: (Text, Text) -> PU t a -> PU t a
(<?+>) tr = mapError (tr <++>)
data UnresolvedEntityException = UnresolvedEntityException
deriving (Typeable, Show)
instance Exception UnresolvedEntityException
ppName :: Name -> String
ppName (Name local ns pre) = let
ns' = case ns of
Nothing -> []
Just ns'' -> ["{", Text.unpack ns'',"}"]
pre' = case pre of
Nothing -> []
Just pre'' -> [Text.unpack pre'',":"]
in concat . concat $ [["\""],ns', pre', [Text.unpack local], ["\""]]
instance Cat.Category PU where
id = PU (\t -> Result t Nothing) id
g . f = PU { pickleTree = pickleTree f . pickleTree g
, unpickleTree = \val -> case unpickleTree f val of
UnpickleError e -> UnpickleError e
NoResult e -> NoResult e
Result resf re -> case unpickleTree g resf of
UnpickleError e -> UnpickleError e
NoResult e -> NoResult e
Result _ (Just _) -> leftoverE ""
Result resg Nothing -> Result resg re
}
| Philonous/xml-picklers | src/Data/XML/Pickle/Basic.hs | bsd-3-clause | 6,268 | 0 | 17 | 2,133 | 2,016 | 1,048 | 968 | 133 | 4 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ExplicitForAll #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
-- | 'HeaderName's define semantics for 'Text' values seen in HTTP headers
-- over the wire. This module provides classes to map both to and from
-- these representations.
module Network.HTTP.Kinder.Header.Serialization (
-- * Classes for encoding and decoding
HeaderEncode (..)
, HeaderDecode (..)
-- ** Listing constraints to type-level lists
, AllHeaderEncodes
, AllHeaderDecodes
-- * Extra serialization utilities
, headerEncodePair
, headerEncodeBS
, headerDecodeBS
-- * Utilities for writing serialization instances
, displaySetOpt
, uniqueSet
, required
, withDefault
) where
import qualified Data.ByteString as S
import Data.CaseInsensitive (CI)
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Singletons
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text
import Data.Time
import GHC.Exts
import Network.HTTP.Kinder.Common
import Network.HTTP.Kinder.Header.Definitions
import Network.HTTP.Kinder.Verb
import Network.HTTP.Media (MediaType, Quality)
import qualified Network.HTTP.Media as Media
-- | Determines a 'Text' representation for some value to be encoded as
-- a value of a given 'HeaderName'. Any proxy can be passed as the first
-- argument, although 'Sing' is a nice one to choose. Encodings may choose
-- to not be represented on the wire at all as desired by returning
-- 'Nothing'. This implies default behavior.
class HeaderEncode (n :: HeaderName) a where
headerEncode :: sing n -> a -> Maybe Text
-- | A type-level list of pairs @hs@ satisfies @'AllHeaderEncodes' hs@ if
-- each pair @'(n, a)@ in @hs@ has a @'HeaderEncode' n a@ instance.
type family AllHeaderEncodes hs :: Constraint where
AllHeaderEncodes '[] = ()
AllHeaderEncodes ( '(n, a) ': hs ) = (HeaderEncode n a, AllHeaderEncodes hs)
-- | Encode a 'HeaderName' singleton and a 'HeaderEncode'-represented value
-- as a pair of name and representation, ready to be sent over the wire.
headerEncodePair
:: forall a (n :: HeaderName)
. HeaderEncode n a => Sing n -> a -> Maybe (CI S.ByteString, S.ByteString)
headerEncodePair s a = do
bs <- headerEncodeBS s a
return (headerName s, bs)
-- | While the semantics of HTTP headers are built off of 'Text'-like
-- values, usually we require a 'S.ByteString' for emission. This helper
-- function converts a header value directly to a 'S.ByteString'.
headerEncodeBS :: HeaderEncode n a => sing n -> a -> Maybe S.ByteString
headerEncodeBS s = fmap Text.encodeUtf8 . headerEncode s
-- | Interprets a (possibly missing) 'Text' representation for some value
-- taking semantics at a given 'HeaderName'. Any proxy can be passed as the
-- first argument, although 'Sing' is a nice one to choose. If a value is
-- expected and no representation is provided then 'Nothing' can be passed
-- seeking a default value (should one exist).
class HeaderDecode (n :: HeaderName) a where
headerDecode :: sing n -> Maybe Text -> Either String a
-- | A type-level list of pairs @hs@ satisfies @'AllHeaderDecodes' hs@ if
-- each pair @'(n, a)@ in @hs@ has a @'HeaderDecode' n a@ instance.
type family AllHeaderDecodes hs :: Constraint where
AllHeaderDecodes '[] = ()
AllHeaderDecodes ( '(n, a) ': hs ) = (HeaderDecode n a, AllHeaderDecodes hs)
-- | While HTTP header semantics are built off of 'Text'-like values, we
-- usually read a raw 'S.ByteString' from the wire. This helper function
-- combines a 'HeaderDecode' with a UTF-8 decode so as to attempt to
-- deserialize header values directly from a 'S.ByteString'.
headerDecodeBS :: HeaderDecode n a => sing n -> Maybe S.ByteString -> Either String a
headerDecodeBS proxy mays =
case mays of
Nothing -> headerDecode proxy Nothing
Just s ->
case Text.decodeUtf8' s of
Left err -> Left (show err)
Right t -> headerDecode proxy (Just t)
-- Instances/Encoding
-- ----------------------------------------------------------------------------
-- | Output a set of text items as a comma-delimited list OR return nothing
-- if the set is empty
displaySetOpt :: Set Text -> Maybe Text
displaySetOpt s
| Set.null s = Nothing
| otherwise = Just (Text.intercalate "," (Set.toList s))
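-- For example (illustrative; 'Set.toList' yields ascending order):
--
-- >>> displaySetOpt (Set.fromList ["chunked", "gzip"])
-- Just "chunked,gzip"
-- >>> displaySetOpt Set.empty
-- Nothing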
-- | Extend a 'HeaderEncode' instance on @'Set' v@ to @[v]@.
uniqueSet :: (Ord v, HeaderEncode n (Set v)) => sing n -> [v] -> Maybe Text
uniqueSet s = headerEncode s . Set.fromList
-- | Reports a "raw" value without interpretation
instance HeaderEncode n (Raw Text) where
headerEncode _ (Raw t) = Just t
instance HeaderEncode 'Allow (Set Verb) where
headerEncode _ = displaySetOpt . Set.map verbName
instance HeaderEncode 'Allow [Verb] where
headerEncode = uniqueSet
instance HeaderEncode 'AccessControlExposeHeaders (Set SomeHeaderName) where
headerEncode _ = displaySetOpt . Set.map headerName' where
headerName' (SomeHeaderName h) = headerName h
instance HeaderEncode 'AccessControlExposeHeaders [SomeHeaderName] where
headerEncode = uniqueSet
instance HeaderEncode 'AccessControlAllowHeaders (Set SomeHeaderName) where
headerEncode _ = displaySetOpt . Set.map headerName' where
headerName' (SomeHeaderName h) = headerName h
instance HeaderEncode 'AccessControlAllowHeaders [SomeHeaderName] where
headerEncode = uniqueSet
instance HeaderEncode 'AccessControlMaxAge NominalDiffTime where
headerEncode _ ndt = Just $ Text.pack (show (round ndt :: Int))
instance HeaderEncode 'AccessControlAllowOrigin Text where
headerEncode _ org = Just org
instance HeaderEncode 'AccessControlAllowMethods (Set Verb) where
headerEncode _ = displaySetOpt . Set.map verbName
instance HeaderEncode 'AccessControlAllowMethods [Verb] where
headerEncode = uniqueSet
instance HeaderEncode 'AccessControlAllowCredentials Bool where
headerEncode _ ok = Just (if ok then "true" else "false")
instance HeaderEncode 'ContentType MediaType where
headerEncode _ mt =
case Text.decodeUtf8' (Media.renderHeader mt) of
Left _err -> Nothing
Right txt -> Just txt
-- | Any value can be forced as optional if desired
instance HeaderEncode h t => HeaderEncode h (Maybe t) where
headerEncode p v = v >>= headerEncode p
-- Instances/Decoding
-- ----------------------------------------------------------------------------
-- | Fail to decode if there is no header. For headers which lack default
-- values. If a header lacks a natural default then avoiding failure should
-- be /explicitly/ requested in the types by wrapping it with a 'Maybe'.
required :: (Text -> Either String a) -> Maybe Text -> Either String a
required _ Nothing = Left "missing header value"
required f (Just t) = f t
-- | For headers with natural notions of default values.
withDefault :: a -> (Text -> Either String a) -> (Maybe Text -> Either String a)
withDefault def _ Nothing = Right def
withDefault _ f (Just a) = f a
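-- For example (illustrative):
--
-- >>> required Right Nothing :: Either String Text
-- Left "missing header value"
-- >>> withDefault ("*" :: Text) Right Nothing
-- Right "*"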
instance HeaderDecode Accept [Quality MediaType] where
headerDecode _ = withDefault [] parser where
parser txt =
case Media.parseQuality (Text.encodeUtf8 txt) of
Nothing -> Left "malformed accept header"
Just mts -> Right mts
instance HeaderDecode ContentType MediaType where
headerDecode _ = required $ \txt ->
case Media.parseAccept (Text.encodeUtf8 txt) of
Nothing -> Left "malformed content type"
Just ct -> Right ct
-- | Returns the raw header value
instance HeaderDecode n (Raw Text) where
headerDecode _ = required $ \text -> Right (Raw text)
-- | Any value may be only optionally captured as desired
instance HeaderDecode h t => HeaderDecode h (Maybe t) where
headerDecode _ Nothing = Right Nothing
headerDecode p (Just t) = fmap Just (headerDecode p (Just t))
| tel/serv | http-kinder/src/Network/HTTP/Kinder/Header/Serialization.hs | bsd-3-clause | 8,388 | 0 | 13 | 1,790 | 1,719 | 907 | 812 | 124 | 3 |
module Paths_qed where
getDataDir :: IO FilePath
getDataDir = return "."
| ndmitchell/qed | src/Paths.hs | bsd-3-clause | 75 | 0 | 5 | 13 | 20 | 11 | 9 | 3 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.SBV.Examples.Uninterpreted.Deduce
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Demonstrates uninterpreted sorts and how they can be used for deduction.
-- This example is inspired by the discussion at <http://stackoverflow.com/questions/10635783/using-axioms-for-deductions-in-z3>,
-- essentially showing how to show the required deduction using SBV.
-----------------------------------------------------------------------------
{-# LANGUAGE DeriveDataTypeable #-}
module Data.SBV.Examples.Uninterpreted.Deduce where
import Data.Generics
import Data.SBV
-- we will have our own "uninterpreted" functions corresponding
-- to not/or/and, so hide their Prelude counterparts.
import Prelude hiding (not, or, and)
-----------------------------------------------------------------------------
-- * Representing uninterpreted booleans
-----------------------------------------------------------------------------
-- | The uninterpreted sort 'B', corresponding to the carrier.
-- To prevent SBV from translating it to an enumerated type, we simply attach an unused field
data B = B ()
deriving (Eq, Ord, Data, Typeable, Read, Show)
-- | Default instance declaration for 'SymWord'
instance SymWord B
-- | Default instance declaration for 'HasKind'
instance HasKind B
-- | Handy shortcut for the type of symbolic values over 'B'
type SB = SBV B
-----------------------------------------------------------------------------
-- * Uninterpreted connectives over 'B'
-----------------------------------------------------------------------------
-- | Uninterpreted logical connective 'and'
and :: SB -> SB -> SB
and = uninterpret "AND"
-- | Uninterpreted logical connective 'or'
or :: SB -> SB -> SB
or = uninterpret "OR"
-- | Uninterpreted logical connective 'not'
not :: SB -> SB
not = uninterpret "NOT"
-----------------------------------------------------------------------------
-- * Axioms of the logical system
-----------------------------------------------------------------------------
-- | Distributivity of OR over AND, as an axiom in terms of
-- the uninterpreted functions we have introduced. Note how
-- variables range over the uninterpreted sort 'B'.
ax1 :: [String]
ax1 = [ "(assert (forall ((p B) (q B) (r B))"
, " (= (AND (OR p q) (OR p r))"
, " (OR p (AND q r)))))"
]
-- | One of De Morgan's laws, again as an axiom in terms
-- of our uninterpreted logical connectives.
ax2 :: [String]
ax2 = [ "(assert (forall ((p B) (q B))"
, " (= (NOT (OR p q))"
, " (AND (NOT p) (NOT q)))))"
]
-- | Double negation axiom, similar to the above.
ax3 :: [String]
ax3 = ["(assert (forall ((p B)) (= (NOT (NOT p)) p)))"]
-----------------------------------------------------------------------------
-- * Demonstrated deduction
-----------------------------------------------------------------------------
-- | Proves the equivalence @NOT (p OR (q AND r)) == (NOT p AND NOT q) OR (NOT p AND NOT r)@,
-- following from the axioms we have specified above. We have:
--
-- >>> test
-- Q.E.D.
test :: IO ThmResult
test = prove $ do addAxiom "OR distributes over AND" ax1
addAxiom "de Morgan" ax2
addAxiom "double negation" ax3
p <- free "p"
q <- free "q"
r <- free "r"
return $ not (p `or` (q `and` r))
.== (not p `and` not q) `or` (not p `and` not r)
| Copilot-Language/sbv-for-copilot | Data/SBV/Examples/Uninterpreted/Deduce.hs | bsd-3-clause | 3,661 | 0 | 15 | 731 | 399 | 237 | 162 | 35 | 1 |
{-# LANGUAGE OverloadedStrings, TemplateHaskell, QuasiQuotes, TypeFamilies, MultiParamTypeClasses, ViewPatterns #-}
module YesodCoreTest.Reps
( specs
, Widget
, resourcesApp
) where
import Yesod.Core
import Test.Hspec
import Network.Wai
import Network.Wai.Test
import Data.ByteString.Lazy (ByteString)
import qualified Data.ByteString.Char8 as S8
import Data.String (IsString)
import Data.Text (Text)
import Data.Maybe (fromJust)
import Data.Monoid (Endo (..))
import qualified Control.Monad.Trans.Writer as Writer
import qualified Data.Set as Set
data App = App
mkYesod "App" [parseRoutes|
/ HomeR GET !home
/json JsonR GET
/parent/#Int ParentR:
/#Text/child ChildR !child
|]
instance Yesod App
specialHtml :: IsString a => a
specialHtml = "text/html; charset=special"
getHomeR :: Handler TypedContent
getHomeR = selectRep $ do
rep typeHtml "HTML"
rep specialHtml "HTMLSPECIAL"
rep typeXml "XML"
rep typeJson "JSON"
rep :: Monad m => ContentType -> Text -> Writer.Writer (Data.Monoid.Endo [ProvidedRep m]) ()
rep ct t = provideRepType ct $ return (t :: Text)
getJsonR :: Handler TypedContent
getJsonR = selectRep $ do
rep typeHtml "HTML"
provideRep $ return $ object ["message" .= ("Invalid Login" :: Text)]
handleChildR :: Int -> Text -> Handler ()
handleChildR _ _ = return ()
testRequest :: Int -- ^ http status code
-> Request
-> ByteString -- ^ expected body
-> Spec
testRequest status req expected = it (S8.unpack $ fromJust $ lookup "Accept" $ requestHeaders req) $ do
app <- toWaiApp App
flip runSession app $ do
sres <- request req
assertStatus status sres
assertBody expected sres
test :: String -- ^ accept header
-> ByteString -- ^ expected body
-> Spec
test accept expected =
testRequest 200 (acceptRequest accept) expected
acceptRequest :: String -> Request
acceptRequest accept = defaultRequest
{ requestHeaders = [("Accept", S8.pack accept)]
}
specs :: Spec
specs = do
describe "selectRep" $ do
test "application/json" "JSON"
test (S8.unpack typeJson) "JSON"
test "text/xml" "XML"
test (S8.unpack typeXml) "XML"
test "text/xml,application/json" "XML"
test "text/xml;q=0.9,application/json;q=1.0" "JSON"
test (S8.unpack typeHtml) "HTML"
test "text/html" "HTML"
test specialHtml "HTMLSPECIAL"
testRequest 200 (acceptRequest "application/json") { pathInfo = ["json"] } "{\"message\":\"Invalid Login\"}"
testRequest 406 (acceptRequest "text/foo") "no match found for accept header"
test "text/*" "HTML"
test "*/*" "HTML"
describe "routeAttrs" $ do
it "HomeR" $ routeAttrs HomeR `shouldBe` Set.singleton "home"
it "JsonR" $ routeAttrs JsonR `shouldBe` Set.empty
it "ChildR" $ routeAttrs (ParentR 5 $ ChildR "ignored") `shouldBe` Set.singleton "child"
| psibi/yesod | yesod-core/test/YesodCoreTest/Reps.hs | mit | 2,897 | 0 | 15 | 600 | 821 | 411 | 410 | 74 | 1 |
{-# LANGUAGE CPP, ScopedTypeVariables, OverloadedStrings #-}
-----------------------------------------------------------------------------
--
-- Module : IDE.LogRef
-- Copyright : (c) Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GNU-GPL
--
-- Maintainer : <maintainer at leksah.org>
-- Stability : provisional
-- Portability : portable
--
--
-- |
--
---------------------------------------------------------------------------------
module IDE.LogRef (
nextError
, previousError
, nextBreakpoint
, previousBreakpoint
, markLogRefs
, unmarkLogRefs
, defaultLineLogger
, defaultLineLogger'
, logOutputLines
, logOutputLines_
, logOutputLines_Default
, logOutput
, logOutputDefault
, logOutputPane
, logOutputForBuild
, logOutputForBreakpoints
, logOutputForSetBreakpoint
, logOutputForSetBreakpointDefault
, logOutputForLiveContext
, logOutputForLiveContextDefault
, logOutputForHistoricContext
, logOutputForHistoricContextDefault
, selectRef
, setBreakpointList
, showSourceSpan
, srcSpanParser
) where
import Graphics.UI.Gtk
import Control.Monad.Reader
import Text.ParserCombinators.Parsec.Language
import Text.ParserCombinators.Parsec hiding(Parser)
import qualified Text.ParserCombinators.Parsec.Token as P
import IDE.Core.State
import IDE.TextEditor
import IDE.Pane.SourceBuffer
import qualified IDE.Pane.Log as Log
import IDE.Utils.Tool
import System.FilePath (equalFilePath)
import Data.List (stripPrefix, elemIndex, isPrefixOf)
import Data.Maybe (catMaybes, isJust)
import System.Exit (ExitCode(..))
import System.Log.Logger (debugM)
import IDE.Utils.FileUtils(myCanonicalizePath)
import IDE.Pane.Log (getDefaultLogLaunch, IDELog(..), getLog)
import qualified Data.Conduit as C
import qualified Data.Conduit.List as CL
import Data.Conduit ((=$))
import IDE.Pane.WebKit.Output(setOutput)
import Data.IORef (atomicModifyIORef, IORef, readIORef)
import Data.Text (Text)
import Control.Applicative ((<$>))
import qualified Data.Text as T
(stripPrefix, isPrefixOf, unpack, unlines, pack, null)
import Data.Monoid ((<>))
showSourceSpan :: LogRef -> Text
showSourceSpan = T.pack . displaySrcSpan . logRefSrcSpan
selectRef :: Maybe LogRef -> IDEAction
selectRef (Just ref) = do
logRefs <- readIDE allLogRefs
case elemIndex ref logRefs of
Nothing -> liftIO $ debugM "leksah" "no index" >> return ()
Just index -> do
mbBuf <- selectSourceBuf (logRefFullFilePath ref)
case mbBuf of
Just buf -> markRefInSourceBuf index buf ref True
Nothing -> liftIO $ debugM "leksah" "no buf" >> return ()
log :: Log.IDELog <- Log.getLog
Log.markErrorInLog log (logLines ref)
selectRef Nothing = return ()
forOpenLogRefs :: (Int -> LogRef -> IDEBuffer -> IDEAction) -> IDEAction
forOpenLogRefs f = do
logRefs <- readIDE allLogRefs
allBufs <- allBuffers
forM_ [0 .. ((length logRefs)-1)] (\index -> do
let ref = logRefs !! index
fp = logRefFullFilePath ref
fpc <- liftIO $ myCanonicalizePath fp
forM_ (filter (\buf -> case (fileName buf) of
Just fn -> equalFilePath fpc fn
Nothing -> False) allBufs) (f index ref))
markLogRefs :: IDEAction
markLogRefs = do
forOpenLogRefs $ \index logRef buf -> markRefInSourceBuf index buf logRef False
unmarkLogRefs :: IDEAction
unmarkLogRefs = do
forOpenLogRefs $ \index logRef (IDEBuffer {sourceView = sv}) -> do
buf <- getBuffer sv
removeTagByName buf (T.pack $ show (logRefType logRef) ++ show index)
setErrorList :: [LogRef] -> IDEAction
setErrorList errs = do
unmarkLogRefs
breaks <- readIDE breakpointRefs
contexts <- readIDE contextRefs
modifyIDE_ (\ide -> ide{allLogRefs = errs ++ breaks ++ contexts})
setCurrentError Nothing
markLogRefs
triggerEventIDE ErrorChanged
return ()
setBreakpointList :: [LogRef] -> IDEAction
setBreakpointList breaks = do
ideR <- ask
unmarkLogRefs
errs <- readIDE errorRefs
contexts <- readIDE contextRefs
modifyIDE_ (\ide -> ide{allLogRefs = errs ++ breaks ++ contexts})
setCurrentBreak Nothing
markLogRefs
triggerEventIDE BreakpointChanged
return ()
addLogRefs :: [LogRef] -> IDEAction
addLogRefs refs = do
ideR <- ask
unmarkLogRefs
modifyIDE_ (\ide -> ide{allLogRefs = (allLogRefs ide) ++ refs})
setCurrentError Nothing
markLogRefs
triggerEventIDE ErrorChanged
triggerEventIDE BreakpointChanged
triggerEventIDE TraceChanged
return ()
nextError :: IDEAction
nextError = do
errs <- readIDE errorRefs
currentError <- readIDE currentError
if null errs
then return ()
else do
let new = case currentError of
Nothing -> 0
Just ref ->
case elemIndex ref errs of
Nothing -> 0
Just n | (n + 1) < length errs -> (n + 1)
Just n -> n
setCurrentError (Just $ errs!!new)
selectRef $ Just (errs!!new)
previousError :: IDEAction
previousError = do
errs <- readIDE errorRefs
currentError <- readIDE currentError
if null errs
then return ()
else do
let new = case currentError of
Nothing -> (length errs - 1)
Just ref ->
case elemIndex ref errs of
Nothing -> (length errs - 1)
Just n | n > 0 -> (n - 1)
Just n -> 0
setCurrentError (Just $ errs!!new)
selectRef $ Just (errs!!new)
nextBreakpoint :: IDEAction
nextBreakpoint = do
breaks <- readIDE breakpointRefs
currentBreak <- readIDE currentBreak
if null breaks
then return ()
else do
let new = case currentBreak of
Nothing -> 0
Just ref ->
case elemIndex ref breaks of
Nothing -> 0
Just n | (n + 1) < length breaks -> (n + 1)
Just n -> n
setCurrentBreak (Just $ breaks!!new)
selectRef $ Just (breaks!!new)
previousBreakpoint :: IDEAction
previousBreakpoint = do
breaks <- readIDE breakpointRefs
currentBreak <- readIDE currentBreak
if null breaks
then return ()
else do
let new = case currentBreak of
Nothing -> (length breaks - 1)
Just ref ->
case elemIndex ref breaks of
Nothing -> (length breaks - 1)
Just n | n > 0 -> (n - 1)
Just n -> 0
setCurrentBreak (Just $ breaks!!new)
selectRef $ Just (breaks!!new)
nextContext :: IDEAction
nextContext = do
contexts <- readIDE contextRefs
currentContext <- readIDE currentContext
if null contexts
then return ()
else do
let new = case currentContext of
Nothing -> 0
Just ref ->
case elemIndex ref contexts of
Nothing -> 0
Just n | (n + 1) < length contexts -> (n + 1)
Just n -> n
setCurrentContext (Just $ contexts!!new)
selectRef $ Just (contexts!!new)
previousContext :: IDEAction
previousContext = do
contexts <- readIDE contextRefs
currentContext <- readIDE currentContext
if null contexts
then return ()
else do
let new = case currentContext of
Nothing -> (length contexts - 1)
Just ref ->
case elemIndex ref contexts of
Nothing -> (length contexts - 1)
Just n | n > 0 -> (n - 1)
Just n -> 0
setCurrentContext (Just $ contexts!!new)
selectRef $ Just (contexts!!new)
lastContext :: IDEAction
lastContext = do
contexts <- readIDE contextRefs
currentContext <- readIDE currentContext
if null contexts
then return ()
else do
let new = (last contexts)
setCurrentContext (Just new)
selectRef $ Just new
#if MIN_VERSION_ghc(7,0,1)
fixColumn c = max 0 (c - 1)
#else
fixColumn = id
#endif
srcPathParser :: CharParser () FilePath
srcPathParser = try (do
symbol "dist/build/tmp-" -- Support for cabal haddock
many digit
char '/'
many (noneOf ":"))
<|> many (noneOf ":")
srcSpanParser :: CharParser () SrcSpan
srcSpanParser = try (do
filePath <- srcPathParser
char ':'
char '('
beginLine <- int
char ','
beginCol <- int
char ')'
char '-'
char '('
endLine <- int
char ','
endCol <- int
char ')'
return $ SrcSpan filePath beginLine (fixColumn beginCol) endLine (fixColumn endCol))
<|> try (do
filePath <- srcPathParser
char ':'
line <- int
char ':'
beginCol <- int
char '-'
endCol <- int
return $ SrcSpan filePath line (fixColumn beginCol) line (fixColumn endCol))
<|> try (do
filePath <- srcPathParser
char ':'
line <- int
char ':'
col <- int
return $ SrcSpan filePath line (fixColumn col) line (fixColumn col))
<?> "srcLocParser"
data BuildError = BuildLine
| EmptyLine
| ErrorLine SrcSpan LogRefType Text
| WarningLine Text
| OtherLine Text
buildLineParser :: CharParser () BuildError
buildLineParser = try (do
char '['
int
symbol "of"
int
char ']'
many (anyChar)
return BuildLine)
<|> try (do
whiteSpace
span <- srcSpanParser
char ':'
whiteSpace
refType <- try (do
symbol "Warning:"
return WarningRef)
<|> return ErrorRef
text <- T.pack <$> many anyChar
return (ErrorLine span refType text))
<|> try (do
whiteSpace
eof
return EmptyLine)
<|> try (do
whiteSpace
symbol "Warning:"
text <- T.pack <$> many anyChar
return (WarningLine ("Warning:" <> text)))
<|> try (do
text <- T.pack <$> many anyChar
eof
return (OtherLine text))
<?> "buildLineParser"
data BreakpointDescription = BreakpointDescription Int SrcSpan
breaksLineParser :: CharParser () BreakpointDescription
breaksLineParser = try (do
char '['
n <- int
char ']'
whiteSpace
many (noneOf " ")
whiteSpace
span <- srcSpanParser
return (BreakpointDescription n span))
<?> "buildLineParser"
setBreakpointLineParser :: CharParser () BreakpointDescription
setBreakpointLineParser = try (do
symbol "Breakpoint"
whiteSpace
n <- int
whiteSpace
symbol "activated"
whiteSpace
symbol "at"
whiteSpace
span <- srcSpanParser
return (BreakpointDescription n span))
<?> "setBreakpointLineParser"
lexer = P.makeTokenParser emptyDef
lexeme = P.lexeme lexer
whiteSpace = P.whiteSpace lexer
hexadecimal = P.hexadecimal lexer
symbol = P.symbol lexer
identifier = P.identifier lexer
colon = P.colon lexer
int = fmap fromInteger $ P.integer lexer
defaultLineLogger :: IDELog -> LogLaunch -> ToolOutput -> IDEM Int
defaultLineLogger log logLaunch out = liftIO $ defaultLineLogger' log logLaunch out
defaultLineLogger' :: IDELog -> LogLaunch -> ToolOutput -> IO Int
defaultLineLogger' log logLaunch out = do
case out of
ToolInput line -> appendLog' (line <> "\n") InputTag
ToolOutput line -> appendLog' (line <> "\n") LogTag
ToolError line -> appendLog' (line <> "\n") ErrorTag
ToolPrompt line -> do
unless (T.null line) $ appendLog' (line <> "\n") LogTag >> return ()
appendLog' (T.pack (concat (take 20 (repeat "- "))) <> "-\n") FrameTag
ToolExit ExitSuccess -> appendLog' (T.pack (take 41 (repeat '-')) <> "\n") FrameTag
ToolExit (ExitFailure 1) -> appendLog' (T.pack (take 41 (repeat '=')) <> "\n") FrameTag
ToolExit (ExitFailure n) -> appendLog' (T.pack (take 41 ("========== " ++ show n <> " " ++ repeat '=')) <> "\n") FrameTag
where
appendLog' = Log.appendLog log logLaunch
paneLineLogger :: IDELog -> LogLaunch -> ToolOutput -> IDEM (Maybe Text)
paneLineLogger log logLaunch out = liftIO $ paneLineLogger' log logLaunch out
paneLineLogger' :: IDELog -> LogLaunch -> ToolOutput -> IO (Maybe Text)
paneLineLogger' log logLaunch out = do
case out of
ToolInput line -> appendLog' (line <> "\n") InputTag >> return Nothing
ToolOutput line -> appendLog' (line <> "\n") LogTag >> return (Just line)
ToolError line -> appendLog' (line <> "\n") ErrorTag >> return Nothing
ToolPrompt line -> do
unless (T.null line) $ appendLog' (line <> "\n") LogTag >> return ()
appendLog' (T.pack (concat (take 20 (repeat "- "))) <> "-\n") FrameTag
return Nothing
ToolExit ExitSuccess -> appendLog' (T.pack (take 41 (repeat '-')) <> "\n") FrameTag >> return Nothing
ToolExit (ExitFailure 1) -> appendLog' (T.pack (take 41 (repeat '=')) <> "\n") FrameTag >> return Nothing
ToolExit (ExitFailure n) -> appendLog' (T.pack (take 41 ("========== " ++ show n ++ " " ++ repeat '=')) <> "\n") FrameTag >> return Nothing
where
appendLog' = Log.appendLog log logLaunch
logOutputLines :: LogLaunch -- ^ logLaunch
-> (IDELog -> LogLaunch -> ToolOutput -> IDEM a)
-> C.Sink ToolOutput IDEM [a]
logOutputLines logLaunch lineLogger = do
log :: Log.IDELog <- lift $ postSyncIDE Log.getLog
results <- (CL.mapM $ postSyncIDE . lineLogger log logLaunch) =$ CL.consume
lift $ triggerEventIDE (StatusbarChanged [CompartmentState "", CompartmentBuild False])
return results
logOutputLines_ :: LogLaunch
-> (IDELog -> LogLaunch -> ToolOutput -> IDEM a)
-> C.Sink ToolOutput IDEM ()
logOutputLines_ logLaunch lineLogger = do
logOutputLines logLaunch lineLogger
return ()
logOutputLines_Default :: (IDELog -> LogLaunch -> ToolOutput -> IDEM a)
-> C.Sink ToolOutput IDEM ()
logOutputLines_Default lineLogger = do
defaultLogLaunch <- lift $ getDefaultLogLaunch
logOutputLines_ defaultLogLaunch lineLogger
logOutput :: LogLaunch
-> C.Sink ToolOutput IDEM ()
logOutput logLaunch = do
logOutputLines logLaunch defaultLineLogger
return ()
logOutputDefault :: C.Sink ToolOutput IDEM ()
logOutputDefault = do
defaultLogLaunch <- lift $ getDefaultLogLaunch
logOutput defaultLogLaunch
logOutputPane :: Text -> IORef [Text] -> C.Sink ToolOutput IDEM ()
logOutputPane command buffer = do
defaultLogLaunch <- lift $ getDefaultLogLaunch
result <- catMaybes <$> logOutputLines defaultLogLaunch paneLineLogger
when (not $ null result) $ do
new <- liftIO . atomicModifyIORef buffer $ \x -> let new = x ++ result in (new, new)
mbURI <- lift $ readIDE autoURI
unless (isJust mbURI) . lift . postSyncIDE . setOutput command $ T.unlines new
logOutputForBuild :: IDEPackage
-> Bool
-> Bool
-> C.Sink ToolOutput IDEM [LogRef]
logOutputForBuild package backgroundBuild jumpToWarnings = do
log <- lift getLog
logLaunch <- lift $ Log.getDefaultLogLaunch
(_, _, errs) <- CL.foldM (readAndShow logLaunch) (log, False, [])
ideR <- lift ask
liftIO $ postGUISync $ reflectIDE (do
setErrorList $ reverse errs
triggerEventIDE (Sensitivity [(SensitivityError,not (null errs))])
let errorNum = length (filter isError errs)
let warnNum = length errs - errorNum
triggerEventIDE (StatusbarChanged [CompartmentState
(T.pack $ show errorNum ++ " Errors, " ++ show warnNum ++ " Warnings"), CompartmentBuild False])
unless (backgroundBuild || (not jumpToWarnings && errorNum == 0)) nextError
return errs) ideR
where
readAndShow :: LogLaunch -> (IDELog, Bool, [LogRef]) -> ToolOutput -> IDEM (IDELog, Bool, [LogRef])
readAndShow logLaunch (log, inError, errs) output = do
ideR <- ask
liftIO $ postGUISync $ case output of
ToolError line -> do
let parsed = parse buildLineParser "" $ T.unpack line
let nonErrorPrefixes = ["Linking ", "ar:", "ld:", "ld warning:"]
tag <- case parsed of
Right BuildLine -> return InfoTag
Right (OtherLine text) | "Linking " `T.isPrefixOf` text -> do
-- when backgroundBuild $ lift interruptProcess
reflectIDE (do
setErrorList $ reverse errs
) ideR
return InfoTag
Right (OtherLine text) | any (`T.isPrefixOf` text) nonErrorPrefixes -> do
return InfoTag
_ -> return ErrorTag
lineNr <- Log.appendLog log logLaunch (line <> "\n") tag
case (parsed, errs) of
(Left e,_) -> do
sysMessage Normal . T.pack $ show e
return (log, False, errs)
(Right ne@(ErrorLine span refType str),_) ->
return (log, True, ((LogRef span package str (lineNr,lineNr) refType):errs))
(Right (OtherLine str1),(LogRef span rootPath str (l1,l2) refType):tl) ->
if inError
then return (log, True, ((LogRef span
rootPath
(if T.null str
then line
else str <> "\n" <> line)
(l1,lineNr) refType) : tl))
else return (log, False, errs)
(Right (WarningLine str1),(LogRef span rootPath str (l1,l2) isError):tl) ->
if inError
then return (log, True, ((LogRef span
rootPath
(if T.null str
then line
else str <> "\n" <> line)
(l1,lineNr) WarningRef) : tl))
else return (log, False, errs)
otherwise -> return (log, False, errs)
ToolOutput line -> do
Log.appendLog log logLaunch (line <> "\n") LogTag
return (log, inError, errs)
ToolInput line -> do
Log.appendLog log logLaunch (line <> "\n") InputTag
return (log, inError, errs)
ToolPrompt line -> do
unless (T.null line) $ Log.appendLog log logLaunch (line <> "\n") LogTag >> return ()
let errorNum = length (filter isError errs)
let warnNum = length errs - errorNum
case errs of
[] -> defaultLineLogger' log logLaunch output
_ -> Log.appendLog log logLaunch (T.pack $ "- - - " ++ show errorNum ++ " errors - "
++ show warnNum ++ " warnings - - -\n") FrameTag
return (log, inError, errs)
ToolExit _ -> do
let errorNum = length (filter isError errs)
let warnNum = length errs - errorNum
case errs of
[] -> defaultLineLogger' log logLaunch output
_ -> Log.appendLog log logLaunch (T.pack $ "----- " ++ show errorNum ++ " errors -- "
++ show warnNum ++ " warnings -----\n") FrameTag
return (log, inError, errs)
--logOutputLines :: Text -- ^ logLaunch
-- -> (LogLaunch -> ToolOutput -> IDEM a)
-- -> [ToolOutput]
-- -> IDEM [a]
logOutputForBreakpoints :: IDEPackage
-> LogLaunch -- ^ loglaunch
-> C.Sink ToolOutput IDEM ()
logOutputForBreakpoints package logLaunch = do
breaks <- logOutputLines logLaunch (\log logLaunch out -> do
case out of
ToolOutput line -> do
logLineNumber <- liftIO $ Log.appendLog log logLaunch (line <> "\n") LogTag
case parse breaksLineParser "" $ T.unpack line of
Right (BreakpointDescription n span) ->
return $ Just $ LogRef span package line (logLineNumber, logLineNumber) BreakpointRef
_ -> return Nothing
_ -> do
defaultLineLogger log logLaunch out
return Nothing)
lift $ setBreakpointList $ catMaybes breaks
logOutputForSetBreakpoint :: IDEPackage
-> LogLaunch -- ^ loglaunch
-> C.Sink ToolOutput IDEM ()
logOutputForSetBreakpoint package logLaunch = do
breaks <- logOutputLines logLaunch (\log logLaunch out -> do
case out of
ToolOutput line -> do
logLineNumber <- liftIO $ Log.appendLog log logLaunch (line <> "\n") LogTag
case parse setBreakpointLineParser "" $ T.unpack line of
Right (BreakpointDescription n span) ->
return $ Just $ LogRef span package line (logLineNumber, logLineNumber) BreakpointRef
_ -> return Nothing
_ -> do
defaultLineLogger log logLaunch out
return Nothing)
lift $ addLogRefs $ catMaybes breaks
logOutputForSetBreakpointDefault :: IDEPackage
-> C.Sink ToolOutput IDEM ()
logOutputForSetBreakpointDefault package = do
defaultLogLaunch <- lift $ getDefaultLogLaunch
logOutputForSetBreakpoint package defaultLogLaunch
logOutputForContext :: IDEPackage
-> LogLaunch -- ^ loglaunch
-> (Text -> [SrcSpan])
-> C.Sink ToolOutput IDEM ()
logOutputForContext package loglaunch getContexts = do
refs <- fmap catMaybes $ logOutputLines loglaunch (\log logLaunch out -> do
case out of
ToolOutput line -> do
logLineNumber <- liftIO $ Log.appendLog log logLaunch (line <> "\n") LogTag
let contexts = getContexts line
if null contexts
then return Nothing
else return $ Just $ LogRef (last contexts) package line (logLineNumber, logLineNumber) ContextRef
_ -> do
defaultLineLogger log logLaunch out
return Nothing)
lift $ unless (null refs) $ do
addLogRefs [last refs]
lastContext
logOutputForLiveContext :: IDEPackage
-> LogLaunch -- ^ loglaunch
-> C.Sink ToolOutput IDEM ()
logOutputForLiveContext package logLaunch = logOutputForContext package logLaunch (getContexts . T.unpack)
where
getContexts [] = []
getContexts line@(x:xs) = case stripPrefix "Stopped at " line of
Just rest -> case parse srcSpanParser "" rest of
Right desc -> desc : getContexts xs
_ -> getContexts xs
_ -> getContexts xs
logOutputForLiveContextDefault :: IDEPackage
-> C.Sink ToolOutput IDEM ()
logOutputForLiveContextDefault package = do
defaultLogLaunch <- lift $ getDefaultLogLaunch
logOutputForLiveContext package defaultLogLaunch
logOutputForHistoricContext :: IDEPackage
-> LogLaunch -- ^ loglaunch
-> C.Sink ToolOutput IDEM ()
logOutputForHistoricContext package logLaunch = logOutputForContext package logLaunch getContexts
where
getContexts line = case T.stripPrefix "Logged breakpoint at " line of
Just rest -> case parse srcSpanParser "" $ T.unpack rest of
Right desc -> [desc]
_ -> []
_ -> []
logOutputForHistoricContextDefault :: IDEPackage
-> C.Sink ToolOutput IDEM ()
logOutputForHistoricContextDefault package = do
defaultLogLaunch <- lift $ getDefaultLogLaunch
logOutputForHistoricContext package defaultLogLaunch
| juhp/leksah | src/IDE/LogRef.hs | gpl-2.0 | 25,553 | 0 | 28 | 8,980 | 7,157 | 3,474 | 3,683 | 576 | 18 |
{-# OPTIONS_GHC -Wall #-}
module Type.Constrain.Effects (constrain) where
import qualified Data.Map as Map
import qualified AST.Effects as Effects
import qualified AST.Module.Name as ModuleName
import qualified Reporting.Annotation as A
import qualified Reporting.Error.Type as Error
import qualified Type.Environment as Env
import Type.Type
( Variable, Type, TypeConstraint, Constraint(..), Scheme(Scheme)
, TermN(VarN), (==>), (<|), mkVar
)
constrain
:: Env.Environment
-> ModuleName.Canonical
-> Effects.Canonical
-> IO TypeConstraint
constrain env moduleName effects =
case effects of
Effects.None ->
return CSaveEnv
Effects.Port _ ->
return CSaveEnv
Effects.Manager _ info ->
constrainHelp env moduleName info
constrainHelp
:: Env.Environment
-> ModuleName.Canonical
-> Effects.Info
-> IO TypeConstraint
constrainHelp env moduleName (Effects.Info tagRegion r0 r1 r2 managerType) =
let
task t =
Env.getType env "Platform.Task" <| Env.getType env "Basics.Never" <| VarN t
router msg selfMsg =
Env.getType env "Platform.Router" <| VarN msg <| VarN selfMsg
in
do v0 <- mkVar Nothing
v1 <- mkVar Nothing
v2 <- mkVar Nothing
state0 <- mkVar Nothing
state1 <- mkVar Nothing
state2 <- mkVar Nothing
msg1 <- mkVar Nothing
msg2 <- mkVar Nothing
selfMsg1 <- mkVar Nothing
selfMsg2 <- mkVar Nothing
let
vars =
[v0, v1, v2, state0, state1, state2, msg1, msg2, selfMsg1, selfMsg2]
onEffectsType =
router msg1 selfMsg1
==> addEffectArgs env moduleName managerType msg1
(VarN state1 ==> task state1)
onSelfMsgType =
router msg2 selfMsg2 ==> VarN selfMsg2 ==> VarN state2 ==> task state2
constrains =
[ CInstance tagRegion "init" (VarN v0)
, CInstance tagRegion "onEffects" (VarN v1)
, CInstance tagRegion "onSelfMsg" (VarN v2)
, CEqual (Error.Manager "init") r0 (VarN v0) (task state0)
, CEqual (Error.Manager "onEffects") r1 (VarN v1) onEffectsType
, CEqual (Error.Manager "onSelfMsg") r2 (VarN v2) onSelfMsgType
, CEqual (Error.State "onEffects") r1 (VarN state0) (VarN state1)
, CEqual (Error.State "onSelfMsg") r2 (VarN state0) (VarN state2)
, CEqual Error.SelfMsg r2 (VarN selfMsg1) (VarN selfMsg2)
]
return $
CLet [Scheme [] vars (CAnd constrains) Map.empty] CSaveEnv
addEffectArgs
:: Env.Environment
-> ModuleName.Canonical
-> Effects.ManagerType
-> Variable
-> Type
-> Type
addEffectArgs env moduleName managerType msg result =
let
toTypeName (A.A _ name) =
ModuleName.canonicalToString moduleName ++ "." ++ name
effectList fxName =
Env.getType env "List" <|
(Env.getType env (toTypeName fxName) <| VarN msg)
in
case managerType of
Effects.CmdManager cmd ->
effectList cmd ==> result
Effects.SubManager sub ->
effectList sub ==> result
Effects.FxManager cmd sub ->
effectList cmd ==> effectList sub ==> result
| mgold/Elm | src/Type/Constrain/Effects.hs | bsd-3-clause | 3,227 | 0 | 16 | 896 | 986 | 498 | 488 | 87 | 3 |
module ListCollaborators where
import qualified Github.Repos.Collaborators as Github
import Data.List
main = do
possibleCollaborators <- Github.collaboratorsOn "thoughtbot" "paperclip"
case possibleCollaborators of
(Left error) -> putStrLn $ "Error: " ++ (show error)
(Right collaborators) ->
putStrLn $ intercalate "\n" $ map formatAuthor collaborators
formatAuthor :: Github.Owner -> String
formatAuthor user =
(Github.githubOwnerLogin user) ++ " (" ++ (Github.githubOwnerUrl user) ++ ")"
| jwiegley/github | samples/Repos/Collaborators/ListCollaborators.hs | bsd-3-clause | 514 | 0 | 12 | 83 | 145 | 75 | 70 | 12 | 2 |
module SimpleFormula ( Formula(..)
, parseString
, interpretFormula
, interpretFormulaDefault
) where
import SimpleFormula.Parser
import SimpleFormula.Types
xor :: Bool -> Bool -> Bool
True `xor` True = False
False `xor` False = False
_ `xor` _ = True
impl :: Bool -> Bool -> Bool
True `impl` True = True
True `impl` False = False
False `impl` True = True
False `impl` False = True
biimpl :: Bool -> Bool -> Bool
True `biimpl` True = True
False `biimpl` False = True
_ `biimpl` _ = False
-- | Interpret the given formula @f@ with the given @assignment@ of
-- boolean values to variables.
--
-- If a variable is not found in the assignment, it assumes the
-- default value.
interpretFormulaDefault :: Bool -> Formula -> [(Int, Bool)] -> Bool
interpretFormulaDefault dflt f assign =
case genericInterpretFormula (maybe (Just dflt) Just) f assign of
Nothing -> error "interpretFormulaDefault should not be able to generate Nothing"
Just b -> b
-- | Interpret the given formula @f@ with the given @assignment@ of boolean
-- values to variables.
--
-- The function will return Nothing if there are free variables after
-- the assignment.
interpretFormula :: Formula -> [(Int, Bool)] -> Maybe Bool
interpretFormula = genericInterpretFormula (maybe Nothing Just)
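-- For example (illustrative; constructors come from "SimpleFormula.Types"):
--
-- >>> interpretFormula (And (Var 1) (Not (Var 2))) [(1, True), (2, False)]
-- Just True
-- >>> interpretFormula (Or (Var 1) (Var 3)) [(1, False)]
-- Nothing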
genericInterpretFormula :: (Maybe Bool -> Maybe Bool) -> Formula ->
([(Int, Bool)] -> Maybe Bool)
genericInterpretFormula lookupWrapper f assignment = translate f
where
binOp op f1 f2 = do
f1' <- translate f1
f2' <- translate f2
return (f1' `op` f2')
translate (Var i) = lookupWrapper $ lookup i assignment
translate (Not f1) = do
f' <- translate f1
return (not f')
translate (And f1 f2) = binOp (&&) f1 f2
translate (Xor f1 f2) = binOp xor f1 f2
translate (Or f1 f2) = binOp (||) f1 f2
translate (Impl f1 f2) = binOp impl f1 f2
translate (BiImpl f1 f2) = binOp biimpl f1 f2
| m4lvin/robbed | tests/SimpleFormula.hs | bsd-3-clause | 2,024 | 0 | 11 | 508 | 620 | 325 | 295 | 42 | 7 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-}
module ETA.BasicTypes.VarSet (
-- * Var, Id and TyVar set types
VarSet, IdSet, TyVarSet, CoVarSet,
-- ** Manipulating these sets
emptyVarSet, unitVarSet, mkVarSet,
extendVarSet, extendVarSetList, extendVarSet_C,
elemVarSet, varSetElems, subVarSet,
unionVarSet, unionVarSets, mapUnionVarSet,
intersectVarSet, intersectsVarSet, disjointVarSet,
isEmptyVarSet, delVarSet, delVarSetList, delVarSetByKey,
minusVarSet, foldVarSet, filterVarSet, fixVarSet,
lookupVarSet_Directly, lookupVarSet, mapVarSet, sizeVarSet, seqVarSet,
elemVarSetByKey, partitionVarSet
) where
import ETA.BasicTypes.Var ( Var, TyVar, CoVar, Id )
import ETA.BasicTypes.Unique
import ETA.Utils.UniqSet
{-
************************************************************************
* *
\subsection{@VarSet@s}
* *
************************************************************************
-}
type VarSet = UniqSet Var
type IdSet = UniqSet Id
type TyVarSet = UniqSet TyVar
type CoVarSet = UniqSet CoVar
emptyVarSet :: VarSet
intersectVarSet :: VarSet -> VarSet -> VarSet
unionVarSet :: VarSet -> VarSet -> VarSet
unionVarSets :: [VarSet] -> VarSet
mapUnionVarSet :: (a -> VarSet) -> [a] -> VarSet
-- ^ map the function over the list, and union the results
varSetElems :: VarSet -> [Var]
unitVarSet :: Var -> VarSet
extendVarSet :: VarSet -> Var -> VarSet
extendVarSetList :: VarSet -> [Var] -> VarSet
elemVarSet :: Var -> VarSet -> Bool
delVarSet :: VarSet -> Var -> VarSet
delVarSetList :: VarSet -> [Var] -> VarSet
minusVarSet :: VarSet -> VarSet -> VarSet
isEmptyVarSet :: VarSet -> Bool
mkVarSet :: [Var] -> VarSet
foldVarSet :: (Var -> a -> a) -> a -> VarSet -> a
lookupVarSet_Directly :: VarSet -> Unique -> Maybe Var
lookupVarSet :: VarSet -> Var -> Maybe Var
                        -- Returns the set element, which may be
                        -- (==) to the argument but need not be the same object
mapVarSet :: (Var -> Var) -> VarSet -> VarSet
sizeVarSet :: VarSet -> Int
filterVarSet :: (Var -> Bool) -> VarSet -> VarSet
extendVarSet_C :: (Var->Var->Var) -> VarSet -> Var -> VarSet
delVarSetByKey :: VarSet -> Unique -> VarSet
elemVarSetByKey :: Unique -> VarSet -> Bool
fixVarSet :: (VarSet -> VarSet) -> VarSet -> VarSet
partitionVarSet :: (Var -> Bool) -> VarSet -> (VarSet, VarSet)
emptyVarSet = emptyUniqSet
unitVarSet = unitUniqSet
extendVarSet = addOneToUniqSet
extendVarSetList = addListToUniqSet
intersectVarSet = intersectUniqSets
intersectsVarSet :: VarSet -> VarSet -> Bool -- True if non-empty intersection
disjointVarSet :: VarSet -> VarSet -> Bool -- True if empty intersection
subVarSet :: VarSet -> VarSet -> Bool -- True if first arg is subset of second
-- (s1 `intersectsVarSet` s2) doesn't compute s2 if s1 is empty;
-- ditto disjointVarSet, subVarSet
unionVarSet = unionUniqSets
unionVarSets = unionManyUniqSets
varSetElems = uniqSetToList
elemVarSet = elementOfUniqSet
minusVarSet = minusUniqSet
delVarSet = delOneFromUniqSet
delVarSetList = delListFromUniqSet
isEmptyVarSet = isEmptyUniqSet
mkVarSet = mkUniqSet
foldVarSet = foldUniqSet
lookupVarSet_Directly = lookupUniqSet_Directly
lookupVarSet = lookupUniqSet
mapVarSet = mapUniqSet
sizeVarSet = sizeUniqSet
filterVarSet = filterUniqSet
extendVarSet_C = addOneToUniqSet_C
delVarSetByKey = delOneFromUniqSet_Directly
elemVarSetByKey = elemUniqSet_Directly
partitionVarSet = partitionUniqSet
mapUnionVarSet get_set xs = foldr (unionVarSet . get_set) emptyVarSet xs
-- See comments with type signatures
intersectsVarSet s1 s2 = not (s1 `disjointVarSet` s2)
disjointVarSet s1 s2 = isEmptyVarSet (s1 `intersectVarSet` s2)
subVarSet s1 s2 = isEmptyVarSet (s1 `minusVarSet` s2)
-- Iterate f to a fixpoint
fixVarSet f s | new_s `subVarSet` s = s
| otherwise = fixVarSet f new_s
where
new_s = f s
seqVarSet :: VarSet -> ()
seqVarSet s = sizeVarSet s `seq` ()
| pparkkin/eta | compiler/ETA/BasicTypes/VarSet.hs | bsd-3-clause | 4,447 | 0 | 8 | 1,140 | 919 | 528 | 391 | 80 | 1 |
{- |
Module : $Header$
Description : creating Haskell modules via translations
Copyright : (c) C. Maeder, Uni Bremen 2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable(Logic)
dumping a LibEnv to a Haskell module
-}
module Haskell.CreateModules where
import Common.Result
import Common.ExtSign
import Common.Doc
import Logic.Coerce
import Logic.Logic
import Logic.Comorphism
import Static.GTheory
import Logic.Prover
import CASL.Logic_CASL
import HasCASL.Logic_HasCASL
import Haskell.Logic_Haskell
import Comorphisms.HasCASL2Haskell
import Comorphisms.CASL2HasCASL
import Comorphisms.HasCASL2HasCASL
printModule :: G_theory -> Maybe Doc
printModule (G_theory lid _ (ExtSign sign0 _) _ sens0 _) =
let th = (sign0, toNamedList sens0)
r1 = do
th0 <- coerceBasicTheory lid CASL "" th
th1 <- wrapMapTheory CASL2HasCASL th0
th2 <- wrapMapTheory HasCASL2HasCASL th1
wrapMapTheory HasCASL2Haskell th2
r2 = do
th0 <- coerceBasicTheory lid HasCASL "" th
th2 <- wrapMapTheory HasCASL2HasCASL th0
wrapMapTheory HasCASL2Haskell th2
r3 = case maybeResult r1 of
Nothing -> r2
_ -> r1
in case maybeResult r3 of
Nothing -> Nothing
Just (_, sens) -> Just $
vcat $ map (print_named Haskell) sens
| keithodulaigh/Hets | Haskell/CreateModules.hs | gpl-2.0 | 1,648 | 0 | 13 | 557 | 306 | 155 | 151 | 34 | 3 |
module StoryMode.Episode where
import Data.Maybe
import Data.Initial
import qualified Data.Map as Map
import System.FilePath
import Utils
import Base
import Editor.Pickle.LevelFile
import StoryMode.Types
import StoryMode.Configuration
loadEpisodes :: IO (Maybe [Episode LevelFile])
loadEpisodes = do
mEpPath <- getStoryModeLevelsPath
case mEpPath of
Nothing -> return Nothing
Just epPath ->
Just <$> fmapM (loadEpisode epPath) episodes
where
loadEpisode :: FilePath -> Episode String -> IO (Episode LevelFile)
loadEpisode epPath e = do
epF <- fmapM (loadFile epPath (epPathSnippet $ euid e)) e
let ep = fmap (\ f -> f ep) epF
return ep
loadFile :: FilePath -> String -> String -> IO (Episode LevelFile -> LevelFile)
loadFile epPath pathSnippet name =
let levelPath = epPath </> pathSnippet
levelFile = levelPath </> name <.> "nl"
in mkEpisodeLevel levelPath levelFile
getEpisodeScore :: EpisodeUID -> IO EpisodeScore
getEpisodeScore euid = do
m <- episodeScores <$> getScores
return $ fromMaybe initial (Map.lookup euid m)
setEpisodeScore :: EpisodeUID -> EpisodeScore -> IO ()
setEpisodeScore euid score = do
s <- getScores
setScores s{episodeScores = Map.insert euid score (episodeScores s)}
| geocurnoff/nikki | src/StoryMode/Episode.hs | lgpl-3.0 | 1,329 | 0 | 15 | 306 | 413 | 206 | 207 | 35 | 2 |
data T
= Add T T
| Mul T T
| Div T T
| Sub T T
| Num Int
eval :: T -> Int
eval x = case x of
Add a b -> eval a + eval b
  Mul a b -> eval a * eval b
  Div a b -> eval a `div` eval b
  Sub a b -> eval a - eval b
Num a -> a
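-- Quick sanity checks (illustrative, assuming the arithmetic wiring above):
--
--   eval (Add (Num 1) (Mul (Num 2) (Num 3)))  == 7
--   eval (Sub (Num 10) (Div (Num 9) (Num 3))) == 7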
| FranklinChen/write-you-a-haskell | chapter10/eval.hs | mit | 235 | 0 | 9 | 95 | 152 | 72 | 80 | 13 | 5 |
----------------------------------------------------------------------------
-- |
-- Module : Test.Cabal.CheckArMetadata
-- Created : 8 July 2017
--
-- Check well-formedness of metadata of .a files that @ar@ command produces.
-- One of the crucial properties of .a files is that they must be
-- deterministic - i.e. they must not include creation date as their
-- contents to facilitate deterministic builds.
----------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module Test.Cabal.CheckArMetadata (checkMetadata) where
import Test.Cabal.Prelude
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BS8
import Data.Char (isSpace)
import System.IO
import Distribution.Compiler (CompilerFlavor(..), CompilerId(..))
import Distribution.Package (getHSLibraryName)
import Distribution.Version (mkVersion)
import Distribution.Simple.Compiler (compilerId)
import Distribution.Simple.LocalBuildInfo (LocalBuildInfo, compiler, localUnitId)
-- Almost a copypasta of Distribution.Simple.Program.Ar.wipeMetadata
checkMetadata :: LocalBuildInfo -> FilePath -> IO ()
checkMetadata lbi dir = withBinaryFile path ReadMode $ \ h ->
hFileSize h >>= checkArchive h
where
path = dir </> "lib" ++ getHSLibraryName (localUnitId lbi) ++ ".a"
_ghc_7_10 = case compilerId (compiler lbi) of
CompilerId GHC version | version >= mkVersion [7, 10] -> True
_ -> False
checkError msg = assertFailure (
"PackageTests.DeterministicAr.checkMetadata: " ++ msg ++
" in " ++ path) >> undefined
archLF = "!<arch>\x0a" -- global magic, 8 bytes
x60LF = "\x60\x0a" -- header magic, 2 bytes
metadata = BS.concat
[ "0 " -- mtime, 12 bytes
, "0 " -- UID, 6 bytes
, "0 " -- GID, 6 bytes
, "0644 " -- mode, 8 bytes
]
headerSize = 60
-- http://en.wikipedia.org/wiki/Ar_(Unix)#File_format_details
checkArchive :: Handle -> Integer -> IO ()
checkArchive h archiveSize = do
global <- BS.hGet h (BS.length archLF)
unless (global == archLF) $ checkError "Bad global header"
checkHeader (toInteger $ BS.length archLF)
where
checkHeader :: Integer -> IO ()
checkHeader offset = case compare offset archiveSize of
EQ -> return ()
GT -> checkError (atOffset "Archive truncated")
LT -> do
header <- BS.hGet h headerSize
unless (BS.length header == headerSize) $
checkError (atOffset "Short header")
let magic = BS.drop 58 header
unless (magic == x60LF) . checkError . atOffset $
"Bad magic " ++ show magic ++ " in header"
unless (metadata == BS.take 32 (BS.drop 16 header))
. checkError . atOffset $ "Metadata has changed"
let size = BS.take 10 $ BS.drop 48 header
objSize <- case reads (BS8.unpack size) of
[(n, s)] | all isSpace s -> return n
_ -> checkError (atOffset "Bad file size in header")
let nextHeader = offset + toInteger headerSize +
-- Odd objects are padded with an extra '\x0a'
if odd objSize then objSize + 1 else objSize
hSeek h AbsoluteSeek nextHeader
checkHeader nextHeader
where
atOffset msg = msg ++ " at offset " ++ show offset
| themoritz/cabal | cabal-testsuite/Test/Cabal/CheckArMetadata.hs | bsd-3-clause | 3,651 | 0 | 23 | 1,103 | 795 | 411 | 384 | 57 | 6 |
import System.Posix.User
main :: IO ()
main = getUserEntryForName "thisIsNotMeantToExist" >> return ()
| jimenezrick/unix | tests/getUserEntryForName.hs | bsd-3-clause | 105 | 0 | 7 | 15 | 34 | 17 | 17 | 3 | 1 |
{-# LANGUAGE Safe #-}
-- | Import all modules from prelude that should be safe
module Main where
import Numeric
import Prelude
-- import Foreign
import Control.Applicative
import Control.Arrow
import Control.Category
-- import Control.ST
-- import Control.ST.Lazy
-- import Control.ST.Strict
import Control.Concurrent
import Control.Concurrent.Chan
import Control.Concurrent.MVar
import Control.Exception
import Control.Exception.Base
import Control.Monad
import Control.Monad.Fix
import Control.Monad.Zip
import Data.Bits
import Data.Bool
import Data.Char
import Data.Complex
import Data.Either
import Data.Eq
import Data.Fixed
import Data.Foldable
import Data.Function
import Data.Functor
import Data.IORef
import Data.Int
import Data.Ix
import Data.List
import Data.Maybe
import Data.Monoid
import Data.Ord
import Data.Ratio
import Data.String
import Data.Traversable
import Data.Tuple
import Data.Typeable
import Data.Unique
import Data.Version
import Data.Word
import Data.STRef
import Data.STRef.Lazy
import Data.STRef.Strict
-- import Debug.Trace
import Foreign.Concurrent
-- import Foreign.ForeignPtr
import Foreign.Ptr
import Foreign.StablePtr
import Foreign.Storable
import Foreign.C
import Foreign.C.Error
import Foreign.C.String
import Foreign.C.Types
-- import Foreign.Marshal
import Foreign.Marshal.Alloc
import Foreign.Marshal.Array
import Foreign.Marshal.Error
import Foreign.Marshal.Pool
import Foreign.Marshal.Utils
import System.CPUTime
import System.Environment
import System.Exit
import System.Info
import System.Mem
import System.Timeout
import System.Console.GetOpt
import System.IO
import System.IO.Error
import System.Mem.StableName
import System.Mem.Weak
import System.Posix.Internals
import System.Posix.Types
import Text.Printf
import Text.Read
import Text.Read.Lex
import Text.Show
import Text.Show.Functions
import Text.ParserCombinators.ReadP
import Text.ParserCombinators.ReadPrec
-- import Unsafe.Coerce
f :: Int
f = 2
main :: IO ()
main = putStrLn $ "X is: " ++ show f
| frantisekfarka/ghc-dsi | testsuite/tests/safeHaskell/unsafeLibs/GoodImport03.hs | bsd-3-clause | 2,028 | 0 | 6 | 235 | 441 | 277 | 164 | 80 | 1 |
-----------------------------------------------------------------------------
--
-- Module : Language.PureScript.Parser.Comments
-- Copyright : (c) Phil Freeman 2015
-- License : MIT
--
-- Maintainer : Phil Freeman <[email protected]>
-- Stability : experimental
-- Portability :
--
-- |
-- Defines the types of source code comments
--
-----------------------------------------------------------------------------
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TemplateHaskell #-}
module Language.PureScript.Comments where
import Data.Aeson.TH
import qualified Data.Data as D
data Comment
= LineComment String
| BlockComment String
deriving (Show, Read, Eq, Ord, D.Data, D.Typeable)
$(deriveJSON (defaultOptions { sumEncoding = ObjectWithSingleField }) ''Comment)
| michaelficarra/purescript | src/Language/PureScript/Comments.hs | mit | 795 | 0 | 10 | 115 | 107 | 68 | 39 | 10 | 0 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE UndecidableInstances #-}
module Betfair.APING.Types.ErrorData
( ErrorData(..)
) where
import Betfair.APING.Types.APINGException (APINGException)
import Data.Aeson.TH (Options (fieldLabelModifier, omitNothingFields),
defaultOptions, deriveJSON)
import Protolude
import Text.PrettyPrint.GenericPretty
data ErrorData = ErrorData
{ exceptionname :: Text
, aPINGException :: APINGException
} deriving (Eq, Read, Show, Generic, Pretty)
-- from http://stackoverflow.com/questions/30696089/how-to-handle-capital-case-in-json
$(deriveJSON
defaultOptions
{ omitNothingFields = True
, fieldLabelModifier =
let f "aPINGException" = "APINGException"
f other = other
in f
}
''ErrorData)
| joe9/betfair-api | src/Betfair/APING/Types/ErrorData.hs | mit | 1,052 | 0 | 14 | 258 | 163 | 100 | 63 | 26 | 0 |
module Guguk where
| joom/Guguk | src/Guguk.hs | mit | 19 | 0 | 2 | 3 | 4 | 3 | 1 | 1 | 0 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.SVGLength
(newValueSpecifiedUnits, convertToSpecifiedUnits,
pattern SVG_LENGTHTYPE_UNKNOWN, pattern SVG_LENGTHTYPE_NUMBER,
pattern SVG_LENGTHTYPE_PERCENTAGE, pattern SVG_LENGTHTYPE_EMS,
pattern SVG_LENGTHTYPE_EXS, pattern SVG_LENGTHTYPE_PX,
pattern SVG_LENGTHTYPE_CM, pattern SVG_LENGTHTYPE_MM,
pattern SVG_LENGTHTYPE_IN, pattern SVG_LENGTHTYPE_PT,
pattern SVG_LENGTHTYPE_PC, getUnitType, setValue, getValue,
setValueInSpecifiedUnits, getValueInSpecifiedUnits,
setValueAsString, getValueAsString, SVGLength(..), gTypeSVGLength)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGLength.newValueSpecifiedUnits Mozilla SVGLength.newValueSpecifiedUnits documentation>
newValueSpecifiedUnits ::
(MonadDOM m) => SVGLength -> Word -> Float -> m ()
newValueSpecifiedUnits self unitType valueInSpecifiedUnits
= liftDOM
(void
(self ^. jsf "newValueSpecifiedUnits"
[toJSVal unitType, toJSVal valueInSpecifiedUnits]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGLength.convertToSpecifiedUnits Mozilla SVGLength.convertToSpecifiedUnits documentation>
convertToSpecifiedUnits ::
(MonadDOM m) => SVGLength -> Word -> m ()
convertToSpecifiedUnits self unitType
= liftDOM
(void (self ^. jsf "convertToSpecifiedUnits" [toJSVal unitType]))
pattern SVG_LENGTHTYPE_UNKNOWN = 0
pattern SVG_LENGTHTYPE_NUMBER = 1
pattern SVG_LENGTHTYPE_PERCENTAGE = 2
pattern SVG_LENGTHTYPE_EMS = 3
pattern SVG_LENGTHTYPE_EXS = 4
pattern SVG_LENGTHTYPE_PX = 5
pattern SVG_LENGTHTYPE_CM = 6
pattern SVG_LENGTHTYPE_MM = 7
pattern SVG_LENGTHTYPE_IN = 8
pattern SVG_LENGTHTYPE_PT = 9
pattern SVG_LENGTHTYPE_PC = 10
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGLength.unitType Mozilla SVGLength.unitType documentation>
getUnitType :: (MonadDOM m) => SVGLength -> m Word
getUnitType self
= liftDOM (round <$> ((self ^. js "unitType") >>= valToNumber))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGLength.value Mozilla SVGLength.value documentation>
setValue :: (MonadDOM m) => SVGLength -> Float -> m ()
setValue self val = liftDOM (self ^. jss "value" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGLength.value Mozilla SVGLength.value documentation>
getValue :: (MonadDOM m) => SVGLength -> m Float
getValue self
= liftDOM (realToFrac <$> ((self ^. js "value") >>= valToNumber))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGLength.valueInSpecifiedUnits Mozilla SVGLength.valueInSpecifiedUnits documentation>
setValueInSpecifiedUnits ::
(MonadDOM m) => SVGLength -> Float -> m ()
setValueInSpecifiedUnits self val
= liftDOM (self ^. jss "valueInSpecifiedUnits" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGLength.valueInSpecifiedUnits Mozilla SVGLength.valueInSpecifiedUnits documentation>
getValueInSpecifiedUnits :: (MonadDOM m) => SVGLength -> m Float
getValueInSpecifiedUnits self
= liftDOM
(realToFrac <$>
((self ^. js "valueInSpecifiedUnits") >>= valToNumber))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGLength.valueAsString Mozilla SVGLength.valueAsString documentation>
setValueAsString ::
(MonadDOM m, ToJSString val) => SVGLength -> val -> m ()
setValueAsString self val
= liftDOM (self ^. jss "valueAsString" (toJSVal val))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGLength.valueAsString Mozilla SVGLength.valueAsString documentation>
getValueAsString ::
(MonadDOM m, FromJSString result) => SVGLength -> m result
getValueAsString self
= liftDOM ((self ^. js "valueAsString") >>= fromJSValUnchecked)
| ghcjs/jsaddle-dom | src/JSDOM/Generated/SVGLength.hs | mit | 4,720 | 0 | 12 | 699 | 1,014 | 575 | 439 | 74 | 1 |
{-# LANGUAGE CPP #-}
module GHCJS.DOM.WebGLActiveInfo (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.WebGLActiveInfo
#else
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.WebGLActiveInfo
#else
#endif
| plow-technologies/ghcjs-dom | src/GHCJS/DOM/WebGLActiveInfo.hs | mit | 358 | 0 | 5 | 33 | 33 | 26 | 7 | 4 | 0 |
import MyTree
--class Eq a where
-- (==) :: a -> a -> Bool
-- (/=) :: a -> a -> Bool
-- x == y = not (x /= y)
-- x /= y = not (x == y)
data TrafficLight = Red | Yellow | Green
instance Eq TrafficLight where
Red == Red = True
Yellow == Yellow = True
Green == Green = True
_ == _ = False
instance Show TrafficLight where
show Red = "Red Light"
show Yellow = "Yellow Light"
show Green = "Green Light"
--instance Eq m => Eq (Maybe m) where
-- Just x == Just y = x == y
-- Nothing == Nothing = True
-- _ == _ = False
class YesNo a where
yesno :: a -> Bool
instance YesNo Int where
yesno 0 = False
yesno _ = True
instance YesNo [a] where
yesno [] = False
yesno _ = True
instance YesNo Bool where
yesno = id
instance YesNo (Maybe a) where
yesno Nothing = False
yesno (Just _) = True
instance YesNo (Tree a) where
yesno EmptyTree = False
yesno (Node _ _ _) = True
instance YesNo TrafficLight where
yesno Red = False
yesno Green = True
yesnoIf :: (YesNo y) => y -> a -> a -> a
yesnoIf yesnoVal yesResult noResult = if yesno yesnoVal then yesResult else noResult
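-- For instance (illustrative only): yesnoIf [] "full" "empty" evaluates to
-- "empty" because yesno [] is False, while yesnoIf (Just 'x') "full" "empty"
-- evaluates to "full".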
instance Functor Tree where
fmap _ EmptyTree = EmptyTree
fmap f (Node x l r) = Node (f x) (fmap f l) (fmap f r)
--instance Functor (Either a) where
-- fmap f (Right x) = Right (f x)
-- fmap f (Left x) = Left x
--class Functor f where
-- fmap :: (a->b) -> f a -> f b
class Tofu t where
tofu :: j a -> t a j
data Frank a b = Frank {frankField :: b a} deriving (Show) | RAFIRAF/HASKELL | Eq.hs | mit | 1,486 | 1 | 9 | 386 | 463 | 241 | 222 | -1 | -1 |
module ThreaseBench (benchmarks) where
import Criterion
benchmarks :: [Benchmark]
benchmarks = []
| tfausak/threase | benchmark/ThreaseBench.hs | mit | 110 | 0 | 5 | 24 | 27 | 17 | 10 | 4 | 1 |
module Y2017.M03.D31.Exercise where
import Data.Array
import Data.Map (Map)
-- below import available via 1HaskellADay git repository
import Control.Scan.CSV
import Data.Percentage
{--
Take a look at Fig. 2.6 from the Road Safety Web Publication No. 16
Relationship between Speed and Risk of Fatal Injury: Pedestrians and Car
Occupants at the URL:
http://nacto.org/docs/usdg/relationship_between_speed_risk_fatal_injury_pedestrians_and_car_occupants_richards.pdf
(fig 2.6 is also screen-capped at this directory)
Okay, fine, you're doing a study of slight and severe injuries as well as
fatalities based on the data collected; however, you need to do your reports
in m/s (meters per second) instead of mph (miles per hour).
Today's Haskell problem: write an mph2mps converter. Research the conversions
as needed
--}
type MPH = Int
mph2mps :: MPH -> Float
mph2mps milesperhour = undefined
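-- One possible implementation, left commented out so the exercise stays an
-- exercise; it assumes the statute mile, i.e. 1 mile = 1609.344 m and 1 h = 3600 s:
--
-- mph2mps milesperhour = fromIntegral milesperhour * 1609.344 / 3600
--
-- e.g. mph2mps 30 is roughly 13.4 m/s.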
-- Now, from the chart, extract the data points you need to answer the below.
data Impact = Slight | Serious | Fatal deriving (Eq, Ord, Show)
type Chart = Map Impact (Array MPH Percentage)
readChart :: FilePath -> IO Chart
readChart file = undefined
-- The function chart translates the slight/severe/fatal data sets to arrays
-- The data are located at this directory as impact-data.csv
-- 1. Between which two mph speeds do most severe injuries occur? What does that
-- translate to in m/s?
severe :: Chart -> (MPH, MPH)
severe impactData = undefined
-- 2. same question for fatalities
fatalities :: Chart -> (MPH, MPH)
fatalities impactData = undefined
-- 3. At which speeds do slight injuries make a prominent appearance when cars
-- collide with pedestrians?
slights :: Chart -> (MPH, MPH)
slights impactData = undefined
-- Be safe out there, Haskellers!
| geophf/1HaskellADay | exercises/HAD/Y2017/M03/D31/Exercise.hs | mit | 1,775 | 0 | 7 | 296 | 204 | 121 | 83 | 18 | 1 |
{-# LANGUAGE BangPatterns #-}
import Graphics.Gloss.Raster.Array
import System.Environment
import Data.Array.Repa.Algorithms.Randomish
import Data.Array.Repa as R
import Data.List
import Data.Bits
import Prelude as P
main :: IO ()
main
= do args <- getArgs
case args of
[] -> run 800 600 4 4
[sizeX, sizeY, scaleX, scaleY]
-> run (read sizeX) (read sizeY) (read scaleX) (read scaleY)
_ -> putStr $ unlines
[ "gloss-snow <sizeX::Int> <sizeY::Int> <scaleX::Int> <scaleY::Int>"
, " sizeX, sizeY - visualisation size (default 800, 600)"
, " scaleX, scaleY - pixel scaling factor (default 4, 4)" ]
run :: Int -> Int -> Int -> Int -> IO ()
run windowX windowY scaleX scaleY
= do
let !sizeX = windowX `div` scaleX
let !sizeY = windowY `div` scaleY
let frame time
= let seed1 = truncate (time * 10000)
arr1 = randomishIntArray (Z :. sizeY :. sizeX) 0 255 seed1
seed2 = truncate ((time * time) * 100)
arr2 = randomishIntArray (Z :. sizeY :. sizeX) 0 255 seed2
makePixel i j
= let x = i + j
x' = x .&. 0x0ff
in rgb8 x' x' x'
in R.zipWith makePixel arr1 arr2
animateArray
(InWindow "Digital Snow" (windowX, windowY) (10, 10))
(scaleX, scaleY)
frame
| gscalzo/HaskellTheHardWay | gloss-try/gloss-master/gloss-examples/raster/Snow/Main.hs | mit | 1,590 | 0 | 18 | 649 | 431 | 226 | 205 | 38 | 3 |
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Test.Hspec
import qualified Data.ByteString.Lazy as L
import Codec.Xlsx (toXlsx)
import Control.Lens
import Applicant
main :: IO ()
main = hspec $ do
describe "processSheet" $ do
let testExcelFile = "test/xlsx-test.xlsx"
(sheetsA :: IO [ApplicantSheet]) = do
bs <- L.readFile testExcelFile
return $ processXlsx (toXlsx bs)
it "loads both sheets" $ do
sheets <- sheetsA
length sheets `shouldBe` 2
it "takes the header row" $ do
sheets <- sheetsA
let (headersURLs :: Maybe [String]) = do
firstSheet <- sheets ^? ix 0
headers <- firstSheet ^. applicants ^? ix 0
return $ headers ^. downloadURLs
headersURLs `shouldBe` Just []
it "gets monteverdi" $ do
sheets <- sheetsA
let monteverdiURL :: Maybe String = do
sheet <- sheets ^? ix 1 -- note: sheets appear to be alphabeticized
monteverdi <- sheet ^. applicants ^? ix 1
monteverdi ^. downloadURLs ^? ix 0
monteverdiURL `shouldBe` Just "http://www.yellowbarn.org/mv1.doc"
it "gets the right name" $ do
sheets <- sheetsA
let monteverdiName :: Maybe String = do
sheet <- sheets ^? ix 1 -- note: sheets appear to be alphabeticized
monteverdi <- sheet ^. applicants ^? ix 1
monteverdi ^. name
monteverdiName `shouldBe` Just "Monteverdi"
| mjhoy/ybapp | test/Test.hs | mit | 1,472 | 0 | 21 | 429 | 427 | 201 | 226 | 38 | 1 |
-- | Module: Trans.CgrToStage1
-- Description: Translate from schema.capnp's codegenerator request to IR.Stage1.
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NamedFieldPuns #-}
module Trans.CgrToStage1 (cgrToCgr) where
import Data.Word
import Data.Function ((&))
import Data.Maybe (mapMaybe)
import Data.Text.Encoding (encodeUtf8)
import GHC.Float (castDoubleToWord64, castFloatToWord32)
import qualified Data.ByteString as BS
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import qualified Data.Vector as V
import Capnp.Fields (HasUnion(..))
import Capnp.New.Classes (toWord)
import Capnp.Repr.Parsed (Parsed)
import qualified Capnp.Gen.Capnp.Schema.New as Schema
import qualified Capnp.New.Basics as B
import qualified IR.Common as C
import qualified IR.Name as Name
import qualified IR.Stage1 as Stage1
type NodeMap v = M.Map Word64 v
nodesToNodes :: NodeMap (Parsed Schema.Node) -> NodeMap Stage1.Node
nodesToNodes inMap = outMap
where
outMap = M.map translate inMap
translate Schema.Node{scopeId, id, nestedNodes, union', parameters} = Stage1.Node
{ nodeCommon = Stage1.NodeCommon
{ nodeId = id
, nodeNested =
[ (Name.UnQ name, node)
| Schema.Node'NestedNode{name, id} <- V.toList nestedNodes
, Just node <- [M.lookup id outMap]
]
, nodeParent =
if scopeId == 0 then
Nothing
else
                    Just (outMap M.! scopeId)
, nodeParams = V.fromList
[ Name.UnQ name
| Schema.Node'Parameter{name} <- V.toList parameters
]
}
, nodeUnion = case union' of
Schema.Node'enum Schema.Node'enum'{enumerants} ->
Stage1.NodeEnum $ map enumerantToName $ V.toList enumerants
Schema.Node'struct Schema.Node'struct'
{ dataWordCount
, pointerCount
, isGroup
, discriminantOffset
, fields
} ->
Stage1.NodeStruct Stage1.Struct
{ dataWordCount
, pointerCount
, isGroup
, tagOffset = discriminantOffset
, fields = map (fieldToField outMap) (V.toList fields)
}
Schema.Node'interface Schema.Node'interface'{ methods, superclasses } ->
Stage1.NodeInterface Stage1.Interface
{ methods = map (methodToMethod outMap) (V.toList methods)
, supers =
[ C.InterfaceType (outMap M.! id) (brandToBrand outMap brand)
| Schema.Superclass{id, brand} <- V.toList superclasses
]
}
Schema.Node'const Schema.Node'const'{ type_ = Schema.Type type_, value = Schema.Value value } ->
Stage1.NodeConstant $
let mismatch = error "ERROR: Constant's type and value do not agree"
in case value of
Schema.Value'void ->
C.VoidValue
Schema.Value'bool v ->
C.WordValue (C.PrimWord C.PrimBool) (toWord v)
Schema.Value'int8 v ->
C.WordValue (C.PrimWord $ C.PrimInt $ C.IntType C.Signed C.Sz8) (toWord v)
Schema.Value'int16 v ->
C.WordValue (C.PrimWord $ C.PrimInt $ C.IntType C.Signed C.Sz16) (toWord v)
Schema.Value'int32 v ->
C.WordValue (C.PrimWord $ C.PrimInt $ C.IntType C.Signed C.Sz32) (toWord v)
Schema.Value'int64 v ->
C.WordValue (C.PrimWord $ C.PrimInt $ C.IntType C.Signed C.Sz64) (toWord v)
Schema.Value'uint8 v ->
C.WordValue (C.PrimWord $ C.PrimInt $ C.IntType C.Unsigned C.Sz8) (toWord v)
Schema.Value'uint16 v ->
C.WordValue (C.PrimWord $ C.PrimInt $ C.IntType C.Unsigned C.Sz16) (toWord v)
Schema.Value'uint32 v ->
C.WordValue (C.PrimWord $ C.PrimInt $ C.IntType C.Unsigned C.Sz32) (toWord v)
Schema.Value'uint64 v ->
C.WordValue (C.PrimWord $ C.PrimInt $ C.IntType C.Unsigned C.Sz64) (toWord v)
Schema.Value'float32 v ->
C.WordValue (C.PrimWord C.PrimFloat32) (toWord v)
Schema.Value'float64 v ->
C.WordValue (C.PrimWord C.PrimFloat64) (toWord v)
Schema.Value'text v ->
C.PtrValue (C.PrimPtr C.PrimText) $ Just $ B.PtrList $ B.List8 $
encodeUtf8 v
& BS.unpack
& (++ [0])
& V.fromList
Schema.Value'data_ v ->
C.PtrValue (C.PrimPtr C.PrimText) $ Just $ B.PtrList $ B.List8 $
BS.unpack v
& V.fromList
Schema.Value'list v ->
case type_ of
Schema.Type'list (Schema.Type'list' (Schema.Type elementType)) ->
C.PtrValue
(C.ListOf (typeToType outMap elementType))
v
_ ->
mismatch
Schema.Value'enum v ->
case type_ of
-- TODO: brand
Schema.Type'enum Schema.Type'enum'{ typeId } ->
C.WordValue (C.EnumType (outMap M.! typeId)) (toWord v)
_ ->
mismatch
Schema.Value'struct v ->
case type_ of
-- TODO: brand
Schema.Type'struct Schema.Type'struct'{ typeId, brand } -> C.PtrValue
(C.PtrComposite $ C.StructType
(outMap M.! typeId)
(brandToBrand outMap brand)
)
v
_ ->
mismatch
Schema.Value'interface ->
case type_ of
Schema.Type'interface Schema.Type'interface'{ typeId, brand } ->
C.PtrValue
(C.PtrInterface (C.InterfaceType (outMap M.! typeId) (brandToBrand outMap brand)))
Nothing
_ ->
mismatch
Schema.Value'anyPointer v ->
C.PtrValue (C.PrimPtr (C.PrimAnyPtr C.Ptr)) v
Schema.Value'unknown' tag ->
error $ "Unknown variant for Value #" ++ show tag
_ ->
Stage1.NodeOther
}
brandToBrand :: NodeMap Stage1.Node -> Parsed Schema.Brand -> Stage1.Brand
brandToBrand nodeMap Schema.Brand{scopes} =
C.MapBrand $ M.fromList $ mapMaybe scopeToScope (V.toList scopes)
where
scopeToScope Schema.Brand'Scope{scopeId, union'} = case union' of
Schema.Brand'Scope'unknown' _ -> Nothing
Schema.Brand'Scope'inherit -> Nothing
Schema.Brand'Scope'bind bindings -> Just
( scopeId
, C.Bind $ bindings
& V.map (\(Schema.Brand'Binding b) -> case b of
Schema.Brand'Binding'type_ (Schema.Type typ) ->
case typeToType nodeMap typ of
C.PtrType t ->
C.BoundType t
C.CompositeType t ->
C.BoundType (C.PtrComposite t)
_ -> error
"Invalid schema: a type parameter was set to a non-pointer type."
Schema.Brand'Binding'unbound -> C.Unbound
Schema.Brand'Binding'unknown' _ -> C.Unbound
)
)
methodToMethod :: NodeMap Stage1.Node -> Parsed Schema.Method -> Stage1.Method
methodToMethod nodeMap Schema.Method
{ name
, paramStructType, paramBrand
, resultStructType, resultBrand
} =
Stage1.Method
{ name = Name.UnQ name
, paramType = structTypeToType nodeMap paramStructType paramBrand
, resultType = structTypeToType nodeMap resultStructType resultBrand
}
enumerantToName :: Parsed Schema.Enumerant -> Name.UnQ
enumerantToName Schema.Enumerant{name} = Name.UnQ name
fieldToField :: NodeMap Stage1.Node -> Parsed Schema.Field -> Stage1.Field
fieldToField nodeMap Schema.Field{name, discriminantValue, union'} =
Stage1.Field
{ name = Name.UnQ name
, tag =
if discriminantValue == Schema.field'noDiscriminant then
Nothing
else
Just discriminantValue
, locType = getFieldLocType nodeMap union'
}
getFieldLocType :: NodeMap Stage1.Node -> Parsed (Which Schema.Field) -> C.FieldLocType Stage1.Brand Stage1.Node
getFieldLocType nodeMap = \case
Schema.Field'slot Schema.Field'slot'
{ type_ = Schema.Type type_
, defaultValue = Schema.Value defaultValue
, offset
} ->
case typeToType nodeMap type_ of
C.VoidType ->
C.VoidField
C.PtrType ty ->
C.PtrField (fromIntegral offset) ty
C.WordType ty ->
case valueBits defaultValue of
Nothing -> error $
"Invlaid schema: a field in a struct's data section " ++
"had an illegal (non-data) default value."
Just defaultVal ->
C.DataField
(dataLoc offset ty defaultVal)
ty
C.CompositeType ty ->
C.PtrField (fromIntegral offset) (C.PtrComposite ty)
Schema.Field'group Schema.Field'group'{typeId} ->
C.HereField $ C.StructType
(nodeMap M.! typeId)
(C.MapBrand M.empty) -- groups are always monomorphic
Schema.Field'unknown' _ ->
-- Don't know how to interpret this; we'll have to leave the argument
-- opaque.
C.VoidField
-- | Given the offset field from the capnp schema, a type, and a
-- default value, return a DataLoc describing the location of a field.
dataLoc :: Word32 -> C.WordType Stage1.Node -> Word64 -> C.DataLoc
dataLoc offset ty defaultVal =
let bitsOffset = fromIntegral offset * C.dataFieldSize ty
in C.DataLoc
{ dataIdx = bitsOffset `div` 64
, dataOff = bitsOffset `mod` 64
, dataDef = defaultVal
}
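-- Worked example for dataLoc above (illustrative, assuming C.dataFieldSize
-- returns the field's size in bits): a 32-bit field at schema offset 3 gives
-- bitsOffset = 96, hence dataIdx = 1 (the second word of the data section)
-- and dataOff = 32.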
-- | Return the raw bit-level representation of a value that is stored
-- in a struct's data section.
--
-- returns Nothing if the value is a non-word type.
valueBits :: Parsed (Which Schema.Value) -> Maybe Word64
valueBits = \case
Schema.Value'bool b -> Just $ fromIntegral $ fromEnum b
Schema.Value'int8 n -> Just $ fromIntegral n
Schema.Value'int16 n -> Just $ fromIntegral n
Schema.Value'int32 n -> Just $ fromIntegral n
Schema.Value'int64 n -> Just $ fromIntegral n
Schema.Value'uint8 n -> Just $ fromIntegral n
Schema.Value'uint16 n -> Just $ fromIntegral n
Schema.Value'uint32 n -> Just $ fromIntegral n
Schema.Value'uint64 n -> Just n
Schema.Value'float32 n -> Just $ fromIntegral $ castFloatToWord32 n
Schema.Value'float64 n -> Just $ castDoubleToWord64 n
Schema.Value'enum n -> Just $ fromIntegral n
_ -> Nothing -- some non-word type.
reqFileToReqFile :: NodeMap Stage1.Node -> Parsed Schema.CodeGeneratorRequest'RequestedFile -> Stage1.ReqFile
reqFileToReqFile nodeMap Schema.CodeGeneratorRequest'RequestedFile{id, filename} =
let Stage1.Node{nodeCommon=Stage1.NodeCommon{nodeNested}} = nodeMap M.! id
in Stage1.ReqFile
{ fileName = T.unpack filename
, file = Stage1.File
{ fileNodes = nodeNested
, fileId = id
}
}
cgrToCgr :: Parsed Schema.CodeGeneratorRequest -> Stage1.CodeGenReq
cgrToCgr Schema.CodeGeneratorRequest{nodes, requestedFiles} =
Stage1.CodeGenReq{allFiles, reqFiles}
where
    nodeMap = nodesToNodes $ M.fromList [(id, node) | node@Schema.Node{id} <- V.toList nodes]
reqFiles = map (reqFileToReqFile nodeMap) $ V.toList requestedFiles
allFiles =
[ let fileNodes =
[ (Name.UnQ name, nodeMap M.! id)
| Schema.Node'NestedNode{name, id} <- V.toList nestedNodes
-- If the file is an import (i.e. not part of requestedFiles), then
-- the code generator will sometimes omit parts of it that are not
-- used. We need to check that the nestedNodes are actually included;
              -- if not, we omit them from the output as well.
, M.member id nodeMap
]
in
Stage1.File{fileId, fileNodes}
| Schema.Node{union'=Schema.Node'file, id=fileId, nestedNodes} <- V.toList nodes
]
structTypeToType
:: NodeMap Stage1.Node
-> Word64
-> Parsed Schema.Brand
-> C.CompositeType Stage1.Brand Stage1.Node
structTypeToType nodeMap typeId brand =
C.StructType (nodeMap M.! typeId) (brandToBrand nodeMap brand)
typeToType :: NodeMap Stage1.Node -> Parsed (Which Schema.Type) -> C.Type Stage1.Brand Stage1.Node
typeToType nodeMap = \case
Schema.Type'void -> C.VoidType
Schema.Type'bool -> C.WordType $ C.PrimWord C.PrimBool
Schema.Type'int8 -> C.WordType $ C.PrimWord $ C.PrimInt $ C.IntType C.Signed C.Sz8
Schema.Type'int16 -> C.WordType $ C.PrimWord $ C.PrimInt $ C.IntType C.Signed C.Sz16
Schema.Type'int32 -> C.WordType $ C.PrimWord $ C.PrimInt $ C.IntType C.Signed C.Sz32
Schema.Type'int64 -> C.WordType $ C.PrimWord $ C.PrimInt $ C.IntType C.Signed C.Sz64
Schema.Type'uint8 -> C.WordType $ C.PrimWord $ C.PrimInt $ C.IntType C.Unsigned C.Sz8
Schema.Type'uint16 -> C.WordType $ C.PrimWord $ C.PrimInt $ C.IntType C.Unsigned C.Sz16
Schema.Type'uint32 -> C.WordType $ C.PrimWord $ C.PrimInt $ C.IntType C.Unsigned C.Sz32
Schema.Type'uint64 -> C.WordType $ C.PrimWord $ C.PrimInt $ C.IntType C.Unsigned C.Sz64
Schema.Type'float32 -> C.WordType $ C.PrimWord C.PrimFloat32
Schema.Type'float64 -> C.WordType $ C.PrimWord C.PrimFloat64
Schema.Type'text -> C.PtrType $ C.PrimPtr C.PrimText
Schema.Type'data_ -> C.PtrType $ C.PrimPtr C.PrimData
Schema.Type'list Schema.Type'list'{elementType = Schema.Type t} ->
C.PtrType $ C.ListOf (typeToType nodeMap t)
-- nb. enum has a brand field, but it's not actually use for anything.
Schema.Type'enum Schema.Type'enum'{typeId, brand = _ } ->
C.WordType $ C.EnumType $ nodeMap M.! typeId
-- TODO: use 'brand' to generate type parameters.
Schema.Type'struct Schema.Type'struct'{typeId, brand} ->
C.CompositeType $ structTypeToType nodeMap typeId brand
Schema.Type'interface Schema.Type'interface'{typeId, brand} ->
C.PtrType $ C.PtrInterface (C.InterfaceType (nodeMap M.! typeId) (brandToBrand nodeMap brand))
Schema.Type'anyPointer (Schema.Type'anyPointer' p) ->
case p of
Schema.Type'anyPointer'parameter Schema.Type'anyPointer'parameter'{scopeId, parameterIndex} ->
let paramScope = nodeMap M.! scopeId in
C.PtrType $ C.PtrParam C.TypeParamRef
{ paramScope
, paramIndex = fromIntegral parameterIndex
, paramName = Stage1.nodeParams (Stage1.nodeCommon paramScope) V.! fromIntegral parameterIndex
}
Schema.Type'anyPointer'unconstrained
(Schema.Type'anyPointer'unconstrained' unconstrained) ->
C.PtrType $ C.PrimPtr $ C.PrimAnyPtr $ case unconstrained of
Schema.Type'anyPointer'unconstrained'anyKind -> C.Ptr
Schema.Type'anyPointer'unconstrained'struct -> C.Struct
Schema.Type'anyPointer'unconstrained'list -> C.List
Schema.Type'anyPointer'unconstrained'capability -> C.Cap
Schema.Type'anyPointer'unconstrained'unknown' _ -> C.Ptr
-- ^ Something we don't know about; assume it could be anything.
_ -> C.VoidType -- TODO: implicitMethodParameter
_ -> C.VoidType -- TODO: constrained anyPointers
| zenhack/haskell-capnp | cmd/capnpc-haskell/Trans/CgrToStage1.hs | mit | 17,254 | 0 | 28 | 6,242 | 4,199 | 2,125 | 2,074 | 297 | 29 |
-----------------------------------------------------------------------------
-- |
-- Module : Distributed.Data.Fifo
-- Copyright : (c) Phil Hargett 2014
-- License : MIT (see LICENSE file)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable (requires STM)
--
-- A simple first-in, first-out queue implemented with a pair of lists.
--
-----------------------------------------------------------------------------
module Data.Fifo (
Fifo,
empty,
enqueue,
enqueueAll,
dequeue,
dequeueAll,
size
) where
-- local imports
-- external imports
import Data.Serialize
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
data Fifo v = Fifo {
fifoLeft :: [v],
fifoRight :: [v]
} deriving (Eq,Show)
instance (Serialize v) => Serialize (Fifo v) where
get = do
left <- get
right <- get
return $ Fifo left right
put (Fifo left right) = do
put left
put right
empty :: Fifo v
empty = Fifo [] []
enqueue :: Fifo v -> v -> Fifo v
enqueue fifo value = fifo {
fifoLeft = (value:(fifoLeft fifo))
}
enqueueAll :: Fifo v -> [v] -> Fifo v
enqueueAll fifo values = foldl enqueue fifo values
dequeue :: Fifo v -> (Maybe v,Fifo v)
dequeue fifo =
if null $ fifoRight fifo
then if null $ fifoLeft fifo
then (Nothing,fifo)
else let (value:rest) = reverse $ fifoLeft fifo
in (Just value,fifo {fifoLeft = [],fifoRight = rest})
else let (value:rest) = fifoRight fifo
in (Just value,fifo {fifoRight = rest})
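-- Illustrative behaviour (not part of the original module):
--
--   dequeue (enqueue (enqueue empty 1) 2)
--     == (Just 1, Fifo { fifoLeft = [], fifoRight = [2] })
--
-- i.e. values come out in the order they were enqueued; the left list is
-- reversed onto the right list only when the right list runs dry.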
dequeueAll :: Fifo v -> ([v],Fifo v)
dequeueAll fifo = ( (fifoRight fifo) ++ (reverse $ fifoLeft fifo),empty)
size :: Fifo v -> Int
size fifo = (length $ fifoLeft fifo) + (length $ fifoRight fifo) | hargettp/distributed-containers | src/Data/Fifo.hs | mit | 1,902 | 0 | 12 | 456 | 543 | 293 | 250 | 41 | 3 |
module AyaScript.Types where
type Program = [Stmt]
data Stmt = Expr Expr
| Decl Expr Expr
| Assign Expr Expr
deriving (Show, Eq)
data Expr = Natural Integer
| Str String
| UnaryOp String Expr
| BinOp String Expr Expr
| Var String
| Fun String Expr
| If Expr Expr Expr
| Tuple [Expr]
| List [Expr]
deriving (Show, Eq)
| AyaMorisawa/aya-script | src/AyaScript/Types.hs | mit | 424 | 0 | 7 | 168 | 127 | 74 | 53 | 16 | 0 |
{-# htermination lookupFM :: FiniteMap Float b -> Float -> Maybe b #-}
import FiniteMap
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/FiniteMap_lookupFM_6.hs | mit | 88 | 0 | 3 | 15 | 5 | 3 | 2 | 1 | 0 |
{-# LANGUAGE TemplateHaskell #-}
module Rewriting.SRS.Apply where
import qualified Rewriting.Apply as A
import Rewriting.Derive.Instance
import Rewriting.SRS.Raw
import Rewriting.SRS.Step
import Rewriting.SRS.Steps
import Autolib.TES.Identifier
import Autolib.Reporter
import Autolib.ToDoc
import Autolib.Reader
import Control.Monad
import Data.Typeable
data For_SRS = For_SRS
deriving ( Eq, Ord, Typeable )
$(derives [makeReader, makeToDoc ] [''For_SRS])
instance A.Apply For_SRS ( SRS Identifier )
[ Identifier ]
( Step Identifier ) where
example tag = Instance
{ system = Rewriting.SRS.Raw.example
, from = read "[a,a,b,b]"
, to = read "[b,b,a,a]"
}
apply tag system object action = do
exec system object action
actions tag system object =
steps system object
-- local variables:
-- mode: haskell
-- end:
| florianpilz/autotool | src/Rewriting/SRS/Apply.hs | gpl-2.0 | 933 | 0 | 9 | 233 | 229 | 131 | 98 | -1 | -1 |
{-# LANGUAGE OverloadedStrings, DataKinds #-}
module PostResponse where
import Text.Blaze ((!))
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Happstack.Server
import MakeElements
import MasterTemplate
import Scripts
postResponse :: ServerPart Response
postResponse =
ok $ toResponse $
masterTemplate "Courseography - Check My POSt!"
[H.meta ! A.name "keywords"
! A.content "",
postLinks
]
(do header "post"
checkPost
)
postScripts
checkPost :: H.Html
checkPost =
H.html $ do
H.head $
H.title "Check My Post!"
H.nav ! A.id "posts" $ H.ul $ do
H.li ! A.id "specialist" $ do
H.a ! A.href "" $ "Specialist"
H.div ! A.id "spec_creds" $ "(0/12.0)"
H.li ! A.id "major" $ do
H.a ! A.href "" $ "Major"
H.div ! A.id "maj_creds" $ "(0/8.0)"
H.li ! A.id "minor" $ do
H.a ! A.href "" $ "Minor"
H.div ! A.id "min_creds" $ "(0/4.0)"
H.div ! A.id "button_wrapper" $
H.button ! A.id "update" $ "Update POSts"
H.div ! A.id "div_specialist" $ do
H.h2 "First Year"
H.div ! A.id "spec_csc108" $ do
H.p ! A.class_ "code" $ "CSC108H"
H.div ! A.class_ "more-info" $
H.p ! A.class_ "full_name CSC108" $ "CSC108H (Introduction to Computer Programming)"
H.div ! A.id "spec_csc148" $ do
H.p ! A.class_ "code" $ "CSC148H"
H.div ! A.class_ "more-info" $
H.p ! A.class_ "full_name CSC148" $ "CSC148H (Introduction to Computer Science)"
H.div ! A.id "spec_csc165240" $ do
H.p ! A.class_ "code" $ "CSC165H or CSC240H"
H.div ! A.class_ "more-info" $ do
H.p ! A.class_ "full_name CSC165240" $ "CSC165H (Mathematical Expression and Reasoning for Computer Science)"
H.p ! A.class_ "full_name CSC165240" $ "CSC240H (Enriched Introduction to the Theory of Computation)"
H.div ! A.id "spec_mat135136137157calc1" $ do
H.p ! A.class_ "code" $ "(MAT135H and MAT136H) or MAT137Y or MAT157Y"
H.div ! A.class_ "more-info" $ do
H.p ! A.class_ "full_name MAT135136137157Calc1" $ "MAT135H (Calculus 1(A))"
H.p ! A.class_ "full_name MAT135136137157Calc1" $ "MAT136H (Calculus 1(B))"
H.p ! A.class_ "full_name MAT135136137157Calc1" $ "MAT137Y (Calculus)"
H.p ! A.class_ "full_name MAT135136137157Calc1" $ "MAT157Y (Analysis 1)"
H.h2 "Second Year"
H.div ! A.id "spec_csc207" $ do
H.p ! A.class_ "code" $ "CSC207H"
H.div ! A.class_ "more-info" $
H.p ! A.class_ "full_name CSC207" $ "CSC207H (Software Design)"
H.div ! A.id "spec_csc209" $ do
H.p ! A.class_ "code" $ "CSC209H"
H.div ! A.class_ "more-info" $
H.p ! A.class_ "full_name CSC209" $ "CSC209H (Software Tools And System Programming)"
H.div ! A.id "spec_csc258" $ do
H.p ! A.class_ "code" $ "CSC258H"
H.div ! A.class_ "more-info" $
H.p ! A.class_ "full_name CSC258" $ "CSC258H (Computer Organization)"
H.div ! A.id "spec_csc236240" $ do
H.p ! A.class_ "code" $ "CSC236H or CSC240H"
H.div ! A.class_ "more-info" $ do
H.p ! A.class_ "full_name CSC236240" $ "CSC236H (Introduction to the Theory of Computation)"
H.p ! A.class_ "full_name CSC236240" $ "CSC240H (Enriched Introduction to the Theory of Computation)"
H.div ! A.id "spec_csc263265" $ do
H.p ! A.class_ "code" $ "CSC263H or CSC265H"
H.div ! A.class_ "more-info" $ do
H.p ! A.class_ "full_name CSC263265" $ "CSC263H (Data Structures and Analysis)"
H.p ! A.class_ "full_name CSC263265" $ "CSC265H (Enriched Data Structures and Analysis)"
H.div ! A.id "spec_mat221223240lin1" $ do
H.p ! A.class_ "code" $ "MAT221H or MAT223H or MAT240H"
H.div ! A.class_ "more-info" $ do
H.p ! A.class_ "full_name MAT221223240Lin1" $ "MAT221H (Applied Linear Algebra)"
H.p ! A.class_ "full_name MAT221223240Lin1" $ "MAT223H (Linear Algebra 1)"
H.p ! A.class_ "full_name MAT221223240Lin1" $ "MAT240H (Algebra 1)"
H.div ! A.id "spec_sta247255257sta1" $ do
H.p ! A.class_ "code" $ "STA247H or STA255H or STA257H"
H.div ! A.class_ "more-info" $ do
H.p ! A.class_ "full_name STA247255257Sta1" $ "STA247H (Probability with Computer Applications)"
H.p ! A.class_ "full_name STA247255257Sta1" $ "STA255H (Statistical Analysis)"
H.p ! A.class_ "full_name STA247255257Sta1" $ "STA257H (Probability and Statistics 1)"
H.h2 "Later Years"
H.div ! A.id "spec_csc369" $ do
H.p ! A.class_ "code" $ "CSC369H"
H.div ! A.class_ "more-info" $
H.p ! A.class_ "full_name CSC369" $ "CSC369H (Operating Systems)"
H.div ! A.id "spec_csc373" $ do
H.p ! A.class_ "code" $ "CSC373H"
H.div ! A.class_ "more-info" $
H.p ! A.class_ "full_name CSC373" $ "CSC373H (Algorithm Design, Analysis, and Complexity)"
H.div ! A.id "spec_400" $ do
H.p ! A.class_ "code" $ "Any 400-level CSC course, BCB410H, BCB420H, BCB430Y, ECE489H (1.5 FCEs)"
H.div ! A.id "spec400" ! A.class_ "more-info" $ do
H.input ! A.type_ "text"
H.input ! A.type_ "text"
H.input ! A.type_ "text"
H.div ! A.id "spec_300" $ do
H.p ! A.class_ "code" $ "Any 300+ level CSC course, BCB410H, BCB420H, BCB430Y, ECE385H, ECE489H (1.5 FCEs)"
H.div ! A.id "spec300" ! A.class_ "more-info" $ do
H.input ! A.type_ "text"
H.input ! A.type_ "text"
H.input ! A.type_ "text"
H.div ! A.id "spec_extra" $ do
H.p ! A.class_ "code" $ "Any 300+ level CSC course, BCB/ECE/MAT/STA course (2.0 FCEs) - \
\MAT: 224, 235/237/257, any 300+ except for 329, 390, & 391 \
\; STA: 248, 261, any 300+"
H.div ! A.id "specextra" ! A.class_ "more-info" $ do
H.input ! A.type_ "text"
H.input ! A.type_ "text"
H.input ! A.type_ "text"
H.input ! A.type_ "text"
H.div ! A.id "spec_misc" $ do
H.p ! A.class_ "code" $ H.em $ "Any from this list: CSC301H, CSC318H, CSC404H, CSC411H, CSC418H, CSC420H, \
\CSC428H, CSC454H, CSC485H, CSC490H, CSC491H, CSC494H, or PEY (0.5 FCEs) \
\ ** Note: Type 'PEY' for Check my POSt to recognize it **"
H.div ! A.class_ "more-info" $
H.input ! A.type_ "text"
H.h3 "Notes"
H.div ! A.id "notes" $
H.p "- No more than 1.0 FCE from CSC490H1, CSC491H1, CSC494H1, CSC495H1, BCB430Y1 may be used to fulfill program requirements"
H.div ! A.id "div_major" $ do
H.h2 "First Year"
H.div ! A.id "maj_csc108" $ do
H.p ! A.class_ "code" $ "CSC108H"
H.div ! A.class_ "more-info" $
H.p ! A.class_ "full_name CSC108" $ "CSC108H (Introduction to Computer Programming)"
H.div ! A.id "maj_csc148" $ do
H.p ! A.class_ "code" $ "CSC148H"
H.div ! A.class_ "more-info" $
H.p ! A.class_ "full_name CSC148" $ "CSC148H (Introduction to Computer Science)"
H.div ! A.id "maj_csc165240" $ do
H.p ! A.class_ "code" $ "CSC165H or CSC240H"
H.div ! A.class_ "more-info" $ do
H.p ! A.class_ "full_name CSC165240" $ "CSC165H (Mathematical Expression and Reasoning for Computer Science)"
H.p ! A.class_ "full_name CSC165240" $ "CSC240H (Enriched Introduction to the Theory of Computation)"
H.div ! A.id "maj_mat135136137157calc1" $ do
H.p ! A.class_ "code" $ "(MAT135H and MAT136H) or MAT137Y or MAT157Y"
H.div ! A.class_ "more-info" $ do
H.p ! A.class_ "full_name MAT135136137157Calc1" $ "MAT135H (Calculus 1(A))"
H.p ! A.class_ "full_name MAT135136137157Calc1" $ "MAT136H (Calculus 1(B))"
H.p ! A.class_ "full_name MAT135136137157Calc1" $ "MAT137Y (Calculus)"
H.p ! A.class_ "full_name MAT135136137157Calc1" $ "MAT157Y (Analysis 1)"
H.h2 "Second Year"
H.div ! A.id "maj_csc207" $ do
H.p ! A.class_ "code" $ "CSC207H"
H.div ! A.class_ "more-info" $
H.p ! A.class_ "full_name CSC207" $ "CSC207H (Software Design)"
H.div ! A.id "maj_csc258" $ do
H.p ! A.class_ "code" $ "CSC258H"
H.div ! A.class_ "more-info" $
H.p ! A.class_ "full_name CSC258" $ "CSC258H (Computer Organization)"
H.div ! A.id "maj_csc236240" $ do
H.p ! A.class_ "code" $ "CSC236H or CSC240H"
H.div ! A.class_ "more-info" $ do
H.p ! A.class_ "full_name CSC236240" $ "CSC236H (Introduction to the Theory of Computation)"
H.p ! A.class_ "full_name CSC236240" $ "CSC240H (Enriched Introduction to the Theory of Computation)"
H.div ! A.id "maj_csc263265" $ do
H.p ! A.class_ "code" $ "CSC263H or CSC265H"
H.div ! A.class_ "more-info" $ do
H.p ! A.class_ "full_name CSC263265" $ "CSC263H (Data Structures and Analysis)"
H.p ! A.class_ "full_name CSC263265" $ "CSC265H (Enriched Data Structures and Analysis)"
H.div ! A.id "maj_sta247255257sta1" $ do
H.p ! A.class_ "code" $ "STA247H or STA255H or STA257H"
H.div ! A.class_ "more-info" $ do
H.p ! A.class_ "full_name STA247255257Sta1" $ "STA247H (Probability with Computer Applications)"
H.p ! A.class_ "full_name STA247255257Sta1" $ "STA255H (Statistical Analysis)"
H.p ! A.class_ "full_name STA247255257Sta1" $ "STA257H (Probability and Statistics 1)"
H.h2 "Later Years"
H.div ! A.id "maj_400" $ do
H.p ! A.class_ "code" $ "Any 400-level CSC course, BCB410H, BCB420H, BCB430Y (0.5 FCEs)"
H.div ! A.id "maj400" ! A.class_ "more-info" $
H.input ! A.type_ "text"
H.div ! A.id "maj_300" $ do
H.p ! A.class_ "code" $ "Any 300+ level CSC course, BCB410H, BCB420H, BCB430Y, ECE385H, ECE489H (1.0 FCEs)"
H.div ! A.id "maj300" ! A.class_ "more-info" $ do
H.input ! A.type_ "text"
H.input ! A.type_ "text"
H.div ! A.id "maj_extra" $ do
H.p ! A.class_ "code" $ "Any 300+ level CSC course, BCB/ECE/MAT/STA course (1.5 FCEs) - \
\MAT: 221/223/240, 224, 235/237/257, any 300+ except for 329, 390, & 391 \
\; STA: 248, 261, any 300+"
H.div ! A.id "majextra" ! A.class_ "more-info" $ do
H.input ! A.type_ "text"
H.input ! A.type_ "text"
H.input ! A.type_ "text"
H.div ! A.id "maj_misc" $ do
H.p ! A.class_ "code" $ H.em $ "Any from this list: CSC301H, CSC318H, CSC404H, CSC411H, CSC418H, CSC420H, \
\CSC428H, CSC454H, CSC485H, CSC490H, CSC491H, CSC494H, or PEY (0.5 FCEs) \
\ ** Note: Type 'PEY' for Check my POSt to recognize it **"
H.div ! A.class_ "more-info" $
H.input ! A.type_ "text"
H.h3 "Notes"
H.div ! A.id "notes" $
H.p "- No more than 1.0 FCE from CSC490H1, CSC491H1, CSC494H1, CSC495H1, BCB430Y1 may be used to fulfill program requirements"
H.div ! A.id "div_minor" $ do
H.h2 "First Year"
H.div ! A.id "min_csc108" $ do
H.p ! A.class_ "code" $ "CSC108H"
H.div ! A.class_ "more-info" $
H.p ! A.class_ "full_name CSC108" $ "CSC108H (Introduction to Computer Programming)"
H.div ! A.id "min_csc148" $ do
H.p ! A.class_ "code" $ "CSC148H"
H.div ! A.class_ "more-info" $
H.p ! A.class_ "full_name CSC148" $ "CSC148H (Introduction to Computer Science)"
H.div ! A.id "min_csc165240" $ do
H.p ! A.class_ "code" $ "CSC165H or CSC240H"
H.div ! A.class_ "more-info" $ do
H.p ! A.class_ "full_name CSC165240" $ "CSC165H (Mathematical Expression and Reasoning for Computer Science)"
H.p ! A.class_ "full_name CSC165240" $ "CSC240H (Enriched Introduction to the Theory of Computation)"
H.h2 "Later Years"
H.div ! A.id "min_csc207" $ do
H.p ! A.class_ "code" $ "CSC207H"
H.div ! A.class_ "more-info" $
H.p ! A.class_ "full_name CSC207" $ "CSC207H (Software Design)"
H.div ! A.id "min_csc236240" $ do
H.p ! A.class_ "code" $ "CSC236H or CSC240H"
H.div ! A.class_ "more-info" $ do
H.p ! A.class_ "full_name CSC236240" $ "CSC236H (Introduction to the Theory of Computation)"
H.p ! A.class_ "full_name CSC236240" $ "CSC240H (Enriched Introduction to the Theory of Computation)"
H.div ! A.id "min_misc" $ do
H.p ! A.class_ "code" $ "Any 300/400-level CSC course (atleast 1.0 FCE), CSC209H, CSC258H, CSC263H/CSC265H (1.5 FCEs)"
H.div ! A.id "minextra" ! A.class_ "more-info" $ do
H.input ! A.type_ "text"
H.input ! A.type_ "text"
H.input ! A.type_ "text"
H.h3 "Notes"
H.div ! A.id "notes" $
H.p "- You may take no more than three 300/400 level CSC/ECE courses"
| cchens/courseography | hs/PostResponse.hs | gpl-3.0 | 15,153 | 433 | 14 | 5,757 | 3,434 | 1,710 | 1,724 | 232 | 1 |
-- Static configuration for CelfToGraph (This is a dirty way... needs to be fixed in the future. TODO)
module CelfToGraphConf where
celf_cmd = "/home/jff/work/code/TeLLer/local/celf/celf"
| jff/TeLLer | src/CelfToGraphConf.hs | gpl-3.0 | 189 | 0 | 4 | 25 | 10 | 7 | 3 | 2 | 1 |
-- From Simon Peyton-Jones's "A taste of haskell"
module Stack(Stack,
-- swap1,
swap2,
swap3,
swap4,
swap5) where
type Stack w = [w]
-- swap1 :: Stack w -> Stack w
-- swap1 [] = []
-- swap1 (w : []) = w : []
-- swap1 (w1 : w2 : ws) = w2 : w1 : ws
swap2 :: Stack w -> Stack w
swap2 [] = []
swap2 [w] = [w]
swap2 (w1 : w2 : ws) = w2 : w1 : ws
swap3 :: Stack w -> Stack w
swap3 (w1 : w2 : ws) = w2 : w1 : ws
swap3 ws = ws
swap4 :: Stack w -> Stack w
swap4 ws = case ws of
[] -> []
[w] -> [w]
(w1:w2:ws) -> w2:w1:ws
swap5 :: Stack w -> Stack w
swap5 ws = case ws of
(w1:w2:ws) -> w2:w1:ws
ws -> ws
-- type TS = Stack Int
-- prop_swap1 :: TS -> Bool
-- prop_swap1 s = swap1 (swap1 s) == s
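-- All of the swap variants above behave the same on non-trivial stacks, e.g.
-- (illustrative): swap2 [1,2,3] == [2,1,3] and swap5 "ab" == "ba".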
| wtanaka/haskell | Stack.hs | gpl-3.0 | 722 | 0 | 10 | 207 | 290 | 157 | 133 | 22 | 3 |
module Program (
processDirectory,
defaultConfig,
LogLevel(..),
Config(..),
Result(..)
) where
import Data.List(intercalate)
import Control.Monad (forM_, forM)
import Control.Concurrent (forkIO)
import Control.Concurrent.Chan (newChan, writeChan, readChan)
import FileFinder (listAllFiles)
import DepProcessors (processors)
import qualified DepProcessors.Data.Result as R
-- | Finds all the dependency definitions in the given directory and recursively downloads
-- them.
processDirectory ::
Config -- ^ Configures the operation of the program
-> FilePath -- ^ The directory to process
-> IO Result
processDirectory config targetRepo = do
let logger = (loggerFunc config)
repoContents <- getFilesInRepo targetRepo
depDownloaders <- mapM (buildProcessors repoContents) processors
forM_ depDownloaders (printDownloaderCount logger)
resultsChan <- newChan
executeDownloaders depDownloaders resultsChan
let totalExpectedResults = foldl (\c (_, xs) -> c + (length xs)) 0 depDownloaders
results <- accumulateResults logger resultsChan totalExpectedResults
printSummary logger results
return $ finalResult results
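-- A minimal usage sketch (hypothetical; the surrounding CLI wiring is not
-- shown here):
--
--   main :: IO ()
--   main = do
--       result <- processDirectory defaultConfig "/path/to/repo"
--       case result of
--           Ok -> putStrLn "All dependency definitions processed."
--           _  -> putStrLn "Some dependency definitions failed."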
finalResult = foldl process Ok
where
process Ok (R.GenericError _) = GenericError
process _ (R.DependencyNotFound _) = DependencyNotFound
process c _ = c
printSummary logger results = logger Info summary
where
summary = concat [
"Processed " ++ (show totalResults) ++ " files, ",
(show successCount) ++ " " ++ (wereWas successCount) ++ " successful and ",
(show failureCount) ++ " " ++ (wereWas failureCount) ++ " not."
]
totalResults = length results
failureCount = length $ filter isFailureResult results
successCount = totalResults - failureCount
isFailureResult R.Ok = False
isFailureResult _ = True
wereWas count = if count == 1 then "was" else "were"
accumulateResults logger resultsChan downloaderCount =
forM [1..downloaderCount] $ \_ -> do
(filepath, result) <- readChan resultsChan
-- Print out the reults as they come in
case result of
R.Ok -> logger Info $ "Processed: " ++ filepath
R.GenericError err -> logger Error $
"Error processing: " ++ filepath ++ "\n\n" ++ err
R.DependencyNotFound deps -> logger Error $
missingDepsString filepath deps
return result
missingDepsString filepath missingDeps = out
where
out = concat ["Missing dependencies: ", joinedDeps, "\nIn file: ", filepath]
joinedDeps = intercalate ", " missingDeps
-- | Update each definition type concurrently (for speed). For each of the definitions within
-- a given type update sequentially (the update command may not be thread safe)
-- Accumulate the results in resultsChan
executeDownloaders depDownloaders resultsChan =
forM_ depDownloaders $ \(_, downloaders) -> forkIO $
forM_ downloaders $ \(filepath, downloader) -> do
result <- downloader
writeChan resultsChan (filepath, result)
printDownloaderCount logger (downloaderType, processors) = do
let numDefs = (show $ (length processors))
pluralized = if numDefs == "1" then " definition." else " definitions."
logger Notice $ "Found " ++ numDefs ++ " " ++ downloaderType ++ pluralized
-- | Builds a (downloaderType, proccessingResult) pair. processingResult is evaluated
-- later (downloading doesn't happen as part of this function).
buildProcessors files (downloaderType, builder) = do
fileProcessors <- builder files
return (downloaderType, fileProcessors)
getFilesInRepo targetDir = do
maybeFiles <- listAllFiles targetDir
case maybeFiles of
Just files -> return files
Nothing -> return . error $ "Could not extract a list of files from " ++ targetDir ++ " (is it a git repo?)"
-- | Nice defaults for Config
defaultConfig = Config {
loggerFunc = const putStrLn
}
-- | Describes the importance of a log message
data LogLevel = Notice | Info | Warn | Error deriving (Eq, Ord, Show)
-- | Configuration for the program
data Config = Config {
loggerFunc :: (LogLevel -> String -> IO ()) -- ^ Function to use as the program logger
}
data Result
-- | Successfully processed dependency definition
= Ok
-- | Some non-specific error
| GenericError
-- | A dependency could not be found, the case we're interested in
| DependencyNotFound
| splondike/depcache | src/Program.hs | gpl-3.0 | 4,489 | 0 | 16 | 1,007 | 1,032 | 542 | 490 | 82 | 3 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.IAM.UpdateServerCertificate
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates the name and\/or the path of the specified server certificate.
--
-- You should understand the implications of changing a server
-- certificate\'s path or name. For more information, see
-- <http://docs.aws.amazon.com/IAM/latest/UserGuide/ManagingServerCerts.html Managing Server Certificates>
-- in the /Using IAM/ guide.
--
-- To change a server certificate name the requester must have appropriate
-- permissions on both the source object and the target object. For
-- example, to change the name from ProductionCert to ProdCert, the entity
-- making the request must have permission on ProductionCert and ProdCert,
-- or must have permission on all (*). For more information about
-- permissions, see
-- <http://docs.aws.amazon.com/IAM/latest/UserGuide/PermissionsAndPolicies.html Permissions and Policies>.
--
-- /See:/ <http://docs.aws.amazon.com/IAM/latest/APIReference/API_UpdateServerCertificate.html AWS API Reference> for UpdateServerCertificate.
module Network.AWS.IAM.UpdateServerCertificate
(
-- * Creating a Request
updateServerCertificate
, UpdateServerCertificate
-- * Request Lenses
, uNewServerCertificateName
, uNewPath
, uServerCertificateName
-- * Destructuring the Response
, updateServerCertificateResponse
, UpdateServerCertificateResponse
) where
import Network.AWS.IAM.Types
import Network.AWS.IAM.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'updateServerCertificate' smart constructor.
data UpdateServerCertificate = UpdateServerCertificate'
{ _uNewServerCertificateName :: !(Maybe Text)
, _uNewPath :: !(Maybe Text)
, _uServerCertificateName :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'UpdateServerCertificate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'uNewServerCertificateName'
--
-- * 'uNewPath'
--
-- * 'uServerCertificateName'
updateServerCertificate
:: Text -- ^ 'uServerCertificateName'
-> UpdateServerCertificate
updateServerCertificate pServerCertificateName_ =
UpdateServerCertificate'
{ _uNewServerCertificateName = Nothing
, _uNewPath = Nothing
, _uServerCertificateName = pServerCertificateName_
}
-- | The new name for the server certificate. Include this only if you are
-- updating the server certificate\'s name. The name of the certificate
-- cannot contain any spaces.
uNewServerCertificateName :: Lens' UpdateServerCertificate (Maybe Text)
uNewServerCertificateName = lens _uNewServerCertificateName (\ s a -> s{_uNewServerCertificateName = a});
-- | The new path for the server certificate. Include this only if you are
-- updating the server certificate\'s path.
uNewPath :: Lens' UpdateServerCertificate (Maybe Text)
uNewPath = lens _uNewPath (\ s a -> s{_uNewPath = a});
-- | The name of the server certificate that you want to update.
uServerCertificateName :: Lens' UpdateServerCertificate Text
uServerCertificateName = lens _uServerCertificateName (\ s a -> s{_uServerCertificateName = a});
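-- Illustrative only (not part of the generated binding): building a request
-- that renames a certificate. The certificate names are hypothetical, and this
-- assumes the @&@ and @.~@ lens operators re-exported by "Network.AWS.Prelude"
-- are in scope.
--
-- > updateServerCertificate "ProductionCert"
-- >   & uNewServerCertificateName .~ Just "ProdCert"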
instance AWSRequest UpdateServerCertificate where
type Rs UpdateServerCertificate =
UpdateServerCertificateResponse
request = postQuery iAM
response
= receiveNull UpdateServerCertificateResponse'
instance ToHeaders UpdateServerCertificate where
toHeaders = const mempty
instance ToPath UpdateServerCertificate where
toPath = const "/"
instance ToQuery UpdateServerCertificate where
toQuery UpdateServerCertificate'{..}
= mconcat
["Action" =:
("UpdateServerCertificate" :: ByteString),
"Version" =: ("2010-05-08" :: ByteString),
"NewServerCertificateName" =:
_uNewServerCertificateName,
"NewPath" =: _uNewPath,
"ServerCertificateName" =: _uServerCertificateName]
-- | /See:/ 'updateServerCertificateResponse' smart constructor.
data UpdateServerCertificateResponse =
UpdateServerCertificateResponse'
deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'UpdateServerCertificateResponse' with the minimum fields required to make a request.
--
updateServerCertificateResponse
:: UpdateServerCertificateResponse
updateServerCertificateResponse = UpdateServerCertificateResponse'
| fmapfmapfmap/amazonka | amazonka-iam/gen/Network/AWS/IAM/UpdateServerCertificate.hs | mpl-2.0 | 5,200 | 0 | 11 | 958 | 536 | 328 | 208 | 73 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
module Foldable (
Foldable(..)
) where
import Prelude ((.), id, flip)
import Maybe
import Either
import Monoid
import qualified Data.List as List
class Foldable t where
fold :: Monoid m => t m -> m
fold = foldMap id
-- foldMap's default combination order is from right to left
foldMap :: Monoid m => (a -> m) -> t a -> m
foldMap f = foldr (mappend . f) mempty
  -- b is the type of the initial value
foldr :: (a -> b -> b) -> b -> t a -> b
foldr f z t = appEndo (foldMap (Endo . f) t) z
  -- Dual reverses the combination order
foldl :: (b -> a -> b) -> b -> t a -> b
foldl f z t = appEndo (getDual (foldMap (Dual . Endo . flip f) t)) z
{-# MINIMAL foldMap | foldr #-}
instance Foldable Maybe where
foldr _ z Nothing = z
foldr f z (Just x) = f x z
foldl _ z Nothing = z
foldl f z (Just x) = f z x
instance Foldable [] where
foldr = List.foldr
foldl = List.foldl
instance Foldable (Either e) where
foldMap _ (Left _) = mempty
foldMap f (Right y) = f y
foldr _ z (Left _) = z
foldr f z (Right y) = f y z
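-- A few illustrative reductions using the instances above (shown as comments,
-- since this module deliberately hides most of the Prelude):
--
-- > foldr f z Nothing  == z
-- > foldr f z (Just x) == f x z
-- > foldr f z (Right y) == f y z
-- > foldr f z [a, b]   == f a (f b z)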
| seckcoder/lang-learn | haskell/lambda-calculus/src/Foldable.hs | unlicense | 1,209 | 0 | 14 | 412 | 468 | 244 | 224 | 31 | 0 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
import Data.Reify
newtype Mu a = In (a (Mu a))
data List a b = Cons a b | Nil deriving Show
type MyList a = Mu (List a)
g = In (Cons 1 (In (Cons 2 (In (Cons 3 (In Nil))))))
instance (Traversable a) => MuRef (Mu a) where
type DeRef (Mu a) = a
mapDeRef f (In a) = traverse f a
instance Functor (List a) where
fmap _ Nil = Nil
fmap f (Cons a b) = Cons a (f b)
instance Foldable (List a) where
foldMap f Nil = mempty
foldMap f (Cons a b) = f b
instance Traversable (List a) where
traverse f (Cons a b) = Cons a <$> f b
traverse f Nil = pure Nil
instance MuRef [a] where
type DeRef [a] = List a
mapDeRef f (x:xs) = Cons x <$> f xs
mapDeRef f [] = pure Nil
main = do
let xs = In (Cons 98 (In (Cons 101 xs)))
reifyGraph xs >>= print
let xs = 99:100:xs
reifyGraph xs >>= print
| egaburov/funstuff | Haskell/sharing/example_lists.hs | apache-2.0 | 879 | 4 | 16 | 228 | 496 | 236 | 260 | 28 | 1 |
module {-# REL #-} Main where --pragma here is unnecessary, but does not hurt
import {-#REL#-} Middle.MiddleExposed
import {-# REL #-} Middle.MiddleHidden
main = do
putStrLn "Hi there!"
putStrLn $ "The hidden middle value is: " ++ show someHiddenValue
putStrLn $ "The hidden bottom type is: " ++ show (Expose (Hide "I'm a shy type"))
putStrLn $ "The string is: " ++ show myString
| dimitri-xyz/relative-imports-test-top-exec-A | Main.hs | apache-2.0 | 398 | 0 | 12 | 83 | 83 | 42 | 41 | 8 | 1 |
{-# OPTIONS -fglasgow-exts -#include "../include/gui/qtc_hs_QItemEditorCreatorBase.h" #-}
-----------------------------------------------------------------------------
{-| Module : QItemEditorCreatorBase.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:30
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QItemEditorCreatorBase (
QqItemEditorCreatorBase(..)
,QqItemEditorCreatorBase_nf(..)
,qItemEditorCreatorBase_delete, qItemEditorCreatorBase_delete1
)
where
import Foreign.C.Types
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
instance QuserMethod (QItemEditorCreatorBase ()) (()) (IO ()) where
userMethod qobj evid ()
= withObjectPtr qobj $ \cobj_qobj ->
qtc_QItemEditorCreatorBase_userMethod cobj_qobj (toCInt evid)
foreign import ccall "qtc_QItemEditorCreatorBase_userMethod" qtc_QItemEditorCreatorBase_userMethod :: Ptr (TQItemEditorCreatorBase a) -> CInt -> IO ()
instance QuserMethod (QItemEditorCreatorBaseSc a) (()) (IO ()) where
userMethod qobj evid ()
= withObjectPtr qobj $ \cobj_qobj ->
qtc_QItemEditorCreatorBase_userMethod cobj_qobj (toCInt evid)
instance QuserMethod (QItemEditorCreatorBase ()) (QVariant ()) (IO (QVariant ())) where
userMethod qobj evid qvoj
= withObjectRefResult $
withObjectPtr qobj $ \cobj_qobj ->
withObjectPtr qvoj $ \cobj_qvoj ->
qtc_QItemEditorCreatorBase_userMethodVariant cobj_qobj (toCInt evid) cobj_qvoj
foreign import ccall "qtc_QItemEditorCreatorBase_userMethodVariant" qtc_QItemEditorCreatorBase_userMethodVariant :: Ptr (TQItemEditorCreatorBase a) -> CInt -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
instance QuserMethod (QItemEditorCreatorBaseSc a) (QVariant ()) (IO (QVariant ())) where
userMethod qobj evid qvoj
= withObjectRefResult $
withObjectPtr qobj $ \cobj_qobj ->
withObjectPtr qvoj $ \cobj_qvoj ->
qtc_QItemEditorCreatorBase_userMethodVariant cobj_qobj (toCInt evid) cobj_qvoj
class QqItemEditorCreatorBase x1 where
qItemEditorCreatorBase :: x1 -> IO (QItemEditorCreatorBase ())
instance QqItemEditorCreatorBase (()) where
qItemEditorCreatorBase ()
= withQItemEditorCreatorBaseResult $
qtc_QItemEditorCreatorBase
foreign import ccall "qtc_QItemEditorCreatorBase" qtc_QItemEditorCreatorBase :: IO (Ptr (TQItemEditorCreatorBase ()))
instance QqItemEditorCreatorBase ((QItemEditorCreatorBase t1)) where
qItemEditorCreatorBase (x1)
= withQItemEditorCreatorBaseResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QItemEditorCreatorBase1 cobj_x1
foreign import ccall "qtc_QItemEditorCreatorBase1" qtc_QItemEditorCreatorBase1 :: Ptr (TQItemEditorCreatorBase t1) -> IO (Ptr (TQItemEditorCreatorBase ()))
class QqItemEditorCreatorBase_nf x1 where
qItemEditorCreatorBase_nf :: x1 -> IO (QItemEditorCreatorBase ())
instance QqItemEditorCreatorBase_nf (()) where
qItemEditorCreatorBase_nf ()
= withObjectRefResult $
qtc_QItemEditorCreatorBase
instance QqItemEditorCreatorBase_nf ((QItemEditorCreatorBase t1)) where
qItemEditorCreatorBase_nf (x1)
= withObjectRefResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QItemEditorCreatorBase1 cobj_x1
instance QcreateWidget (QItemEditorCreatorBase ()) ((QWidget t1)) where
createWidget x0 (x1)
= withQWidgetResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QItemEditorCreatorBase_createWidget_h cobj_x0 cobj_x1
foreign import ccall "qtc_QItemEditorCreatorBase_createWidget_h" qtc_QItemEditorCreatorBase_createWidget_h :: Ptr (TQItemEditorCreatorBase a) -> Ptr (TQWidget t1) -> IO (Ptr (TQWidget ()))
instance QcreateWidget (QItemEditorCreatorBaseSc a) ((QWidget t1)) where
createWidget x0 (x1)
= withQWidgetResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QItemEditorCreatorBase_createWidget_h cobj_x0 cobj_x1
instance QvaluePropertyName (QItemEditorCreatorBase ()) (()) where
valuePropertyName x0 ()
= withStringResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QItemEditorCreatorBase_valuePropertyName_h cobj_x0
foreign import ccall "qtc_QItemEditorCreatorBase_valuePropertyName_h" qtc_QItemEditorCreatorBase_valuePropertyName_h :: Ptr (TQItemEditorCreatorBase a) -> IO (Ptr (TQString ()))
instance QvaluePropertyName (QItemEditorCreatorBaseSc a) (()) where
valuePropertyName x0 ()
= withStringResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QItemEditorCreatorBase_valuePropertyName_h cobj_x0
qItemEditorCreatorBase_delete :: QItemEditorCreatorBase a -> IO ()
qItemEditorCreatorBase_delete x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QItemEditorCreatorBase_delete cobj_x0
foreign import ccall "qtc_QItemEditorCreatorBase_delete" qtc_QItemEditorCreatorBase_delete :: Ptr (TQItemEditorCreatorBase a) -> IO ()
qItemEditorCreatorBase_delete1 :: QItemEditorCreatorBase a -> IO ()
qItemEditorCreatorBase_delete1 x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QItemEditorCreatorBase_delete1 cobj_x0
foreign import ccall "qtc_QItemEditorCreatorBase_delete1" qtc_QItemEditorCreatorBase_delete1 :: Ptr (TQItemEditorCreatorBase a) -> IO ()
| keera-studios/hsQt | Qtc/Gui/QItemEditorCreatorBase.hs | bsd-2-clause | 5,401 | 0 | 14 | 712 | 1,278 | 655 | 623 | -1 | -1 |
module Assembler where
import Parser
import qualified Data.ByteString.Lazy as B
import Data.Binary.Put
import Data.Bits
import Data.Word
import Data.Char as C
import qualified Data.Map as Map
import Control.Monad (forM)
import System.IO (openBinaryFile, IOMode(..), hClose)
type LabelMap = Map.Map String Int
genRegisterHex :: Expr -> Word16
genRegisterHex (Register "A") = 0x00 :: Word16
genRegisterHex (Register "B") = 0x01 :: Word16
genRegisterHex (Register "C") = 0x02 :: Word16
genRegisterHex (Register "X") = 0x03 :: Word16
genRegisterHex (Register "Y") = 0x04 :: Word16
genRegisterHex (Register "Z") = 0x05 :: Word16
genRegisterHex (Register "I") = 0x06 :: Word16
genRegisterHex (Register "J") = 0x07 :: Word16
genMemLocHex :: Expr -> Word16
genMemLocHex (MemLocation "A") = 0x08 :: Word16
genMemLocHex (MemLocation "B") = 0x09 :: Word16
genMemLocHex (MemLocation "C") = 0x0a :: Word16
genMemLocHex (MemLocation "X") = 0x0b :: Word16
genMemLocHex (MemLocation "Y") = 0x0c :: Word16
genMemLocHex (MemLocation "Z") = 0x0d :: Word16
genMemLocHex (MemLocation "I") = 0x0e :: Word16
genMemLocHex (MemLocation "J") = 0x0f :: Word16
genMemLocHex (MemLocation i) = read i :: Word16
genMemOffsetHex :: Expr -> Word16
genMemOffsetHex (MemOffset i "A") = 0x10 :: Word16
genMemOffsetHex (MemOffset i "B") = 0x11 :: Word16
genMemOffsetHex (MemOffset i "C") = 0x12 :: Word16
genMemOffsetHex (MemOffset i "X") = 0x13 :: Word16
genMemOffsetHex (MemOffset i "Y") = 0x14 :: Word16
genMemOffsetHex (MemOffset i "Z") = 0x15 :: Word16
genMemOffsetHex (MemOffset i "I") = 0x16 :: Word16
genMemOffsetHex (MemOffset i "J") = 0x17 :: Word16
genLiteralHex (Literal i) = (read i :: Word16) + 32
genAddressHex (Address i) = read i :: Word16
genIdentHex ident = case ident of
MemLocation _ -> genMemLocHex ident
Register _ -> genRegisterHex ident
Literal _ -> genLiteralHex ident
Address _ -> genAddressHex ident
genCmdHex :: Binop -> Word16
genCmdHex SET = 0x1 :: Word16
genCmdHex ADD = 0x2 :: Word16
genCmdHex SUB = 0x3 :: Word16
genCmdHex MUL = 0x4 :: Word16
genCmdHex DIV = 0x5 :: Word16
genCmdHex MOD = 0x6 :: Word16
genCmdHex SHL = 0x7 :: Word16
genCmdHex SHR = 0x8 :: Word16
genCmdHex AND = 0x9 :: Word16
genCmdHex BOR = 0xa :: Word16
genCmdHex XOR = 0xb :: Word16
genCmdHex IFE = 0xc :: Word16
genCmdHex IFN = 0xd :: Word16
genCmdHex IFG = 0xe :: Word16
genCmdHex IFB = 0xf :: Word16
genUnCmdHex :: Unop -> Word16
genUnCmdHex JSR = (0x01 :: Word16) `shiftL` 4
assembleOpcode a = [genCmdHex a]
assembleUnOpcode a = [genUnCmdHex a]
assembleOperand :: Int -> Expr -> [Word16] -> [Word16]
assembleOperand shft a (op:rest) =
case a of
MemLocation a ->
case isAlpha $ head a of
False ->
(op .|. (0x1E `shiftL` shft)) : rest ++
[(genMemLocHex (MemLocation a))]
True ->
(op .|. (genMemLocHex (MemLocation a)) `shiftL` shft)
: rest
Address a ->
(op .|. (0x1F `shiftL` shft)) : rest ++
[genAddressHex (Address a)]
MemOffset a b ->
(op .|. ((genMemOffsetHex (MemOffset a b)) `shiftL` shft)) : rest ++
[genAddressHex (Address a)]
otherwise ->
[op .|. ((genIdentHex a) `shiftL` shft)]
assembleFst = assembleOperand 4
assembleSnd = assembleOperand 10
assemble :: Expr -> [Word16]
assemble (Bin cmd (BinArg a b)) =
assembleSnd b $ assembleFst a $ assembleOpcode cmd
assemble (Un cmd (OneArg a)) =
assembleSnd a $ assembleUnOpcode cmd
assemble (Label name exprs) = concat $ map assemble exprs
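-- Illustrative sketch (not in the original source): assembling @SET A, 1@.
-- Register A encodes as 0x0 in the first operand field and the small literal 1
-- encodes inline as 0x21 in the second field, so a single word is expected:
--
-- > assemble (Bin SET (BinArg (Register "A") (Literal "1")))
-- > -- expected to yield [0x8401]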
calculateLabelLocs labels =
foldl calcLabel [] labels
  where calcLabel lst (Label name exprs) =
          (name, (length $ assemble (Label name exprs))
           -- the accumulator starts empty, so the first label begins at offset 0
           + (if null lst then 0 else snd (head lst))) : lst
isLabel (Label _ _) = True
isLabel _ = False
assembleFromFile path = do
instructions <- parseAssemblerFile path
case (head instructions) of
Error err -> return []
otherwise -> return (concat $ map assemble instructions)
writeInstruction instr = do
return $ putWord16host instr
serializeInstructions instrs = do
return $ map runPut instrs
writeAssembledFile inPath = do
instrs <- assembleFromFile inPath
written <- forM instrs writeInstruction
serialized <- serializeInstructions written
outh <- openBinaryFile (genPath inPath) WriteMode
forM serialized (B.hPut outh)
hClose outh
where genPath inp = concat $ (takeWhile notPeriod inp) : [".bin"]
notPeriod c =
if c /= '.' then True else False
| mrgaaron/dcpu-16 | src/haskell/Assembler.hs | bsd-2-clause | 4,802 | 0 | 18 | 1,250 | 1,641 | 854 | 787 | 119 | 5 |
{-# language CPP #-}
-- | = Name
--
-- VK_NVX_multiview_per_view_attributes - device extension
--
-- == VK_NVX_multiview_per_view_attributes
--
-- [__Name String__]
-- @VK_NVX_multiview_per_view_attributes@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 98
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- - Requires @VK_KHR_multiview@
--
-- [__Contact__]
--
-- - Jeff Bolz
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_NVX_multiview_per_view_attributes] @jeffbolznv%0A<<Here describe the issue or question you have about the VK_NVX_multiview_per_view_attributes extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2017-01-13
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Interactions and External Dependencies__]
--
-- - This extension requires
-- <https://htmlpreview.github.io/?https://github.com/KhronosGroup/SPIRV-Registry/blob/master/extensions/NV/SPV_NVX_multiview_per_view_attributes.html SPV_NVX_multiview_per_view_attributes>
--
-- - This extension provides API support for
-- <https://github.com/KhronosGroup/GLSL/blob/master/extensions/nvx/GL_NVX_multiview_per_view_attributes.txt GL_NVX_multiview_per_view_attributes>
--
-- - This extension interacts with @VK_NV_viewport_array2@.
--
-- [__Contributors__]
--
-- - Jeff Bolz, NVIDIA
--
-- - Daniel Koch, NVIDIA
--
-- == Description
--
-- This extension adds a new way to write shaders to be used with multiview
-- subpasses, where the attributes for all views are written out by a
-- single invocation of the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#pipeline-graphics-subsets-pre-rasterization pre-rasterization shader stages>.
-- Related SPIR-V and GLSL extensions
-- @SPV_NVX_multiview_per_view_attributes@ and
-- @GL_NVX_multiview_per_view_attributes@ introduce per-view position and
-- viewport mask attributes arrays, and this extension defines how those
-- per-view attribute arrays are interpreted by Vulkan. Pipelines using
-- per-view attributes /may/ only execute the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#pipeline-graphics-subsets-pre-rasterization pre-rasterization shader stages>
-- once for all views rather than once per-view, which reduces redundant
-- shading work.
--
-- A subpass creation flag controls whether the subpass uses this
-- extension. A subpass /must/ either exclusively use this extension or not
-- use it at all.
--
-- Some Vulkan implementations only support the position attribute varying
-- between views in the X component. A subpass can declare via a second
-- creation flag whether all pipelines compiled for this subpass will obey
-- this restriction.
--
-- Shaders that use the new per-view outputs (e.g. @gl_PositionPerViewNV@)
-- /must/ also write the non-per-view output (@gl_Position@), and the
-- values written /must/ be such that @gl_Position =
-- gl_PositionPerViewNV[gl_ViewIndex]@ for all views in the subpass.
-- Implementations are free to either use the per-view outputs or the
-- non-per-view outputs, whichever would be more efficient.
--
-- If @VK_NV_viewport_array2@ is not also supported and enabled, the
-- per-view viewport mask /must/ not be used.
--
-- == New Structures
--
-- - Extending
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceProperties2':
--
-- - 'PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX'
--
-- == New Enum Constants
--
-- - 'NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME'
--
-- - 'NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX'
--
-- - Extending
-- 'Vulkan.Core10.Enums.SubpassDescriptionFlagBits.SubpassDescriptionFlagBits':
--
-- - 'Vulkan.Core10.Enums.SubpassDescriptionFlagBits.SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX'
--
-- - 'Vulkan.Core10.Enums.SubpassDescriptionFlagBits.SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX'
--
-- == New Built-In Variables
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#interfaces-builtin-variables-positionperview PositionPerViewNV>
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#interfaces-builtin-variables-viewportmaskperview ViewportMaskPerViewNV>
--
-- == New SPIR-V Capabilities
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#spirvenv-capabilities-table-PerViewAttributesNV PerViewAttributesNV>
--
-- == Examples
--
-- > #version 450 core
-- >
-- > #extension GL_KHX_multiview : enable
-- > #extension GL_NVX_multiview_per_view_attributes : enable
-- >
-- > layout(location = 0) in vec4 position;
-- > layout(set = 0, binding = 0) uniform Block { mat4 mvpPerView[2]; } buf;
-- >
-- > void main()
-- > {
-- > // Output both per-view positions and gl_Position as a function
-- > // of gl_ViewIndex
-- > gl_PositionPerViewNV[0] = buf.mvpPerView[0] * position;
-- > gl_PositionPerViewNV[1] = buf.mvpPerView[1] * position;
-- > gl_Position = buf.mvpPerView[gl_ViewIndex] * position;
-- > }
--
-- == Version History
--
-- - Revision 1, 2017-01-13 (Jeff Bolz)
--
-- - Internal revisions
--
-- == See Also
--
-- 'PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_NVX_multiview_per_view_attributes Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_NVX_multiview_per_view_attributes ( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(..)
, NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION
, pattern NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION
, NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME
, pattern NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME
) where
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import Foreign.Ptr (Ptr)
import Data.Kind (Type)
import Vulkan.Core10.FundamentalTypes (bool32ToBool)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX))
-- | VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX - Structure
-- describing multiview limits that can be supported by an implementation
--
-- = Description
--
-- If the 'PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX' structure
-- is included in the @pNext@ chain of the
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceProperties2'
-- structure passed to
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceProperties2',
-- it is filled in with each corresponding implementation-dependent
-- property.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_NVX_multiview_per_view_attributes VK_NVX_multiview_per_view_attributes>,
-- 'Vulkan.Core10.FundamentalTypes.Bool32',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
{ -- | #limits-perViewPositionAllComponents# @perViewPositionAllComponents@ is
-- 'Vulkan.Core10.FundamentalTypes.TRUE' if the implementation supports
-- per-view position values that differ in components other than the X
-- component.
perViewPositionAllComponents :: Bool }
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX)
#endif
deriving instance Show PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
instance ToCStruct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX where
withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (perViewPositionAllComponents))
f
cStructSize = 24
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (zero))
f
instance FromCStruct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX where
peekCStruct p = do
perViewPositionAllComponents <- peek @Bool32 ((p `plusPtr` 16 :: Ptr Bool32))
pure $ PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
(bool32ToBool perViewPositionAllComponents)
instance Storable PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX where
sizeOf ~_ = 24
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX where
zero = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
zero
type NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION = 1
-- No documentation found for TopLevel "VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION"
pattern NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION :: forall a . Integral a => a
pattern NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION = 1
type NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME = "VK_NVX_multiview_per_view_attributes"
-- No documentation found for TopLevel "VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME"
pattern NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME = "VK_NVX_multiview_per_view_attributes"
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_NVX_multiview_per_view_attributes.hs | bsd-3-clause | 11,223 | 0 | 14 | 1,654 | 1,020 | 656 | 364 | -1 | -1 |
{-|
Module :
Description :
Copyright : (c) Alexander Vieth, 2015
Licence : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable (GHC only)
-}
{-# LANGUAGE AutoDeriveTypeable #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE KindSignatures #-}
module Types.SomeFunctorial (
SomeFunctorial
, discardValue
) where
-- | A class to indicate that a value is functorial with a particular parameter
-- specified. Some examples:
--
-- SomeFunctorial [Bool] []
-- SomeFunctorial (IO ()) IO
-- SomeFunctorial (Maybe Int) Maybe
--
class Functor f => SomeFunctorial (v :: *) (f :: * -> *) where
discardValue :: v -> f ()
instance Functor f => SomeFunctorial (f t) f where
discardValue term = fmap (const ()) term
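-- Illustrative uses of the instance above (any Functor works):
--
-- > discardValue [1, 2, 3]  -- [(), (), ()]
-- > discardValue (Just 'a') -- Just ()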
| avieth/Relational | Types/SomeFunctorial.hs | bsd-3-clause | 833 | 0 | 9 | 179 | 118 | 67 | 51 | 11 | 0 |
module Data.Kiln.Examples where
import Data.Kiln
import Control.Arrow
import Data.List
import Data.Functor.Compose
import Data.Traversable
import Data.Foldable
import Control.Applicative
-- | Apply a function to the value inside a Compose.
composedly :: (f (g a) -> f' (g' a')) -> Compose f g a -> Compose f' g' a'
composedly f = Compose . f . getCompose
-- Mutable singly-linked lists built from cons-cells
type MSLL s a = Clay s (Compose ((,) a) Maybe)
type SLL a = Fix (Compose ((,) a) Maybe)
cons :: a -> Maybe (MSLL s a) -> Squishy s (MSLL s a)
cons car cdr = newClay (Compose (car, cdr))
setCar :: MSLL s a -> a -> Squishy s ()
setCar x = modifyClay x . composedly . first . const
setCdr :: MSLL s a -> Maybe (MSLL s a) -> Squishy s ()
setCdr x = modifyClay x . composedly . second . const
list1 :: SLL Char
list1 = runKilning $ do
a <- cons 'a' Nothing
b <- cons 'b' (Just a)
c <- cons 'c' (Just b)
setCdr a $ Just c
return c
sllToList :: SLL a -> [a]
sllToList sll = case (getCompose . unFix) sll of
(x,Nothing) -> [x]
(x,Just xs) -> x : sllToList xs
-- Mutable graphs with node and edge labels
type MNode s n e = Clay s (Compose (Compose ((,) n) []) ((,) e))
type Node n e = Fix (Compose (Compose ((,) n) []) ((,) e))
node :: n -> [(e, MNode s n e)] -> Squishy s (MNode s n e)
node n list = newClay (Compose (Compose (n,list)))
emptyNode :: n -> Squishy s (MNode s n e)
emptyNode n = node n []
readNode :: MNode s n e -> Squishy s (n, [(e, MNode s n e)])
readNode = fmap (getCompose . getCompose) . readClay
relabelNode :: n -> MNode s n e -> Squishy s ()
relabelNode n = flip modifyClay (composedly . composedly . first . const $ n)
editEdges :: ([(e, MNode s n e)] -> [(e, MNode s n e)]) -> MNode s n e -> Squishy s ()
editEdges f = flip modifyClay (composedly . composedly . second $ f)
addEdge :: e -> MNode s n e -> MNode s n e -> Squishy s ()
addEdge label from to = editEdges ((label, to) :) from
graph1 :: Node String String
graph1 = runKilning $ do
a <- emptyNode "a"
b <- emptyNode "b"
c <- emptyNode "c"
d <- emptyNode "d"
addEdge "a -> b" a b
addEdge "b -> c" b c
addEdge "c -> d" c d
addEdge "c -> a" c a
addEdge "d -> a" d a
return a
| kwf/data-kiln | Data/Kiln/Examples.hs | bsd-3-clause | 2,240 | 0 | 12 | 536 | 1,104 | 557 | 547 | 55 | 2 |
-- | Tools for propositional logic, including an interface to the MiniSat SAT solver.
module Language.TRS.Prop
( Formula (Var, Const, Not, And, Or)
, xor
, imply
, equiv
, mux
, fullAdd
, fullSub
, sat
) where
import qualified Data.Set as Set
import qualified Data.Map as Map
import Data.Maybe
import System.Cmd
import System.IO
import System.Process
import Language.TRS.Utils
data Formula a
= Var a
| Const Bool
| Label (Formula a)
| Not (Formula a)
| And (Formula a) (Formula a)
| Or (Formula a) (Formula a) deriving (Ord, Eq, Show)
xor :: Formula a -> Formula a -> Formula a
xor a b = Or (And a (Not b)) (And (Not a) b)
imply :: Formula a -> Formula a -> Formula a
imply a b = Or (Not a) b
equiv :: Formula a -> Formula a -> Formula a
equiv a b = And (imply a b) (imply b a)
mux :: Formula a -> Formula a -> Formula a -> Formula a
mux s h l = Or (And s h) (And (Not s) l)
fullAdd :: Formula a -> Formula a -> Formula a -> (Formula a, Formula a)
fullAdd a b c = (xor (xor a b) c, Or (And (xor a b) c) (And a b))
fullSub :: Formula a -> Formula a -> Formula a -> (Formula a, Formula a)
fullSub a b c = (xor (xor a b) c, Or (And (Not (xor a b)) c) (And (Not a) b))
sat :: (Show a, Ord a) => Formula a -> IO (Maybe [a])
sat f = case cnf of
Const True -> return $ Just []
Const False -> return Nothing
_ -> do
h <- openFile "minisat_input" WriteMode
formatCNF h cnf
hClose h
(i,o,e,p) <- runInteractiveCommand "minisat minisat_input minisat_output"
waitForProcess p
hClose i
hClose o
hClose e
r <- readFileNow "minisat_output"
system "rm minisat_input minisat_output"
return $ results r
where
cnf = toCNF f
(vars, labels) = variables cnf
varNum = Set.size vars
varIds = Map.fromList $ zip (Set.toList vars) [1 .. varNum]
idVars = Map.fromList $ zip [1 .. varNum] (Set.toList vars)
labIds = Map.fromList $ zip (Set.toList labels) [varNum + 1 .. varNum + Set.size labels]
results s = if l !! 0 == "UNSAT" then Nothing else Just $ mapMaybe f w
where
l = lines s
w = init $ words $ l !! 1
f n = if n' < 0 || n' > varNum then Nothing else Just $ idVars Map.! n'
where
n' :: Int
n' = read n
formatCNF h f = ands f
where
ands (And a b) = ands a >> ands b
ands f = ors f >> hPutStrLn h " 0"
ors (Or a b) = ors a >> hPutStr h " " >> ors b
ors (Var a) = hPutStr h (show (varIds Map.! a))
ors (Not (Var a)) = hPutStr h ("-" ++ show (varIds Map.! a))
ors (Label a) = hPutStr h (show (labIds Map.! a))
ors (Not (Label a)) = hPutStr h ("-" ++ show (labIds Map.! a))
ors _ = error $ "Formula is not in CNF."
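-- Illustrative call (requires the external @minisat@ executable on the PATH,
-- since 'sat' shells out to it and uses scratch files in the current directory):
--
-- > sat (And (Var "a") (Not (Var "b")))
-- > -- a satisfying assignment lists the variables set to True, e.g. Just ["a"]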
-- | Variables of 'Formula'.
variables :: Ord a => Formula a -> (Set.Set a, Set.Set (Formula a))
variables f = case f of
Const _ -> (Set.empty,Set.empty)
Var a -> (Set.singleton a, Set.empty)
Label a -> (Set.empty, Set.singleton a)
Not a -> variables a
And a b -> (Set.union a1 b1, Set.union a2 b2)
where
(a1,a2) = variables a
(b1,b2) = variables b
Or a b -> (Set.union a1 b1, Set.union a2 b2)
where
(a1,a2) = variables a
(b1,b2) = variables b
-- | Converts a 'Formula' to CNF.
toCNF :: Ord a => Formula a -> Formula a
toCNF f = if Set.null supportCNF then f' else And f' support
where
(f', supportCNF) = tseitin $ buryNot $ constProp f
support = foldl1 And $ map (foldl1 Or . Set.toList) $ Set.toList supportCNF
-- | Labeled conversion to CNF. (newFormula, cnfAccumulation)
tseitin :: Ord a => Formula a -> (Formula a, Set.Set (Set.Set (Formula a)))
tseitin f = case f of
Const _ -> (f, Set.empty)
Var _ -> (f, Set.empty)
Label _ -> (f, Set.empty)
Not (Var _) -> (f, Set.empty)
Not (Label _) -> (f, Set.empty)
Not _ -> error "tseitin: constProp or buryNot was not applied before tseitin."
And a b -> (x, Set.union xSet (Set.union aSet bSet))
where
(a',aSet) = tseitin a
(b',bSet) = tseitin b
x = Label $ And a' b'
xSet = Set.fromList
[ Set.fromList [Not x, a']
, Set.fromList [Not x, b']
, Set.fromList [x, buryNot (Not a'), buryNot (Not b')]
]
Or a b -> (x, Set.union xSet (Set.union aSet bSet))
where
(a',aSet) = tseitin a
(b',bSet) = tseitin b
x = Label $ Or a' b'
xSet = Set.fromList
[ Set.fromList [x, buryNot (Not a')]
, Set.fromList [x, buryNot (Not b')]
, Set.fromList [Not x, a', b']
]
{-
-- | Converts a 'Formula' to CNF.
toCNF :: Ord a => Formula a -> Formula a
toCNF f = if Set.null supportCNF then f' else And f' support
where
f' = tseitinRename f
supportCNF = tseitin (buryNot $ constProp f) Set.empty
support = foldl1 And $ map (foldl1 Or . Set.toList) $ Set.toList supportCNF
-- | Labeled conversion to CNF. Returns CNF accumulation.
tseitin :: Ord a => Formula a -> Set.Set (Set.Set (Formula a)) -> Set.Set (Set.Set (Formula a))
tseitin f s = case f of
Const _ -> s
Var _ -> s
Label _ -> s
Not (Var _) -> s
Not (Label _) -> s
Not _ -> error "tseitin: constProp or buryNot was not applied before tseitin."
And a b -> tseitin a $ tseitin b $ Set.union s xSet
where
a' = tseitinRename a
b' = tseitinRename b
x' = tseitinRename $ And a' b'
xSet = Set.fromList
[ Set.fromList [Not x', a']
, Set.fromList [Not x', b']
, Set.fromList [x', buryNot (Not a'), buryNot (Not b')]
]
Or a b -> tseitin a $ tseitin b $ Set.union s xSet
where
a' = tseitinRename a
b' = tseitinRename b
x' = tseitinRename $ Or a' b'
xSet = Set.fromList
[ Set.fromList [x', buryNot (Not a')]
, Set.fromList [x', buryNot (Not b')]
, Set.fromList [Not x', a', b']
]
tseitinRename :: Formula a -> Formula a
tseitinRename f = case f of
--And a b -> Label $ And (tseitinRename a) (tseitinRename b)
--Or a b -> Label $ Or (tseitinRename a) (tseitinRename b)
And _ _ -> Label f
Or _ _ -> Label f
f -> f
-}
-- | Pushes negations down to variables.
buryNot :: Formula a -> Formula a
buryNot f = case f of
Var i -> Var i
Const a -> Const a
Label a -> Label a
Not (Var a) -> Not (Var a)
Not (Const a) -> Not (Const a)
Not (Label a) -> Not (Label a)
Not (Not a) -> buryNot a
And a b -> And (buryNot a) (buryNot b)
Or a b -> Or (buryNot a) (buryNot b)
Not (And a b) -> Or (buryNot (Not a)) (buryNot (Not b))
Not (Or a b) -> And (buryNot (Not a)) (buryNot (Not b))
-- | Constant propagation.
constProp :: Formula a -> Formula a
constProp f = case f of
Var a -> Var a
Const a -> Const a
Label a -> Label a
Not a -> case constProp a of
(Const a) -> Const $ not a
a' -> Not a'
And a b -> case (constProp a, constProp b) of
(Const True, b') -> b'
(Const False, _) -> Const False
(a', Const True) -> a'
(_, Const False) -> Const False
(a',b') -> And a' b'
Or a b -> case (constProp a, constProp b) of
(Const True, _) -> Const True
(Const False, b') -> b'
(_, Const True) -> Const True
(a', Const False) -> a'
(a',b') -> Or a' b'
| tomahawkins/trs | Language/TRS/Prop.hs | bsd-3-clause | 7,172 | 0 | 16 | 2,019 | 2,795 | 1,395 | 1,400 | 147 | 15 |
{-
The License datatype. For more information about these and other
open-source licenses, you may visit <http://www.opensource.org/>.
The @.faction@ file allows you to specify a license file. Of course you can
use any license you like but people often pick common open source licenses
and it's useful if we can automatically recognise that (eg so we can display
it on the hackage web pages). So you can also specify the license itself in
the @.faction@ file from a short enumeration defined in this module. It
includes 'GPL', 'LGPL' and 'BSD3' licenses.
-}
module Distribution.License (
License(..),
knownLicenses,
) where
import Distribution.Version (Version(Version))
import Distribution.Text (Text(..), display)
import qualified Distribution.Compat.ReadP as Parse
import qualified Text.PrettyPrint as Disp
import Text.PrettyPrint ((<>))
import qualified Data.Char as Char (isAlphaNum)
-- |This datatype indicates the license under which your package is
-- released. It is also wise to add your license to each source file
-- using the license-file field. The 'AllRightsReserved' constructor
-- is not actually a license, but states that you are not giving
-- anyone else a license to use or distribute your work. The comments
-- below are general guidelines. Please read the licenses themselves
-- and consult a lawyer if you are unsure of your rights to release
-- the software.
--
data License =
--TODO: * remove BSD4
-- | GNU Public License. Source code must accompany alterations.
GPL (Maybe Version)
-- | Lesser GPL, Less restrictive than GPL, useful for libraries.
| LGPL (Maybe Version)
-- | 3-clause BSD license, newer, no advertising clause. Very free license.
| BSD3
-- | 4-clause BSD license, older, with advertising clause. You almost
-- certainly want to use the BSD3 license instead.
| BSD4
-- | The MIT license, similar to the BSD3. Very free license.
| MIT
-- | Holder makes no claim to ownership, least restrictive license.
| PublicDomain
-- | No rights are granted to others. Undistributable. Most restrictive.
| AllRightsReserved
-- | Some other license.
| OtherLicense
-- | Not a recognised license.
-- Allows us to deal with future extensions more gracefully.
| UnknownLicense String
deriving (Read, Show, Eq)
knownLicenses :: [License]
knownLicenses = [ GPL unversioned, GPL (version [2]), GPL (version [3])
, LGPL unversioned, LGPL (version [2,1]), LGPL (version [3])
, BSD3, MIT
, PublicDomain, AllRightsReserved, OtherLicense]
where
unversioned = Nothing
version v = Just (Version v [])
instance Text License where
disp (GPL version) = Disp.text "GPL" <> dispOptVersion version
disp (LGPL version) = Disp.text "LGPL" <> dispOptVersion version
disp (UnknownLicense other) = Disp.text other
disp other = Disp.text (show other)
parse = do
name <- Parse.munch1 (\c -> Char.isAlphaNum c && c /= '-')
version <- Parse.option Nothing (Parse.char '-' >> fmap Just parse)
return $! case (name, version :: Maybe Version) of
("GPL", _ ) -> GPL version
("LGPL", _ ) -> LGPL version
("BSD3", Nothing) -> BSD3
("BSD4", Nothing) -> BSD4
("MIT", Nothing) -> MIT
("PublicDomain", Nothing) -> PublicDomain
("AllRightsReserved", Nothing) -> AllRightsReserved
("OtherLicense", Nothing) -> OtherLicense
_ -> UnknownLicense $ name
++ maybe "" (('-':) . display) version
dispOptVersion :: Maybe Version -> Disp.Doc
dispOptVersion Nothing = Disp.empty
dispOptVersion (Just v) = Disp.char '-' <> disp v
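-- Illustrative round trips for the 'Text' instance above:
--
-- > display BSD3 == "BSD3"
-- > display (GPL (Just (Version [3] []))) == "GPL-3"
-- > -- Distribution.Text.simpleParse "LGPL-2.1" should give
-- > -- Just (LGPL (Just (Version [2,1] [])))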
| IreneKnapp/Faction | libfaction/Distribution/License.hs | bsd-3-clause | 3,837 | 0 | 16 | 959 | 683 | 384 | 299 | 49 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Bort.Application
( runBot
, ApplicationSettings
, sendCommand
) where
import Control.Applicative ((<$>), pure)
import Control.Lens (view,(^.))
import Control.Monad (forM_)
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Reader (runReaderT)
import Data.Conduit
import Data.Conduit.Network
import Data.Traversable (sequenceA)
import Bort.ServerMessage (serverMessage, serverPing)
import Bort.Types
import qualified Data.ByteString.Char8 as C
runBot :: ApplicationSettings -> IO ()
runBot appSettings = let cs = clientSettings (appSettings^.port) (appSettings^.host)
in runTCPClient cs (ircApp appSettings)
ircApp :: ApplicationSettings -> AppData -> IO ()
ircApp settings appData = flip runReaderT settings $ do
let source = appSource appData
sink = appSink appData
yield "" $= attach $$ sink
source $= botHandler $$ sink
cmdPrefix :: Bot C.ByteString C.ByteString C.ByteString
cmdPrefix = lift $ C.concat <$> sequenceA [pure ":", view name, pure "!~", view name]
-- Awaits input from the source forever, passing PRIVMSG commands to each of the
-- provided handlers.
botHandler :: BotConduit C.ByteString C.ByteString
botHandler = do
hs <- lift $ view handlers
awaitForever $ \bs -> do
liftIO $ C.putStr bs
let ping = serverPing (C.words bs)
maybe (return ()) respondToPing ping
let msg = serverMessage (C.words bs)
forM_ hs $ \handle -> maybe (return ()) handle msg
respondToPing :: C.ByteString -> BotConduit C.ByteString C.ByteString
respondToPing ping = sendCommand (Pong ping)
sendCommand :: IrcCommand C.ByteString -> BotConduit C.ByteString C.ByteString
sendCommand cmd = do
prefix <- cmdPrefix
yield . (`C.append` "\r\n") $ case cmd of
Nick n -> C.unwords [prefix, "NICK", C.cons ':' n]
User u r -> C.unwords [prefix, "USER", u, "0", "*", C.cons ':' r]
Ping p -> C.unwords [prefix, "PING", C.cons ':' p]
Join r -> C.unwords [prefix, "JOIN", C.cons ':' r]
PrivMsg m -> C.unwords [prefix, "PRIVMSG", roomName m, C.cons ':' (message m)]
Quit (Just r) -> C.unwords [prefix, "QUIT", C.cons ':' r]
Quit Nothing -> C.unwords [prefix, "QUIT" ]
Topic t -> C.unwords [prefix, "TOPIC", C.cons ':' t]
Pong p -> C.unwords [prefix, "PONG", C.cons ':' p]
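-- Illustrative (hypothetical server name): sending a PING through the conduit
-- emits a line such as @:<name>!~<name> PING :irc.example.net\r\n@, where
-- @<name>@ comes from the application settings.
--
-- > sendCommand (Ping "irc.example.net")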
attach :: BotConduit C.ByteString C.ByteString
attach = do
n <- lift $ view name
rs <- lift $ view rooms
mapM_ sendCommand $ [ Nick n
, User n n
, Ping "ping"
] ++ map Join rs
| breestanwyck/bort | src/Bort/Application.hs | bsd-3-clause | 2,766 | 0 | 17 | 701 | 967 | 496 | 471 | 60 | 9 |
module Physics.Falling2d.RigidBody2d
(
RigidBody2d
, OrderedRigidBody2d
)
where
import Data.Vect.Double.Base
import Physics.Falling.RigidBody.RigidBody
import Physics.Falling.RigidBody.OrderedRigidBody
import Physics.Falling2d.InertiaTensor2d
import Physics.Falling2d.Vec1
import Physics.Falling2d.Shape2d
type RigidBody2d = RigidBody Proj3
Vec2
Vec1
InertiaTensor2d
InverseInertiaTensor2d
DynamicShape2d
StaticShape2d
TransformedDynamicShape2d
TransformedStaticShape2d
type OrderedRigidBody2d identifierType = OrderedRigidBody identifierType
Proj3
Vec2
Vec1
InertiaTensor2d
InverseInertiaTensor2d
DynamicShape2d
StaticShape2d
TransformedDynamicShape2d
TransformedStaticShape2d
| sebcrozet/falling2d | Physics/Falling2d/RigidBody2d.hs | bsd-3-clause | 1,435 | 0 | 5 | 805 | 107 | 65 | 42 | 29 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Network.HTTP2.Arch.HPACK (
hpackEncodeHeader
, hpackEncodeHeaderLoop
, hpackDecodeHeader
, hpackDecodeTrailer
, just
, fixHeaders
) where
import qualified Control.Exception as E
import Network.ByteOrder
import qualified Network.HTTP.Types as H
import Imports
import Network.HPACK
import Network.HPACK.Token
import Network.HTTP2.Arch.Context
import Network.HTTP2.Frame
-- $setup
-- >>> :set -XOverloadedStrings
----------------------------------------------------------------
fixHeaders :: H.ResponseHeaders -> H.ResponseHeaders
fixHeaders hdr = deleteUnnecessaryHeaders hdr
deleteUnnecessaryHeaders :: H.ResponseHeaders -> H.ResponseHeaders
deleteUnnecessaryHeaders hdr = filter del hdr
where
del (k,_) = k `notElem` headersToBeRemoved
headersToBeRemoved :: [H.HeaderName]
headersToBeRemoved = [ H.hConnection
, "Transfer-Encoding"
-- Keep-Alive
-- Proxy-Connection
-- Upgrade
]
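-- For example (illustrative):
--
-- > fixHeaders [(H.hConnection, "close"), (H.hContentType, "text/plain")]
-- > --   == [(H.hContentType, "text/plain")]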
----------------------------------------------------------------
strategy :: EncodeStrategy
strategy = EncodeStrategy { compressionAlgo = Linear, useHuffman = False }
-- Set-Cookie: contains only one cookie value.
-- So, we don't need to split it.
hpackEncodeHeader :: Context -> Buffer -> BufferSize
-> TokenHeaderList
-> IO (TokenHeaderList, Int)
hpackEncodeHeader Context{..} buf siz ths =
encodeTokenHeader buf siz strategy True encodeDynamicTable ths
hpackEncodeHeaderLoop :: Context -> Buffer -> BufferSize
-> TokenHeaderList
-> IO (TokenHeaderList, Int)
hpackEncodeHeaderLoop Context{..} buf siz hs =
encodeTokenHeader buf siz strategy False encodeDynamicTable hs
----------------------------------------------------------------
hpackDecodeHeader :: HeaderBlockFragment -> Context -> IO HeaderTable
hpackDecodeHeader hdrblk ctx = do
tbl@(_,vt) <- hpackDecodeTrailer hdrblk ctx
if isClient ctx || checkRequestHeader vt then
return tbl
else
E.throwIO $ ConnectionError ProtocolError "the header key is illegal"
hpackDecodeTrailer :: HeaderBlockFragment -> Context -> IO HeaderTable
hpackDecodeTrailer hdrblk Context{..} = decodeTokenHeader decodeDynamicTable hdrblk `E.catch` handl
where
handl IllegalHeaderName =
E.throwIO $ ConnectionError ProtocolError "the header key is illegal"
handl _ =
E.throwIO $ ConnectionError CompressionError "cannot decompress the header"
{-# INLINE checkRequestHeader #-}
checkRequestHeader :: ValueTable -> Bool
checkRequestHeader reqvt
| just mMethod (== "CONNECT") = isNothing mPath && isNothing mScheme
| isJust mStatus = False
| isNothing mMethod = False
| isNothing mScheme = False
| isNothing mPath = False
| mPath == Just "" = False
| isJust mConnection = False
| just mTE (/= "trailers") = False
| otherwise = True
where
mStatus = getHeaderValue tokenStatus reqvt
mScheme = getHeaderValue tokenScheme reqvt
mPath = getHeaderValue tokenPath reqvt
mMethod = getHeaderValue tokenMethod reqvt
mConnection = getHeaderValue tokenConnection reqvt
mTE = getHeaderValue tokenTE reqvt
{-# INLINE just #-}
just :: Maybe a -> (a -> Bool) -> Bool
just Nothing _ = False
just (Just x) p
| p x = True
| otherwise = False
| kazu-yamamoto/http2 | Network/HTTP2/Arch/HPACK.hs | bsd-3-clause | 3,574 | 0 | 10 | 854 | 777 | 408 | 369 | 73 | 2 |
module Main where
import Test.Hspec
import Day2
spec :: Spec
spec = do
describe "Day2" $ do
context "parseInstrs" $ do
it "should return correct result" $ do
parseInstrs ["UDRL", "DDUULLRR"] `shouldBe` [[U,D,R,L],[D,D,U,U,L,L,R,R]]
context "getFinalPosition" $ do
it "should return (0,0) for ULL starting at (1,1)" $ do
getFinalPosition nextPos9sq (1,1) [U,L,L] `shouldBe` (0,0)
it "should return (2,2) for RRDD starting at (0,0)" $ do
getFinalPosition nextPos9sq (0,0) [R,R,D,D] `shouldBe` (2,2)
it "should return (2,1) for LURDL starting at (2,2)" $ do
getFinalPosition nextPos9sq (2,2) [L,U,R,D,L] `shouldBe` (2,1)
it "should return (1,1) for UUUUD starting at (2,1)" $ do
getFinalPosition nextPos9sq (2,1) [U,U,U,U,D] `shouldBe` (1,1)
it "should return (2,0) for ULL starting at (2,0)" $ do
getFinalPosition nextPos25sq (2,0) [U,L,L] `shouldBe` (2,0)
it "should return (4,2) for RRDD starting at (2,0)" $ do
getFinalPosition nextPos25sq (2,0) [R,R,D,D] `shouldBe` (4,2)
context "getCode" $ do
it "should return correct result" $ do
getCode (parseInstrs ["ULL","RRDD","LURDL","UUUUD"]) `shouldBe` [1,9,8,5]
it "should return correct result for day2Input" $ do
getCode (parseInstrs day2Input) `shouldBe` [9,5,5,4,9]
it "should return correct result for day2Input with rot pad" $ do
getCodeRot (parseInstrs day2Input) `shouldBe` ["d","8","7","a","d"]
main :: IO ()
main = hspec spec
day2Input = [
"DLRURUDLULRDRUDDRLUUUDLDLDLRLRRDRRRLLLLLDDRRRDRRDRRRLRRURLRDUULRLRRDDLULRLLDUDLULURRLRLDUDLURURLDRDDULDRDRDLDLLULULLDDLRRUDULLUULRRLLLURDRLDDLDDLDRLRRLLRURRUURRRRLUDLRDDDDRDULRLLDDUURDUDRLUDULLUDLUDURRDRDUUUUDDUDLLLRLUULRUURDLRLLRRLRLLDLLRLLRRRURLRRLURRLDLLLUUDURUDDLLUURRDRDRRDLLDDLLRDRDRRLURLDLDRDLURLDULDRURRRUDLLULDUDRURULDUDLULULRRRUDLUURRDURRURRLRRLLRDDUUUUUDUULDRLDLLRRUDRRDULLLDUDDUDUURLRDLULUUDLDRDUUUDDDUDLDURRULUULUUULDRUDDLLLDLULLRLRLUDULLDLLRLDLDDDUUDURDDDLURDRRDDLDRLLRLRR",
"RLDUDURDRLLLLDDRRRURLLLRUUDDLRDRDDDUDLLUDDLRDURLDRDLLDRULDDRLDDDRLDRDDDRLLULDURRRLULDRLRDRDURURRDUDRURLDRLURDRLUULLULLDLUDUDRDRDDLDDDDRDURDLUDRDRURUDDLLLRLDDRURLLUDULULDDLLLDLUDLDULUUDLRLURLDRLURURRDUUDLRDDDDDRLDULUDLDDURDLURLUURDLURLDRURRLDLLRRUDRUULLRLDUUDURRLDURRLRUULDDLDLDUUDDRLDLLRRRUURLLUURURRURRLLLUDLDRRDLUULULUDDULLUDRLDDRURDRDUDULUDRLRRRUULLDRDRLULLLDURURURLURDLRRLLLDRLDUDLLLLDUUURULDDLDLLRRUDDDURULRLLUDLRDLUUDDRDDLLLRLUURLDLRUURDURDDDLLLLLULRRRURRDLUDLUURRDRLRUDUUUURRURLRDRRLRDRDULLDRDRLDURDDUURLRUDDDDDLRLLRUDDDDDURURRLDRRUUUDLURUUDRRDLLULDRRLRRRLUUUD",
"RDRURLLUUDURURDUUULLRDRLRRLRUDDUDRURLLDLUUDLRLLDDURRURLUDUDDURLURLRRURLLURRUDRUDLDRLLURLRUUURRUDDDURRRLULLLLURDLRLLDDRLDRLLRRDLURDLRDLDUDRUULLDUUUDLURRLLRUDDDUUURLURUUDRLRULUURLLRLUDDLLDURULLLDURDLULDLDDUDULUDDULLRDRURDRRLLDLDDDDRUDLDRRLLLRLLLRRULDLRLRLRLLDLRDRDLLUDRDRULDUURRDDDRLLRLDLDRDUDRULUDRDLDLDDLLRULURLLURDLRRDUDLULLDLULLUDRRDDRLRURRLDUDLRRUUDLDRLRLDRLRRDURRDRRDDULURUUDDUUULRLDRLLDURRDLUULLUDRDDDLRUDLRULLDDDLURLURLRDRLLURRRUDLRRLURDUUDRLRUUDUULLRUUUDUUDDUURULDLDLURLRURLRUDLULLULRULDRDRLLLRRDLU",
"RRRRDRLUUULLLRLDDLULRUUURRDRDRURRUURUDUULRULULRDRLRRLURDRRRULUUULRRUUULULRDDLLUURRLLDUDRLRRLDDLDLLDURLLUDLDDRRURLDLULRDUULDRLRDLLDLRULLRULLUDUDUDDUULDLUUDDLUDDUULLLLLURRDRULURDUUUDULRUDLLRUUULLUULLLRUUDDRRLRDUDDRULRDLDLLLLRLDDRRRULULLLDLRLURRDULRDRDUDDRLRLDRRDLRRRLLDLLDULLUDDUDDRULLLUDDRLLRRRLDRRURUUURRDLDLURRDLURULULRDUURLLULDULDUDLLULDDUURRRLDURDLUDURLDDRDUDDLLUULDRRLDLLUDRDURLLDRLDDUDURDLUUUUURRUULULLURLDUUULLRURLLLUURDULLUULDRULLUULRDRUULLRUDLDDLRLURRUUDRLRRRULRUUULRULRRLDLUDRRLL",
"ULRLDLLURDRRUULRDUDDURDDDLRRRURLDRUDDLUDDDLLLRDLRLLRRUUDRRDRUULLLULULUUDRRRDRDRUUUUULRURUULULLULDULURRLURUDRDRUDRURURUDLDURUDUDDDRLRLLLLURULUDLRLDDLRUDDUUDURUULRLLLDDLLLLRRRDDLRLUDDUULRRLLRDUDLLDLRRUUULRLRDLRDUDLLLDLRULDRURDLLULLLRRRURDLLUURUDDURLDUUDLLDDRUUDULDRDRDRDDUDURLRRRRUDURLRRUDUDUURDRDULRLRLLRLUDLURUDRUDLULLULRLLULRUDDURUURDLRUULDURDRRRLLLLLUUUULUULDLDULLRURLUDLDRLRLRLRDLDRUDULDDRRDURDDULRULDRLRULDRLDLLUDLDRLRLRUDRDDR"]
| reidwilbur/aoc2016 | test/Day2Spec.hs | bsd-3-clause | 4,071 | 2 | 35 | 379 | 619 | 337 | 282 | 37 | 1 |
-- © 2001, 2002 Peter Thiemann
module Main where
import Prelude hiding (map, span, head, div)
import WASH.CGI.CGI
import qualified Persistent2 as P
counterStore :: CGI (P.T Int)
counterStore = P.init "Counter" 0
main =
run mainCGI
mainCGI =
forever counter
counter = do
counterHandle <- counterStore
counterValue <- P.get counterHandle
standardQuery "Counter" $ p $
do text "Current counter value "
text (show counterValue)
br empty
submit0 (count counterHandle (counterValue+1)) (fieldVALUE "Increment")
submit0 (count counterHandle (counterValue-1)) (fieldVALUE "Decrement")
count h n = do
r <- P.set h n
case r of
Just _ ->
return ()
Nothing ->
standardQuery "CounterMistake" $ p $
do text "Your attempt to set the counter to "
text (show n)
text " was not successful. "
text "Someone else was quicker :-)"
submit0 (return ()) (fieldVALUE "Continue")
| nh2/WashNGo | Examples/old/CounterWithBoundedLog.hs | bsd-3-clause | 942 | 2 | 16 | 222 | 315 | 151 | 164 | 31 | 2 |
import System.Environment (getArgs)
dsig :: Int -> Int
dsig 0 = 0
dsig x | mod x 10 > 0 = 2^(3 * mod x 10) + dsig (div x 10)
| otherwise = dsig (div x 10)
nextNu :: Int -> Int -> Int
nextNu x y | x == dsig y = y
| otherwise = nextNu x (y + 9)
nextNum :: Int -> Int
nextNum x = nextNu (dsig x) (x + 9)
main :: IO ()
main = do
[inpFile] <- getArgs
input <- readFile inpFile
putStr . unlines . map (show . nextNum . read) $ lines input
| nikai3d/ce-challenges | hard/next_number.hs | bsd-3-clause | 477 | 0 | 12 | 148 | 261 | 126 | 135 | 15 | 1 |
{-# LANGUAGE DeriveDataTypeable
, ScopedTypeVariables
, FlexibleInstances
, FlexibleContexts
#-}
{-# OPTIONS -IControl/Workflow #-}
{- | This module contains monadic combinators that express some workflow patterns.
see the docAprobal.hs example included in the package
EXAMPLE:
The fragment below describes the approval procedure for a document.
First the document reference is sent to a list of bosses through a queue.
They return a boolean in a return queue (askUser).
The booleans are summed up according to a monoid instance (sumUp).
If the result is false, the correctWF workflow is executed.
If the result is True, the pipeline continues to the next stage (`checkValidated`).
The next stage is the same process with a new list of users (superbosses).
There is a timeout of seven days. The result of the users that voted is summed
up according to the same monoid instance.
If the result is true, the document is added to the persistent list of approved documents.
If the result is false, the document is added to the persistent list of rejected documents (@checkValidated1@).
The program can be interrupted at any moment. The Workflow monad will restart
it at the point where it was interrupted.
This example uses queues from "Data.Persistent.Queue"
@docApprobal :: Document -> Workflow IO ()
docApprobal doc = `getWFRef` \>>= docApprobal1
docApprobal1 rdoc=
return True \>>=
log \"requesting approbal from bosses\" \>>=
`sumUp` 0 (map (askUser doc rdoc) bosses) \>>=
checkValidated \>>=
log \"requesting approbal from superbosses or timeout\" \>>=
`sumUp` (7*60*60*24) (map(askUser doc rdoc) superbosses) \>>=
checkValidated1
askUser _ _ user False = return False
askUser doc rdoc user True = do
`step` $ `push` (quser user) rdoc
`logWF` (\"wait for any response from the user: \" ++ user)
`step` . `pop` $ qdocApprobal (title doc)
log txt x = `logWF` txt >> return x
checkValidated :: Bool -> `Workflow` IO Bool
checkValidated val =
case val of
False -> correctWF (title doc) rdoc >> return False
_ -> return True
checkValidated1 :: Bool -> Workflow IO ()
checkValidated1 val = step $ do
case val of
False -> `push` qrejected doc
_ -> `push` qapproved doc
mapM (\u ->deleteFromQueue (quser u) rdoc) superbosses@
-}
module Control.Workflow.Patterns(
-- * Low level combinators
split, merge, select,
       -- * High level combinators
vote, sumUp, Select(..)
) where
import Control.Concurrent.STM
import Data.Monoid
import qualified Control.Monad.Catch as CMC
import Control.Workflow.Stat
import Control.Workflow
import Data.Typeable
import Prelude hiding (catch)
import Control.Monad
import Control.Monad.Trans
import Control.Concurrent
import Control.Exception.Extensible (Exception,SomeException)
import Data.RefSerialize
import Control.Workflow.Stat
import qualified Data.Vector as V
import Data.TCache
import Debug.Trace
import Data.Maybe
data ActionWF a= ActionWF (WFRef(Maybe a)) ThreadId -- (WFRef (String, Bool))
-- | spawn a list of independent workflow 'actions' with a seed value 'a'
-- The results are reduced by `merge` or `select`
split :: ( Typeable b
, Serialize b
, HasFork io
, CMC.MonadMask io)
=> [a -> Workflow io b] -> a -> Workflow io [ActionWF b]
split actions a = mapM (\ac ->
do
mv <- newWFRef Nothing
th<- fork (ac a >>= \v -> (step . liftIO . atomicallySync . writeWFRef mv . Just) v )
return $ ActionWF mv th )
actions
-- | wait for the results and apply the cond to produce a single output in the Workflow monad
merge :: ( MonadIO io
, Typeable a
, Typeable b
, Serialize a, Serialize b)
=> ([a] -> io b) -> [ActionWF a] -> Workflow io b
merge cond results= step $ mapM (\(ActionWF mv _ ) -> liftIO (atomically $ readWFRef1 mv) ) results >>= cond -- !> "cond"
readWFRef1 :: ( Serialize a
, Typeable a)
=> WFRef (Maybe a) -> STM a
readWFRef1 r = do
mv <- readWFRef r
case mv of
Just(Just v) -> return v -- !> "return v"
Just Nothing -> retry -- !> "retry"
Nothing -> error $ "readWFRef1: workflow not found "++ show r
data Select
= Select -- ^ select the source output
| Discard -- ^ Discard the source output
| Continue -- ^ Continue the source process
| FinishDiscard -- ^ Discard this output, kill all and return the selected outputs
| FinishSelect -- ^ Select this output, kill all and return the selected outputs
deriving(Typeable, Read, Show)
instance Exception Select
-- | select the outputs of the workflows produced by `split` constrained within a timeout.
-- The check filter can select, discard or finish the entire computation before
-- the timeout is reached. When the computation finalizes, it kills all
-- the pending workflows and returns the list of selected outputs.
-- The timeout is in seconds and it is in the workflow monad,
-- so it is possible to restart the process if interrupted,
-- so it can proceed for years.
--
-- This is necessary for modelling real-life institutional cycles such as political elections.
-- A timeout of 0 means no timeout.
select ::
( Serialize a
-- , Serialize [a]
, Typeable a
, HasFork io
, CMC.MonadMask io)
=> Integer
-> (a -> STM Select)
-> [ActionWF a]
-> Workflow io [a]
select timeout check actions= do
res <- liftIO $ newTVarIO $ V.generate(length actions) (const Nothing)
flag <- getTimeoutFlag timeout
parent <- liftIO myThreadId
checThreads <- liftIO $ newEmptyMVar
count <- liftIO $ newMVar 1
let process = do
let check' (ActionWF ac _) i = do
liftIO . atomically $ do
r <- readWFRef1 ac
b <- check r
case b of
Discard -> return ()
Select -> addRes i r
Continue -> addRes i r >> retry
FinishDiscard -> do
unsafeIOToSTM $ throwTo parent FinishDiscard
FinishSelect -> do
addRes i r
unsafeIOToSTM $ throwTo parent FinishDiscard
n <- liftIO $ do -- liftIO $ CMC.block $ do
n <- takeMVar count
putMVar count (n+1)
return n -- !> ("SELECT" ++ show n)
if ( n == length actions)
then liftIO $ throwTo parent FinishDiscard
else return ()
`CMC.catch` (\(e :: Select) -> liftIO $ throwTo parent e)
ws <- mapM (\(ac,i) -> fork $ check' ac i) $ zip actions [0..]
liftIO $ putMVar checThreads ws
liftIO $ atomically $ do
v <- readTVar flag -- wait fo timeout
case v of
False -> retry
True -> return ()
throw FinishDiscard
where
addRes i r= do
l <- readTVar res
writeTVar res $ l V.// [(i, Just r)]
let killall = liftIO $ do
ws <- readMVar checThreads
liftIO $ mapM_ killThread ws
liftIO $ mapM_ (\(ActionWF _ th) -> killThread th)actions -- !> "KILLALL"
step $ CMC.catch process -- (WF $ \s -> process >>= \ r -> return (s, r))
(\(e :: Select)-> do
liftIO $ return . catMaybes . V.toList =<< atomically ( readTVar res)
)
`CMC.finally` killall
justify str Nothing = error str
justify _ (Just x) = return x
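-- A minimal usage sketch (added for illustration; not part of the original API):
-- run several candidate workflows from the same seed and keep every result
-- produced within one hour. The candidate actions are supplied by the caller.
selectDemo :: ( Typeable b
              , Serialize b
              , HasFork io
              , CMC.MonadMask io)
           => [a -> Workflow io b] -> a -> Workflow io [b]
selectDemo candidates seed =
    split candidates seed >>= select 3600 (const $ return Continue)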
-- | spawn a list of workflows and reduce the results with the 'comp' parameter within a given timeout
--
-- @
-- vote timeout actions comp x=
--     split actions x >>= select timeout (const $ return Continue) >>= comp
-- @
vote
:: ( Serialize b
-- , Serialize [b]
, Typeable b
, HasFork io
, CMC.MonadMask io)
=> Integer
-> [a -> Workflow io b]
-> ([b] -> Workflow io c)
-> a
-> Workflow io c
vote timeout actions comp x=
split actions x >>= select timeout (const $ return Continue) >>= comp
-- | sum the outputs of a list of workflows according to its monoid definition
--
-- @ sumUp timeout actions = vote timeout actions (return . mconcat) @
sumUp
:: ( Serialize b
-- , Serialize [b]
, Typeable b
, Monoid b
, HasFork io
, CMC.MonadMask io)
=> Integer
-> [a -> Workflow io b]
-> a
-> Workflow io b
sumUp timeout actions = vote timeout actions (return . mconcat)
main= do
syncWrite SyncManual
r <- exec1 "sumup" $ sumUp 0 [f 1, f 2] "0"
print r
`CMC.catch` \(e:: SomeException) -> syncCache -- !> "syncCache"
f :: Int -> String -> Workflow IO String
f n s= step ( threadDelay ( 5000000 * n)) >> return ( s ++"1")
main2=do
syncWrite SyncManual
exec1 "split" $ split (take 10 $ repeat (step . print)) "hi" >>= merge (const $ return True)
main3=do
-- syncWrite SyncManual
refs <- exec1 "WFRef" $ do
refs <- replicateM 20 $ newWFRef Nothing --"bye initial valoe"
mapM (\r -> fork $ unsafeIOtoWF $ atomically $ writeWFRef r $ Just "hi final value") refs
return refs
mapM (\r -> liftIO (atomically $ readWFRef1 r) >>= print) refs
| agocorona/Workflow | Control/Workflow/Patterns.hs | bsd-3-clause | 9,653 | 0 | 27 | 2,937 | 1,903 | 966 | 937 | 157 | 7 |
module Data.Quantities.DefinitionParserSpec (spec) where
import Data.Quantities.Data (Definition(..), SimpleUnit(..), baseQuant)
import Data.Quantities.DefinitionParser
import Test.Hspec
{-# ANN module "HLint: ignore Redundant do" #-}
spec :: Spec
spec = do
describe "parseDefinitions" $ do
let mLine = "milli- = 1e-3 = m-"
milli = head $ parseDefinitions mLine
milliDef = PrefixDefinition "milli" 1e-3 ["m"]
it "read prefix definition" $ do
milli `shouldBe` milliDef
let lenLine = "meter = [length] = m"
len = head $ parseDefinitions lenLine
lenDef = BaseDefinition "meter" "length" ["m"]
it "read base definition" $ do
len `shouldBe` lenDef
let feetLine = "foot = 3.21 m = ft = feet"
feet = head $ parseDefinitions feetLine
feetDef = UnitDefinition "foot" q ["ft", "feet"]
q = baseQuant 3.21 [SimpleUnit "m" "" 1]
it "read unit definition" $ do
feet `shouldBe` feetDef
let allLines = unlines [mLine, lenLine, feetLine]
allDefs = [milliDef, lenDef, feetDef]
it "read multiple definitions" $ do
parseDefinitions allLines `shouldBe` allDefs
| jdreaver/quantities | test-suite/Data/Quantities/DefinitionParserSpec.hs | bsd-3-clause | 1,241 | 0 | 15 | 353 | 314 | 166 | 148 | 28 | 1 |
module Lens where
import Control.Lens
data P a = P String a deriving Show
val :: Lens (P a) (P b) a b
val f (P k v) = fmap (\b -> P k b) (f v)
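-- A small usage sketch (added for illustration; the values are arbitrary):
-- 'val' focuses on the payload of a 'P' and may change its type.
demoGet :: Int
demoGet = P "key" 1 ^. val                      -- 1

demoSet :: P String
demoSet = set val "x" (P "key" (1 :: Int))      -- P "key" "x"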
-- liftPV :: Applicative f => P a -> P (f a)
-- liftPV = val %~ pure
-- liftL :: Applicative f => Lens s t a b -> s -> t
-- liftL l = over l pure
| notae/haskell-exercise | cp/Lens.hs | bsd-3-clause | 293 | 0 | 8 | 81 | 96 | 53 | 43 | 5 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
module Control.Category.Structural.Rules where
import Language.Haskell.TH
import Language.Haskell.TH.Quote
import Language.Haskell.TH.Utilities
import Control.Arrow (arr)
import Control.Category.Structural
import Control.Category.Structural.Free
import Control.Category
import Prelude hiding (id,(.),fst,snd)
import Control.Category.Rules
import Control.Category.Free
import Control.Categorical.Bifunctor.Rules
import Control.Categorical.Bifunctor.Free
import Control.Categorical.Bifunctor
import Control.Category.Associative.Rules
import Control.Arrow.CCA.Free(category)
structural :: QuasiQuoter
structural = category $ [ bifunctor_ruleset ++ assoc_ruleset ++ struct_ruleset ++ category_ruleset',
category_ruleset' ++ bifunctor_ruleset ++ assoc_ruleset ++ struct_ruleset,
category_ruleset ++ bifunctor_ruleset ++ assoc_ruleset ++ struct_ruleset]
struct_ruleset :: [RuleE]
struct_ruleset =[struct_rules,struct_rules_bi,struct_weak,struct_rules_trav,struct_rules_rare]
struct_rules :: RuleE
struct_rules [rule| arr snd |] = into [| snd |]
struct_rules [rule| arr fst |] = into [| fst |]
struct_rules [rule| arr (snd >>> f) |] = into [| snd >>> arr $f |]
struct_rules [rule| arr (fst >>> f) |] = into [| fst >>> arr $f |]
struct_rules [rule| (\(x,y) -> z) |] | x_ == z_ = into [| fst |]
| y_ == z_ = into [| snd |]
| case x_ of
VarE a -> not $ nameOccursIn a z_
_ -> False
= into [| (snd >>> (\ $y -> $z )) |]
| case y_ of
VarE a -> not $ nameOccursIn a z_
_ -> False
= into [| (fst >>> (\ $x -> $z )) |]
| otherwise = nothing
struct_rules _ = nothing
struct_weak :: RuleE
struct_weak [rule| (\((a,b),(c,d)) -> (x,y)) |] | a_ == x_ && c_ == y_ = into [| fst *** fst |]
| a_ == x_ && d_ == y_ = into [| fst *** snd |]
| b_ == x_ && c_ == y_ = into [| snd *** fst |]
| b_ == x_ && d_ == y_ = into [| snd *** snd |]
| otherwise = return Nothing
struct_weak [rule| (\(x,y) -> (a,z)) |] | x_ == z_ = into [| swap >>> first (\ $y -> $a) |]
| y_ == a_ = into [| swap >>> second (\ $x -> $z) |]
| otherwise = return Nothing
struct_weak [rule| arr swap |] = into [| swap |]
struct_weak [rule| (f *** g) >>> snd |] = into [| snd >>> $g |]
struct_weak [rule| (f *** g) >>> fst |] = into [| fst >>> $f |]
struct_weak _ = return Nothing
struct_rules_bi :: RuleE
struct_rules_bi [rule| (fst >>> f) &&& (snd >>> g) |] = into [| $f *** $g |]
struct_rules_bi [rule| (snd >>> f) &&& (fst >>> g) |] = into [| swap >>> ($f *** $g) |]
struct_rules_bi [rule| fst &&& (snd >>> g) |] = into [| id *** $g |]
struct_rules_bi [rule| (fst >>> f) &&& snd |] = into [| $f *** id |]
struct_rules_bi [rule| snd &&& (fst >>> g) |] = into [| swap >>> (id *** $g) |]
struct_rules_bi [rule| (snd >>> f) &&& fst |] = into [| swap >>> ($f *** id) |]
struct_rules_bi [rule| (f &&& g) >>> fst |] = into [| $f |]
struct_rules_bi [rule| (f &&& g) >>> snd |] = into [| $g |]
struct_rules_bi [rule| diag >>> (f *** g) |] = into [| $f &&& $g |] -- TODO: is this sound?
struct_rules_bi [rule| fst &&& snd |] = into [| id |] -- or should this be `id *** id`
struct_rules_bi [rule| snd &&& fst |] = into [| swap |] -- or should this be `swap >>> id *** id`
struct_rules_bi [rule| id &&& id |] = into [| diag |]
struct_rules_bi [rule| f &&& id |] = into [| diag >>> first $f |]
struct_rules_bi [rule| id &&& g |] = into [| diag >>> second $g |]
struct_rules_bi [rule| (f &&& g) >>> swap |] = into [| $g &&& $f |]
struct_rules_bi [rule| (a >>> f) &&& (b >>> g) |] | a_ == b_ = into [| $a >>> ($f &&& $g) |] -- sound forall equalities?
| otherwise = return Nothing
struct_rules_bi [rule| a &&& (b >>> g) |] | a_ == b_ = into [| $a >>> (id &&& $g) |] -- sound forall equalities?
| otherwise = return Nothing
struct_rules_bi [rule| (a >>> f) &&& b |] | a_ == b_ = into [| $a >>> ($f &&& id) |] -- sound forall equalities?
| otherwise = return Nothing
struct_rules_bi [rule| diag >>> arr f |] = into [| arr ( $f . diag ) |] -- There are/should be no rules with diag on the right, thus this is sound
struct_rules_bi [rule| diag >>> first f >>> swap |] = into [| diag >>> second $f |]
struct_rules_bi [rule| diag >>> second f >>> swap |] = into [| diag >>> first $f |]
struct_rules_bi [rule| swap >>> first f |] = into [| second $f >>> swap |] -- bubble all swaps to the right
struct_rules_bi [rule| swap >>> second f |] = into [| first $f >>> swap |] -- bubble all swaps to the right
struct_rules_bi [rule| swap >>> (f *** g) |] = into [| ($g *** $f) >>> swap |] -- bubble all swaps to the right
struct_rules_bi [rule| first f >>> fst |] = into [| fst >>> $f |] -- bubble all fst to the left
struct_rules_bi [rule| second f >>> snd |] = into [| snd >>> $f |] -- bubble all snd to the left
struct_rules_bi [rule| first f >>> snd |] = into [| snd |]
struct_rules_bi [rule| second f >>> fst |] = into [| fst |]
struct_rules_bi [rule| swap >>> fst |] = into [| snd |]
struct_rules_bi [rule| swap >>> snd |] = into [| fst |]
struct_rules_bi [rule| diag >>> swap |] = into [| diag |]
struct_rules_bi [rule| diag >>> (swap >>> f) |] = into [| diag >>> $f |]
struct_rules_bi _ = return Nothing
struct_rules_trav :: RuleE
struct_rules_trav [rule| (f &&& g) >>> (h *** i) |] = into [| ($f >>> $h) &&& ($g >>> $i) |]
struct_rules_trav _ = nothing
-- | Perhaps do right assoc, then right swap? This seems like too much of a "special case"
struct_rules_rare :: RuleE
struct_rules_rare [rule| swap >>> arr (\(a,b) -> c) |] = into [| arr (\($b,$a) -> $c) |]
struct_rules_rare _ = return Nothing
instance (Weaken p cat) => Trans2' (FreeWeaken p) cat where
drop2 (FreeWeakenBaseOp a) = a
drop2 (FreeWeakenCategoryOp Id) = id
drop2 (FreeWeakenCategoryOp (a :>>> b)) = drop2 a >>> drop2 b
drop2 (FreeWeakenBifunctorOp (a :*** b)) = drop2 a *** drop2 b
drop2 (WeakenOp Fst) = fst
drop2 (WeakenOp Snd) = snd
instance (Contract p cat) => Trans2' (FreeContract p) cat where
drop2 (FreeContractBaseOp a) = a
drop2 (FreeContractCategoryOp Id) = id
drop2 (FreeContractCategoryOp (a :>>> b)) = drop2 a >>> drop2 b
drop2 (FreeContractBifunctorOp (a :*** b)) = drop2 a *** drop2 b
drop2 (ContractOp Diag) = diag
instance (Symmetric p cat) => Trans2' (FreeSymmetric p) cat where
drop2 (FreeSymmetricBaseOp a) = a
drop2 (FreeSymmetricCategoryOp Id) = id
drop2 (FreeSymmetricCategoryOp (a :>>> b)) = drop2 a >>> drop2 b
drop2 (FreeSymmetricBifunctorOp (a :*** b)) = drop2 a *** drop2 b
drop2 (SymmetricOp Swap) = swap
{-
-}
| tomberek/rulestesting | src/Control/Category/Structural/Rules.hs | bsd-3-clause | 7,518 | 2 | 12 | 2,142 | 1,847 | 1,088 | 759 | 122 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ExtendedDefaultRules #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
--import Prelude hiding (FilePath)
import Network.DNS.Pocket
import Options.Applicative
import qualified Data.ByteString.Char8 as B8
import qualified Data.Text as T
import Control.Monad
import System.Exit
default (T.Text)
data Command
= Set FilePath String [String]
| Get FilePath String
| List FilePath
| Delete FilePath String
| Daemon FilePath Port
deriving Show
set :: Parser Command
set = Set
<$> option str (long "conf" <> value "conf.yml" <> metavar "CONFILE")
<*> (argument str (metavar "DOMAIN"))
<*> many (argument str (metavar "IP..."))
get :: Parser Command
get = Get
<$> option str (long "conf" <> value "conf.yml" <> metavar "CONFILE")
<*> (argument str (metavar "DOMAIN"))
list :: Parser Command
list = List
<$> option str (long "conf" <> value "conf.yml" <> metavar "CONFILE")
delete :: Parser Command
delete = Delete
<$> option str (long "conf" <> value "conf.yml" <> metavar "CONFILE")
<*> (argument str (metavar "DOMAIN"))
daemon :: Parser Command
daemon = Daemon
<$> option str (long "conf" <> value "conf.yml" <> metavar "CONFILE")
<*> option auto (long "port" <> value 53 <> metavar "PORT")
parse :: Parser Command
parse = subparser $ foldr1 (<>) [
command "set" (info set (progDesc "set domain and ip"))
, command "get" (info get (progDesc "get ip from domain"))
, command "list" (info list (progDesc "list domain's ip"))
, command "delete" (info delete (progDesc "delete domain"))
, command "daemon" (info daemon (progDesc "start daemon"))
]
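-- Example invocations (sketch; the executable name "pocket-dns" is assumed):
--
--   pocket-dns set --conf conf.yml example.com 127.0.0.1
--   pocket-dns get --conf conf.yml example.com
--   pocket-dns list --conf conf.yml
--   pocket-dns delete --conf conf.yml example.com
--   pocket-dns daemon --conf conf.yml --port 5353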
runCmd :: Command -> IO ()
runCmd (Set conf domain ips) = do
v <- setDomain conf (B8.pack domain) $ map read ips
if v
then print "OK"
else do
print "Failed"
exitWith $ ExitFailure 1
runCmd (Get conf domain) = do
v <- getDomain conf $ B8.pack domain
print v
runCmd (List conf) = do
v <- listDomain conf
forM_ v $ \(domain,ips) -> do
B8.putStrLn domain
forM_ ips $ \ip -> do
putStr $ " "
putStrLn $ show ip
runCmd (Delete conf domain) = do
deleteDomain conf $ B8.pack domain
runCmd (Daemon conf port) = runServer conf port
opts :: ParserInfo Command
opts = info (parse <**> helper) idm
main :: IO ()
main = execParser opts >>= runCmd
| junjihashimoto/pocket-dns | Main.hs | bsd-3-clause | 2,423 | 5 | 16 | 568 | 871 | 426 | 445 | 69 | 2 |
{-
(c) The AQUA Project, Glasgow University, 1993-1998
\section[Simplify]{The main module of the simplifier}
-}
{-# LANGUAGE CPP #-}
module Simplify ( simplTopBinds, simplExpr, simplRule ) where
#include "HsVersions.h"
import DynFlags
import SimplMonad
import Type hiding ( substTy, extendTvSubst, substTyVar )
import SimplEnv
import SimplUtils
import FamInstEnv ( FamInstEnv )
import Literal          ( litIsLifted ) --, mkMachInt ) -- temporarily commented out. See #8326
import Id
import MkId ( seqId, voidPrimId )
import MkCore ( mkImpossibleExpr, castBottomExpr )
import IdInfo
import Name ( Name, mkSystemVarName, isExternalName )
import Coercion hiding ( substCo, substTy, substCoVar, extendTvSubst )
import OptCoercion ( optCoercion )
import FamInstEnv ( topNormaliseType_maybe )
import DataCon ( DataCon, dataConWorkId, dataConRepStrictness
, isMarkedStrict ) --, dataConTyCon, dataConTag, fIRST_TAG )
--import TyCon           ( isEnumerationTyCon ) -- temporarily commented out. See #8326
import CoreMonad ( Tick(..), SimplifierMode(..) )
import CoreSyn
import Demand ( StrictSig(..), dmdTypeDepth, isStrictDmd )
import PprCore ( pprCoreExpr )
import CoreUnfold
import CoreUtils
import CoreArity
--import PrimOp           ( tagToEnumKey ) -- temporarily commented out. See #8326
import Rules ( mkSpecInfo, lookupRule, getRules )
import TysPrim          ( voidPrimTy ) --, intPrimTy ) -- temporarily commented out. See #8326
import BasicTypes ( TopLevelFlag(..), isTopLevel, RecFlag(..) )
import MonadUtils ( foldlM, mapAccumLM, liftIO )
import Maybes ( orElse )
--import Unique           ( hasKey ) -- temporarily commented out. See #8326
import Control.Monad
import Outputable
import FastString
import Pair
import Util
import ErrUtils
{-
The guts of the simplifier is in this module, but the driver loop for
the simplifier is in SimplCore.hs.
-----------------------------------------
*** IMPORTANT NOTE ***
-----------------------------------------
The simplifier used to guarantee that the output had no shadowing, but
it does not do so any more. (Actually, it never did!) The reason is
documented with simplifyArgs.
-----------------------------------------
*** IMPORTANT NOTE ***
-----------------------------------------
Many parts of the simplifier return a bunch of "floats" as well as an
expression. This is wrapped as a datatype SimplUtils.FloatsWith.
All "floats" are let-binds, not case-binds, but some non-rec lets may
be unlifted (with RHS ok-for-speculation).
-----------------------------------------
ORGANISATION OF FUNCTIONS
-----------------------------------------
simplTopBinds
- simplify all top-level binders
- for NonRec, call simplRecOrTopPair
- for Rec, call simplRecBind
------------------------------
simplExpr (applied lambda) ==> simplNonRecBind
simplExpr (Let (NonRec ...) ..) ==> simplNonRecBind
simplExpr (Let (Rec ...) ..) ==> simplify binders; simplRecBind
------------------------------
simplRecBind    [binders already simplified]
- use simplRecOrTopPair on each pair in turn
simplRecOrTopPair [binder already simplified]
Used for: recursive bindings (top level and nested)
top-level non-recursive bindings
Returns:
- check for PreInlineUnconditionally
- simplLazyBind
simplNonRecBind
Used for: non-top-level non-recursive bindings
beta reductions (which amount to the same thing)
        Because it can deal with strict args, it takes a
"thing-inside" and returns an expression
- check for PreInlineUnconditionally
- simplify binder, including its IdInfo
- if strict binding
simplStrictArg
mkAtomicArgs
completeNonRecX
else
simplLazyBind
addFloats
simplNonRecX: [given a *simplified* RHS, but an *unsimplified* binder]
Used for: binding case-binder and constr args in a known-constructor case
        - check for PreInlineUnconditionally
- simplify binder
- completeNonRecX
------------------------------
simplLazyBind: [binder already simplified, RHS not]
Used for: recursive bindings (top level and nested)
top-level non-recursive bindings
non-top-level, but *lazy* non-recursive bindings
[must not be strict or unboxed]
Returns floats + an augmented environment, not an expression
- substituteIdInfo and add result to in-scope
[so that rules are available in rec rhs]
- simplify rhs
- mkAtomicArgs
- float if exposes constructor or PAP
- completeBind
completeNonRecX: [binder and rhs both simplified]
  - if the thing needs case binding (unlifted and not ok-for-spec)
build a Case
else
completeBind
addFloats
completeBind: [given a simplified RHS]
[used for both rec and non-rec bindings, top level and not]
- try PostInlineUnconditionally
- add unfolding [this is the only place we add an unfolding]
- add arity
Right hand sides and arguments
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In many ways we want to treat
(a) the right hand side of a let(rec), and
(b) a function argument
in the same way. But not always! In particular, we would
like to leave these arguments exactly as they are, so they
will match a RULE more easily.
f (g x, h x)
g (+ x)
It's harder to make the rule match if we ANF-ise the constructor,
or eta-expand the PAP:
f (let { a = g x; b = h x } in (a,b))
g (\y. + x y)
On the other hand if we see the let-defns
p = (g x, h x)
q = + x
then we *do* want to ANF-ise and eta-expand, so that p and q
can be safely inlined.
Even floating lets out is a bit dubious. For let RHS's we float lets
out if that exposes a value, so that the value can be inlined more vigorously.
For example
r = let x = e in (x,x)
Here, if we float the let out we'll expose a nice constructor. We did experiments
that showed this to be a generally good thing. But it was a bad thing to float
lets out unconditionally, because that meant they got allocated more often.
For function arguments, there's less reason to expose a constructor (it won't
get inlined). Just possibly it might make a rule match, but I'm pretty skeptical.
So for the moment we don't float lets out of function arguments either.
Eta expansion
~~~~~~~~~~~~~~
For eta expansion, we want to catch things like
case e of (a,b) -> \x -> case a of (p,q) -> \y -> r
If the \x was on the RHS of a let, we'd eta expand to bring the two
lambdas together. And in general that's a good thing to do. Perhaps
we should eta expand wherever we find a (value) lambda? Then the eta
expansion at a let RHS can concentrate solely on the PAP case.
************************************************************************
* *
\subsection{Bindings}
* *
************************************************************************
-}
simplTopBinds :: SimplEnv -> [InBind] -> SimplM SimplEnv
simplTopBinds env0 binds0
= do { -- Put all the top-level binders into scope at the start
-- so that if a transformation rule has unexpectedly brought
-- anything into scope, then we don't get a complaint about that.
-- It's rather as if the top-level binders were imported.
-- See note [Glomming] in OccurAnal.
; env1 <- simplRecBndrs env0 (bindersOfBinds binds0)
; env2 <- simpl_binds env1 binds0
; freeTick SimplifierDone
; return env2 }
where
-- We need to track the zapped top-level binders, because
-- they should have their fragile IdInfo zapped (notably occurrence info)
-- That's why we run down binds and bndrs' simultaneously.
--
simpl_binds :: SimplEnv -> [InBind] -> SimplM SimplEnv
simpl_binds env [] = return env
simpl_binds env (bind:binds) = do { env' <- simpl_bind env bind
; simpl_binds env' binds }
simpl_bind env (Rec pairs) = simplRecBind env TopLevel pairs
simpl_bind env (NonRec b r) = do { (env', b') <- addBndrRules env b (lookupRecBndr env b)
; simplRecOrTopPair env' TopLevel NonRecursive b b' r }
{-
************************************************************************
* *
\subsection{Lazy bindings}
* *
************************************************************************
simplRecBind is used for
* recursive bindings only
-}
simplRecBind :: SimplEnv -> TopLevelFlag
-> [(InId, InExpr)]
-> SimplM SimplEnv
simplRecBind env0 top_lvl pairs0
= do { (env_with_info, triples) <- mapAccumLM add_rules env0 pairs0
; env1 <- go (zapFloats env_with_info) triples
; return (env0 `addRecFloats` env1) }
-- addFloats adds the floats from env1,
-- _and_ updates env0 with the in-scope set from env1
where
add_rules :: SimplEnv -> (InBndr,InExpr) -> SimplM (SimplEnv, (InBndr, OutBndr, InExpr))
-- Add the (substituted) rules to the binder
add_rules env (bndr, rhs)
= do { (env', bndr') <- addBndrRules env bndr (lookupRecBndr env bndr)
; return (env', (bndr, bndr', rhs)) }
go env [] = return env
go env ((old_bndr, new_bndr, rhs) : pairs)
= do { env' <- simplRecOrTopPair env top_lvl Recursive old_bndr new_bndr rhs
; go env' pairs }
{-
simplOrTopPair is used for
* recursive bindings (whether top level or not)
* top-level non-recursive bindings
It assumes the binder has already been simplified, but not its IdInfo.
-}
simplRecOrTopPair :: SimplEnv
-> TopLevelFlag -> RecFlag
-> InId -> OutBndr -> InExpr -- Binder and rhs
-> SimplM SimplEnv -- Returns an env that includes the binding
simplRecOrTopPair env top_lvl is_rec old_bndr new_bndr rhs
= do { dflags <- getDynFlags
; trace_bind dflags $
if preInlineUnconditionally dflags env top_lvl old_bndr rhs
-- Check for unconditional inline
then do tick (PreInlineUnconditionally old_bndr)
return (extendIdSubst env old_bndr (mkContEx env rhs))
else simplLazyBind env top_lvl is_rec old_bndr new_bndr rhs env }
where
trace_bind dflags thing_inside
| not (dopt Opt_D_verbose_core2core dflags)
= thing_inside
| otherwise
= pprTrace "SimplBind" (ppr old_bndr) thing_inside
-- trace_bind emits a trace for each top-level binding, which
-- helps to locate the tracing for inlining and rule firing
{-
simplLazyBind is used for
* [simplRecOrTopPair] recursive bindings (whether top level or not)
* [simplRecOrTopPair] top-level non-recursive bindings
* [simplNonRecE] non-top-level *lazy* non-recursive bindings
Nota bene:
1. It assumes that the binder is *already* simplified,
and is in scope, and its IdInfo too, except unfolding
2. It assumes that the binder type is lifted.
3. It does not check for pre-inline-unconditionally;
that should have been done already.
-}
simplLazyBind :: SimplEnv
-> TopLevelFlag -> RecFlag
-> InId -> OutId -- Binder, both pre-and post simpl
-- The OutId has IdInfo, except arity, unfolding
-> InExpr -> SimplEnv -- The RHS and its environment
-> SimplM SimplEnv
-- Precondition: rhs obeys the let/app invariant
simplLazyBind env top_lvl is_rec bndr bndr1 rhs rhs_se
= -- pprTrace "simplLazyBind" ((ppr bndr <+> ppr bndr1) $$ ppr rhs $$ ppr (seIdSubst rhs_se)) $
do { let rhs_env = rhs_se `setInScope` env
(tvs, body) = case collectTyBinders rhs of
(tvs, body) | not_lam body -> (tvs,body)
| otherwise -> ([], rhs)
not_lam (Lam _ _) = False
not_lam (Tick t e) | not (tickishFloatable t)
= not_lam e -- eta-reduction could float
not_lam _ = True
                -- Do not do the "abstract tyvar" thing if there's
                -- a lambda inside, because it defeats eta-reduction
                --    f = /\a. \x. g a x
                -- should eta-reduce.
; (body_env, tvs') <- simplBinders rhs_env tvs
-- See Note [Floating and type abstraction] in SimplUtils
-- Simplify the RHS
; let rhs_cont = mkRhsStop (substTy body_env (exprType body))
; (body_env1, body1) <- simplExprF body_env body rhs_cont
-- ANF-ise a constructor or PAP rhs
; (body_env2, body2) <- prepareRhs top_lvl body_env1 bndr1 body1
; (env', rhs')
<- if not (doFloatFromRhs top_lvl is_rec False body2 body_env2)
then -- No floating, revert to body1
do { rhs' <- mkLam tvs' (wrapFloats body_env1 body1) rhs_cont
; return (env, rhs') }
else if null tvs then -- Simple floating
do { tick LetFloatFromLet
; return (addFloats env body_env2, body2) }
else -- Do type-abstraction first
do { tick LetFloatFromLet
; (poly_binds, body3) <- abstractFloats tvs' body_env2 body2
; rhs' <- mkLam tvs' body3 rhs_cont
; env' <- foldlM (addPolyBind top_lvl) env poly_binds
; return (env', rhs') }
; completeBind env' top_lvl bndr bndr1 rhs' }
{-
A specialised variant of simplNonRec used when the RHS is already simplified,
notably in knownCon. It uses case-binding where necessary.
-}
simplNonRecX :: SimplEnv
-> InId -- Old binder
-> OutExpr -- Simplified RHS
-> SimplM SimplEnv
-- Precondition: rhs satisfies the let/app invariant
simplNonRecX env bndr new_rhs
| isDeadBinder bndr -- Not uncommon; e.g. case (a,b) of c { (p,q) -> p }
= return env -- Here c is dead, and we avoid creating
-- the binding c = (a,b)
| Coercion co <- new_rhs
= return (extendCvSubst env bndr co)
| otherwise
= do { (env', bndr') <- simplBinder env bndr
; completeNonRecX NotTopLevel env' (isStrictId bndr) bndr bndr' new_rhs }
-- simplNonRecX is only used for NotTopLevel things
completeNonRecX :: TopLevelFlag -> SimplEnv
-> Bool
-> InId -- Old binder
-> OutId -- New binder
-> OutExpr -- Simplified RHS
-> SimplM SimplEnv
-- Precondition: rhs satisfies the let/app invariant
-- See Note [CoreSyn let/app invariant] in CoreSyn
completeNonRecX top_lvl env is_strict old_bndr new_bndr new_rhs
= do { (env1, rhs1) <- prepareRhs top_lvl (zapFloats env) new_bndr new_rhs
; (env2, rhs2) <-
if doFloatFromRhs NotTopLevel NonRecursive is_strict rhs1 env1
then do { tick LetFloatFromLet
; return (addFloats env env1, rhs1) } -- Add the floats to the main env
else return (env, wrapFloats env1 rhs1) -- Wrap the floats around the RHS
; completeBind env2 NotTopLevel old_bndr new_bndr rhs2 }
{-
{- No, no, no! Do not try preInlineUnconditionally in completeNonRecX
Doing so risks exponential behaviour, because new_rhs has been simplified once already
   In the cases described by the following comment, postInlineUnconditionally will
catch many of the relevant cases.
-- This happens; for example, the case_bndr during case of
-- known constructor: case (a,b) of x { (p,q) -> ... }
-- Here x isn't mentioned in the RHS, so we don't want to
-- create the (dead) let-binding let x = (a,b) in ...
--
   -- Similarly, single occurrences can be inlined vigorously
-- e.g. case (f x, g y) of (a,b) -> ....
-- If a,b occur once we can avoid constructing the let binding for them.
Furthermore in the case-binding case preInlineUnconditionally risks extra thunks
-- Consider case I# (quotInt# x y) of
-- I# v -> let w = J# v in ...
-- If we gaily inline (quotInt# x y) for v, we end up building an
-- extra thunk:
-- let w = J# (quotInt# x y) in ...
-- because quotInt# can fail.
| preInlineUnconditionally env NotTopLevel bndr new_rhs
= thing_inside (extendIdSubst env bndr (DoneEx new_rhs))
-}
----------------------------------
prepareRhs takes a putative RHS, checks whether it's a PAP or
constructor application and, if so, converts it to ANF, so that the
resulting thing can be inlined more easily. Thus
x = (f a, g b)
becomes
t1 = f a
t2 = g b
x = (t1,t2)
We also want to deal well cases like this
v = (f e1 `cast` co) e2
Here we want to make e1,e2 trivial and get
x1 = e1; x2 = e2; v = (f x1 `cast` co) v2
That's what the 'go' loop in prepareRhs does
-}
prepareRhs :: TopLevelFlag -> SimplEnv -> OutId -> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Adds new floats to the env iff that allows us to return a good RHS
prepareRhs top_lvl env id (Cast rhs co) -- Note [Float coercions]
| Pair ty1 _ty2 <- coercionKind co -- Do *not* do this if rhs has an unlifted type
, not (isUnLiftedType ty1) -- see Note [Float coercions (unlifted)]
= do { (env', rhs') <- makeTrivialWithInfo top_lvl env sanitised_info rhs
; return (env', Cast rhs' co) }
where
sanitised_info = vanillaIdInfo `setStrictnessInfo` strictnessInfo info
`setDemandInfo` demandInfo info
info = idInfo id
prepareRhs top_lvl env0 _ rhs0
= do { (_is_exp, env1, rhs1) <- go 0 env0 rhs0
; return (env1, rhs1) }
where
go n_val_args env (Cast rhs co)
= do { (is_exp, env', rhs') <- go n_val_args env rhs
; return (is_exp, env', Cast rhs' co) }
go n_val_args env (App fun (Type ty))
= do { (is_exp, env', rhs') <- go n_val_args env fun
; return (is_exp, env', App rhs' (Type ty)) }
go n_val_args env (App fun arg)
= do { (is_exp, env', fun') <- go (n_val_args+1) env fun
; case is_exp of
True -> do { (env'', arg') <- makeTrivial top_lvl env' arg
; return (True, env'', App fun' arg') }
False -> return (False, env, App fun arg) }
go n_val_args env (Var fun)
= return (is_exp, env, Var fun)
where
is_exp = isExpandableApp fun n_val_args -- The fun a constructor or PAP
-- See Note [CONLIKE pragma] in BasicTypes
-- The definition of is_exp should match that in
-- OccurAnal.occAnalApp
go n_val_args env (Tick t rhs)
-- We want to be able to float bindings past this
-- tick. Non-scoping ticks don't care.
| tickishScoped t == NoScope
= do { (is_exp, env', rhs') <- go n_val_args env rhs
; return (is_exp, env', Tick t rhs') }
-- On the other hand, for scoping ticks we need to be able to
-- copy them on the floats, which in turn is only allowed if
-- we can obtain non-counting ticks.
| not (tickishCounts t) || tickishCanSplit t
= do { (is_exp, env', rhs') <- go n_val_args (zapFloats env) rhs
; let tickIt (id, expr) = (id, mkTick (mkNoCount t) expr)
floats' = seFloats $ env `addFloats` mapFloats env' tickIt
; return (is_exp, env' { seFloats = floats' }, Tick t rhs') }
go _ env other
= return (False, env, other)
{-
Note [Float coercions]
~~~~~~~~~~~~~~~~~~~~~~
When we find the binding
x = e `cast` co
we'd like to transform it to
x' = e
x = x `cast` co -- A trivial binding
There's a chance that e will be a constructor application or function, or something
like that, so moving the coercion to the usage site may well cancel the coercions
and lead to further optimisation. Example:
data family T a :: *
data instance T Int = T Int
foo :: Int -> Int -> Int
foo m n = ...
where
x = T m
go 0 = 0
go n = case x of { T m -> go (n-m) }
-- This case should optimise
Note [Preserve strictness when floating coercions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In the Note [Float coercions] transformation, keep the strictness info.
Eg
f = e `cast` co -- f has strictness SSL
When we transform to
f' = e -- f' also has strictness SSL
f = f' `cast` co -- f still has strictness SSL
Its not wrong to drop it on the floor, but better to keep it.
Note [Float coercions (unlifted)]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
BUT don't do [Float coercions] if 'e' has an unlifted type.
This *can* happen:
foo :: Int = (error (# Int,Int #) "urk")
`cast` CoUnsafe (# Int,Int #) Int
If do the makeTrivial thing to the error call, we'll get
foo = case error (# Int,Int #) "urk" of v -> v `cast` ...
But 'v' isn't in scope!
These strange casts can happen as a result of case-of-case
bar = case (case x of { T -> (# 2,3 #); F -> error "urk" }) of
(# p,q #) -> p+q
-}
makeTrivialArg :: SimplEnv -> ArgSpec -> SimplM (SimplEnv, ArgSpec)
makeTrivialArg env (ValArg e) = do { (env', e') <- makeTrivial NotTopLevel env e
; return (env', ValArg e') }
makeTrivialArg env arg = return (env, arg) -- CastBy, TyArg
makeTrivial :: TopLevelFlag -> SimplEnv -> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Binds the expression to a variable, if it's not trivial, returning the variable
makeTrivial top_lvl env expr = makeTrivialWithInfo top_lvl env vanillaIdInfo expr
makeTrivialWithInfo :: TopLevelFlag -> SimplEnv -> IdInfo
-> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Propagate strictness and demand info to the new binder
-- Note [Preserve strictness when floating coercions]
-- Returned SimplEnv has same substitution as incoming one
makeTrivialWithInfo top_lvl env info expr
| exprIsTrivial expr -- Already trivial
|| not (bindingOk top_lvl expr expr_ty) -- Cannot trivialise
-- See Note [Cannot trivialise]
= return (env, expr)
| otherwise -- See Note [Take care] below
= do { uniq <- getUniqueM
; let name = mkSystemVarName uniq (fsLit "a")
var = mkLocalIdWithInfo name expr_ty info
; env' <- completeNonRecX top_lvl env False var var expr
; expr' <- simplVar env' var
; return (env', expr') }
        -- The simplVar is needed because we're constructing a new binding
-- a = rhs
-- And if rhs is of form (rhs1 |> co), then we might get
-- a1 = rhs1
-- a = a1 |> co
-- and now a's RHS is trivial and can be substituted out, and that
-- is what completeNonRecX will do
-- To put it another way, it's as if we'd simplified
-- let var = e in var
where
expr_ty = exprType expr
bindingOk :: TopLevelFlag -> CoreExpr -> Type -> Bool
-- True iff we can have a binding of this expression at this level
-- Precondition: the type is the type of the expression
bindingOk top_lvl _ expr_ty
| isTopLevel top_lvl = not (isUnLiftedType expr_ty)
| otherwise = True
{-
Note [Cannot trivialise]
~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
f :: Int -> Addr#
foo :: Bar
foo = Bar (f 3)
Then we can't ANF-ise foo, even though we'd like to, because
we can't make a top-level binding for the Addr# (f 3). And if
so we don't want to turn it into
foo = let x = f 3 in Bar x
because we'll just end up inlining x back, and that makes the
simplifier loop. Better not to ANF-ise it at all.
A case in point is literal strings (a MachStr is not regarded as
trivial):
foo = Ptr "blob"#
We don't want to ANF-ise this.
************************************************************************
* *
\subsection{Completing a lazy binding}
* *
************************************************************************
completeBind
* deals only with Ids, not TyVars
* takes an already-simplified binder and RHS
* is used for both recursive and non-recursive bindings
* is used for both top-level and non-top-level bindings
It does the following:
- tries discarding a dead binding
- tries PostInlineUnconditionally
- add unfolding [this is the only place we add an unfolding]
- add arity
It does *not* attempt to do let-to-case. Why? Because it is used for
- top-level bindings (when let-to-case is impossible)
- many situations where the "rhs" is known to be a WHNF
(so let-to-case is inappropriate).
Nor does it do the atomic-argument thing
-}
completeBind :: SimplEnv
-> TopLevelFlag -- Flag stuck into unfolding
-> InId -- Old binder
-> OutId -> OutExpr -- New binder and RHS
-> SimplM SimplEnv
-- completeBind may choose to do its work
-- * by extending the substitution (e.g. let x = y in ...)
-- * or by adding to the floats in the envt
--
-- Precondition: rhs obeys the let/app invariant
completeBind env top_lvl old_bndr new_bndr new_rhs
| isCoVar old_bndr
= case new_rhs of
Coercion co -> return (extendCvSubst env old_bndr co)
_ -> return (addNonRec env new_bndr new_rhs)
| otherwise
= ASSERT( isId new_bndr )
do { let old_info = idInfo old_bndr
old_unf = unfoldingInfo old_info
occ_info = occInfo old_info
-- Do eta-expansion on the RHS of the binding
-- See Note [Eta-expanding at let bindings] in SimplUtils
; (new_arity, final_rhs) <- tryEtaExpandRhs env new_bndr new_rhs
-- Simplify the unfolding
; new_unfolding <- simplLetUnfolding env top_lvl old_bndr final_rhs old_unf
; dflags <- getDynFlags
; if postInlineUnconditionally dflags env top_lvl new_bndr occ_info
final_rhs new_unfolding
-- Inline and discard the binding
then do { tick (PostInlineUnconditionally old_bndr)
; return (extendIdSubst env old_bndr (DoneEx final_rhs)) }
-- Use the substitution to make quite, quite sure that the
-- substitution will happen, since we are going to discard the binding
else
do { let info1 = idInfo new_bndr `setArityInfo` new_arity
-- Unfolding info: Note [Setting the new unfolding]
info2 = info1 `setUnfoldingInfo` new_unfolding
-- Demand info: Note [Setting the demand info]
--
-- We also have to nuke demand info if for some reason
-- eta-expansion *reduces* the arity of the binding to less
-- than that of the strictness sig. This can happen: see Note [Arity decrease].
info3 | isEvaldUnfolding new_unfolding
|| (case strictnessInfo info2 of
StrictSig dmd_ty -> new_arity < dmdTypeDepth dmd_ty)
= zapDemandInfo info2 `orElse` info2
| otherwise
= info2
final_id = new_bndr `setIdInfo` info3
; -- pprTrace "Binding" (ppr final_id <+> ppr new_unfolding) $
return (addNonRec env final_id final_rhs) } }
-- The addNonRec adds it to the in-scope set too
------------------------------
addPolyBind :: TopLevelFlag -> SimplEnv -> OutBind -> SimplM SimplEnv
-- Add a new binding to the environment, complete with its unfolding
-- but *do not* do postInlineUnconditionally, because we have already
-- processed some of the scope of the binding
-- We still want the unfolding though. Consider
-- let
-- x = /\a. let y = ... in Just y
-- in body
-- Then we float the y-binding out (via abstractFloats and addPolyBind)
-- but 'x' may well then be inlined in 'body' in which case we'd like the
-- opportunity to inline 'y' too.
--
-- INVARIANT: the arity is correct on the incoming binders
addPolyBind top_lvl env (NonRec poly_id rhs)
= do { unfolding <- simplLetUnfolding env top_lvl poly_id rhs noUnfolding
-- Assumes that poly_id did not have an INLINE prag
-- which is perhaps wrong. ToDo: think about this
; let final_id = setIdInfo poly_id $
idInfo poly_id `setUnfoldingInfo` unfolding
; return (addNonRec env final_id rhs) }
addPolyBind _ env bind@(Rec _)
= return (extendFloats env bind)
-- Hack: letrecs are more awkward, so we extend "by steam"
-- without adding unfoldings etc. At worst this leads to
-- more simplifier iterations
{- Note [Arity decrease]
~~~~~~~~~~~~~~~~~~~~~~~~
Generally speaking the arity of a binding should not decrease. But it *can*
legitimately happen because of RULES. Eg
f = g Int
where g has arity 2, will have arity 2. But if there's a rewrite rule
g Int --> h
where h has arity 1, then f's arity will decrease. Here's a real-life example,
which is in the output of Specialise:
Rec {
$dm {Arity 2} = \d.\x. op d
{-# RULES forall d. $dm Int d = $s$dm #-}
dInt = MkD .... opInt ...
opInt {Arity 1} = $dm dInt
$s$dm {Arity 0} = \x. op dInt }
Here opInt has arity 1; but when we apply the rule its arity drops to 0.
That's why Specialise goes to a little trouble to pin the right arity
on specialised functions too.
Note [Setting the demand info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the unfolding is a value, the demand info may
go pear-shaped, so we nuke it. Example:
let x = (a,b) in
case x of (p,q) -> h p q x
Here x is certainly demanded. But after we've nuked
the case, we'll get just
let x = (a,b) in h a b x
and now x is not demanded (I'm assuming h is lazy)
This really happens. Similarly
let f = \x -> e in ...f..f...
After inlining f at some of its call sites the original binding may
(for example) be no longer strictly demanded.
The solution here is a bit ad hoc...
************************************************************************
* *
\subsection[Simplify-simplExpr]{The main function: simplExpr}
* *
************************************************************************
The reason for this OutExprStuff stuff is that we want to float *after*
simplifying a RHS, not before. If we do so naively we get quadratic
behaviour as things float out.
To see why it's important to do it after, consider this (real) example:
let t = f x
in fst t
==>
let t = let a = e1
b = e2
in (a,b)
in fst t
==>
let a = e1
b = e2
t = (a,b)
in
a -- Can't inline a this round, cos it appears twice
==>
e1
Each of the ==> steps is a round of simplification. We'd save a
whole round if we float first. This can cascade. Consider
let f = g d
in \x -> ...f...
==>
let f = let d1 = ..d.. in \y -> e
in \x -> ...f...
==>
let d1 = ..d..
in \x -> ...(\y ->e)...
Only in this second round can the \y be applied, and it
might do the same again.
-}
simplExpr :: SimplEnv -> CoreExpr -> SimplM CoreExpr
simplExpr env expr = simplExprC env expr (mkBoringStop expr_out_ty)
where
expr_out_ty :: OutType
expr_out_ty = substTy env (exprType expr)
simplExprC :: SimplEnv -> CoreExpr -> SimplCont -> SimplM CoreExpr
-- Simplify an expression, given a continuation
simplExprC env expr cont
= -- pprTrace "simplExprC" (ppr expr $$ ppr cont {- $$ ppr (seIdSubst env) -} $$ ppr (seFloats env) ) $
do { (env', expr') <- simplExprF (zapFloats env) expr cont
; -- pprTrace "simplExprC ret" (ppr expr $$ ppr expr') $
-- pprTrace "simplExprC ret3" (ppr (seInScope env')) $
-- pprTrace "simplExprC ret4" (ppr (seFloats env')) $
return (wrapFloats env' expr') }
--------------------------------------------------
simplExprF :: SimplEnv -> InExpr -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplExprF env e cont
= {- pprTrace "simplExprF" (vcat
[ ppr e
, text "cont =" <+> ppr cont
, text "inscope =" <+> ppr (seInScope env)
, text "tvsubst =" <+> ppr (seTvSubst env)
, text "idsubst =" <+> ppr (seIdSubst env)
, text "cvsubst =" <+> ppr (seCvSubst env)
{- , ppr (seFloats env) -}
]) $ -}
simplExprF1 env e cont
simplExprF1 :: SimplEnv -> InExpr -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplExprF1 env (Var v) cont = simplIdF env v cont
simplExprF1 env (Lit lit) cont = rebuild env (Lit lit) cont
simplExprF1 env (Tick t expr) cont = simplTick env t expr cont
simplExprF1 env (Cast body co) cont = simplCast env body co cont
simplExprF1 env (Coercion co) cont = simplCoercionF env co cont
simplExprF1 env (Type ty) cont = ASSERT( contIsRhsOrArg cont )
rebuild env (Type (substTy env ty)) cont
simplExprF1 env (App fun arg) cont
= simplExprF env fun $
case arg of
Type ty -> ApplyToTy { sc_arg_ty = substTy env ty
, sc_hole_ty = substTy env (exprType fun)
, sc_cont = cont }
_ -> ApplyToVal { sc_arg = arg, sc_env = env
, sc_dup = NoDup, sc_cont = cont }
simplExprF1 env expr@(Lam {}) cont
= simplLam env zapped_bndrs body cont
-- The main issue here is under-saturated lambdas
-- (\x1. \x2. e) arg1
-- Here x1 might have "occurs-once" occ-info, because occ-info
-- is computed assuming that a group of lambdas is applied
-- all at once. If there are too few args, we must zap the
-- occ-info, UNLESS the remaining binders are one-shot
where
(bndrs, body) = collectBinders expr
zapped_bndrs | need_to_zap = map zap bndrs
| otherwise = bndrs
need_to_zap = any zappable_bndr (drop n_args bndrs)
n_args = countArgs cont
-- NB: countArgs counts all the args (incl type args)
-- and likewise drop counts all binders (incl type lambdas)
zappable_bndr b = isId b && not (isOneShotBndr b)
zap b | isTyVar b = b
| otherwise = zapLamIdInfo b
simplExprF1 env (Case scrut bndr _ alts) cont
= simplExprF env scrut (Select NoDup bndr alts env cont)
simplExprF1 env (Let (Rec pairs) body) cont
= do { env' <- simplRecBndrs env (map fst pairs)
-- NB: bndrs' don't have unfoldings or rules
-- We add them as we go down
; env'' <- simplRecBind env' NotTopLevel pairs
; simplExprF env'' body cont }
simplExprF1 env (Let (NonRec bndr rhs) body) cont
= simplNonRecE env bndr (rhs, env) ([], body) cont
---------------------------------
simplType :: SimplEnv -> InType -> SimplM OutType
-- Kept monadic just so we can do the seqType
simplType env ty
= -- pprTrace "simplType" (ppr ty $$ ppr (seTvSubst env)) $
seqType new_ty `seq` return new_ty
where
new_ty = substTy env ty
---------------------------------
simplCoercionF :: SimplEnv -> InCoercion -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplCoercionF env co cont
= do { co' <- simplCoercion env co
; rebuild env (Coercion co') cont }
simplCoercion :: SimplEnv -> InCoercion -> SimplM OutCoercion
simplCoercion env co
= let opt_co = optCoercion (getCvSubst env) co
in seqCo opt_co `seq` return opt_co
-----------------------------------
-- | Push a TickIt context outwards past applications and cases, as
-- long as this is a non-scoping tick, to let case and application
-- optimisations apply.
simplTick :: SimplEnv -> Tickish Id -> InExpr -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplTick env tickish expr cont
-- A scoped tick turns into a continuation, so that we can spot
-- (scc t (\x . e)) in simplLam and eliminate the scc. If we didn't do
-- it this way, then it would take two passes of the simplifier to
-- reduce ((scc t (\x . e)) e').
-- NB, don't do this with counting ticks, because if the expr is
-- bottom, then rebuildCall will discard the continuation.
-- XXX: we cannot do this, because the simplifier assumes that
-- the context can be pushed into a case with a single branch. e.g.
-- scc<f> case expensive of p -> e
-- becomes
-- case expensive of p -> scc<f> e
--
-- So I'm disabling this for now. It just means we will do more
  -- simplifier iterations than necessary in some cases.
-- | tickishScoped tickish && not (tickishCounts tickish)
-- = simplExprF env expr (TickIt tickish cont)
-- For unscoped or soft-scoped ticks, we are allowed to float in new
-- cost, so we simply push the continuation inside the tick. This
-- has the effect of moving the tick to the outside of a case or
-- application context, allowing the normal case and application
-- optimisations to fire.
| tickish `tickishScopesLike` SoftScope
= do { (env', expr') <- simplExprF env expr cont
; return (env', mkTick tickish expr')
}
-- Push tick inside if the context looks like this will allow us to
-- do a case-of-case - see Note [case-of-scc-of-case]
| Select {} <- cont, Just expr' <- push_tick_inside
= simplExprF env expr' cont
-- We don't want to move the tick, but we might still want to allow
-- floats to pass through with appropriate wrapping (or not, see
-- wrap_floats below)
--- | not (tickishCounts tickish) || tickishCanSplit tickish
-- = wrap_floats
| otherwise
= no_floating_past_tick
where
-- Try to push tick inside a case, see Note [case-of-scc-of-case].
push_tick_inside =
case expr0 of
Case scrut bndr ty alts
-> Just $ Case (tickScrut scrut) bndr ty (map tickAlt alts)
_other -> Nothing
where (ticks, expr0) = stripTicksTop movable (Tick tickish expr)
movable t = not (tickishCounts t) ||
t `tickishScopesLike` NoScope ||
tickishCanSplit t
tickScrut e = foldr mkTick e ticks
-- Alternatives get annotated with all ticks that scope in some way,
-- but we don't want to count entries.
tickAlt (c,bs,e) = (c,bs, foldr mkTick e ts_scope)
ts_scope = map mkNoCount $
filter (not . (`tickishScopesLike` NoScope)) ticks
no_floating_past_tick =
do { let (inc,outc) = splitCont cont
; (env', expr') <- simplExprF (zapFloats env) expr inc
; let tickish' = simplTickish env tickish
; (env'', expr'') <- rebuild (zapFloats env')
(wrapFloats env' expr')
(TickIt tickish' outc)
; return (addFloats env env'', expr'')
}
-- Alternative version that wraps outgoing floats with the tick. This
-- results in ticks being duplicated, as we don't make any attempt to
-- eliminate the tick if we re-inline the binding (because the tick
-- semantics allows unrestricted inlining of HNFs), so I'm not doing
-- this any more. FloatOut will catch any real opportunities for
-- floating.
--
-- wrap_floats =
-- do { let (inc,outc) = splitCont cont
-- ; (env', expr') <- simplExprF (zapFloats env) expr inc
-- ; let tickish' = simplTickish env tickish
-- ; let wrap_float (b,rhs) = (zapIdStrictness (setIdArity b 0),
-- mkTick (mkNoCount tickish') rhs)
-- -- when wrapping a float with mkTick, we better zap the Id's
-- -- strictness info and arity, because it might be wrong now.
-- ; let env'' = addFloats env (mapFloats env' wrap_float)
-- ; rebuild env'' expr' (TickIt tickish' outc)
-- }
simplTickish env tickish
| Breakpoint n ids <- tickish
= Breakpoint n (map (getDoneId . substId env) ids)
| otherwise = tickish
-- Push type application and coercion inside a tick
splitCont :: SimplCont -> (SimplCont, SimplCont)
splitCont cont@(ApplyToTy { sc_cont = tail }) = (cont { sc_cont = inc }, outc)
where (inc,outc) = splitCont tail
splitCont (CastIt co c) = (CastIt co inc, outc)
where (inc,outc) = splitCont c
splitCont other = (mkBoringStop (contHoleType other), other)
getDoneId (DoneId id) = id
getDoneId (DoneEx e) = getIdFromTrivialExpr e -- Note [substTickish] in CoreSubst
getDoneId other = pprPanic "getDoneId" (ppr other)
-- Note [case-of-scc-of-case]
-- It's pretty important to be able to transform case-of-case when
-- there's an SCC in the way. For example, the following comes up
-- in nofib/real/compress/Encode.hs:
--
-- case scctick<code_string.r1>
-- case $wcode_string_r13s wild_XC w1_s137 w2_s138 l_aje
-- of _ { (# ww1_s13f, ww2_s13g, ww3_s13h #) ->
-- (ww1_s13f, ww2_s13g, ww3_s13h)
-- }
-- of _ { (ww_s12Y, ww1_s12Z, ww2_s130) ->
-- tick<code_string.f1>
-- (ww_s12Y,
-- ww1_s12Z,
-- PTTrees.PT
-- @ GHC.Types.Char @ GHC.Types.Int wild2_Xj ww2_s130 r_ajf)
-- }
--
-- We really want this case-of-case to fire, because then the 3-tuple
-- will go away (indeed, the CPR optimisation is relying on this
-- happening). But the scctick is in the way - we need to push it
-- inside to expose the case-of-case. So we perform this
-- transformation on the inner case:
--
-- scctick c (case e of { p1 -> e1; ...; pn -> en })
-- ==>
-- case (scctick c e) of { p1 -> scc c e1; ...; pn -> scc c en }
--
-- So we've moved a constant amount of work out of the scc to expose
-- the case. We only do this when the continuation is interesting:
-- for now, it has to be another Case (maybe generalise this later).
{-
************************************************************************
* *
\subsection{The main rebuilder}
* *
************************************************************************
-}
rebuild :: SimplEnv -> OutExpr -> SimplCont -> SimplM (SimplEnv, OutExpr)
-- At this point the substitution in the SimplEnv should be irrelevant
-- only the in-scope set and floats should matter
rebuild env expr cont
= case cont of
Stop {} -> return (env, expr)
TickIt t cont -> rebuild env (mkTick t expr) cont
CastIt co cont -> rebuild env (mkCast expr co) cont
-- NB: mkCast implements the (Coercion co |> g) optimisation
Select _ bndr alts se cont -> rebuildCase (se `setFloats` env) expr bndr alts cont
StrictArg info _ cont -> rebuildCall env (info `addValArgTo` expr) cont
StrictBind b bs body se cont -> do { env' <- simplNonRecX (se `setFloats` env) b expr
-- expr satisfies let/app since it started life
-- in a call to simplNonRecE
; simplLam env' bs body cont }
ApplyToTy { sc_arg_ty = ty, sc_cont = cont}
-> rebuild env (App expr (Type ty)) cont
ApplyToVal { sc_arg = arg, sc_env = se, sc_dup = dup_flag, sc_cont = cont}
-- See Note [Avoid redundant simplification]
| isSimplified dup_flag -> rebuild env (App expr arg) cont
| otherwise -> do { arg' <- simplExpr (se `setInScope` env) arg
; rebuild env (App expr arg') cont }
{-
************************************************************************
* *
\subsection{Lambdas}
* *
************************************************************************
-}
simplCast :: SimplEnv -> InExpr -> Coercion -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplCast env body co0 cont0
= do { co1 <- simplCoercion env co0
; cont1 <- addCoerce co1 cont0
; simplExprF env body cont1 }
where
addCoerce co cont = add_coerce co (coercionKind co) cont
add_coerce _co (Pair s1 k1) cont -- co :: ty~ty
| s1 `eqType` k1 = return cont -- is a no-op
add_coerce co1 (Pair s1 _k2) (CastIt co2 cont)
| (Pair _l1 t1) <- coercionKind co2
-- e |> (g1 :: S1~L) |> (g2 :: L~T1)
-- ==>
-- e, if S1=T1
-- e |> (g1 . g2 :: S1~T1) otherwise
--
-- For example, in the initial form of a worker
-- we may find (coerce T (coerce S (\x.e))) y
-- and we'd like it to simplify to e[y/x] in one round
-- of simplification
, s1 `eqType` t1 = return cont -- The coerces cancel out
| otherwise = return (CastIt (mkTransCo co1 co2) cont)
add_coerce co (Pair s1s2 _t1t2) cont@(ApplyToTy { sc_arg_ty = arg_ty, sc_cont = tail })
-- (f |> g) ty ---> (f ty) |> (g @ ty)
-- This implements the PushT rule from the paper
| Just (tyvar,_) <- splitForAllTy_maybe s1s2
= ASSERT( isTyVar tyvar )
do { cont' <- addCoerce new_cast tail
; return (cont { sc_cont = cont' }) }
where
new_cast = mkInstCo co arg_ty
add_coerce co (Pair s1s2 t1t2) (ApplyToVal { sc_arg = arg, sc_env = arg_se
, sc_dup = dup, sc_cont = cont })
| isFunTy s1s2 -- This implements the Push rule from the paper
, isFunTy t1t2 -- Check t1t2 to ensure 'arg' is a value arg
-- (e |> (g :: s1s2 ~ t1->t2)) f
-- ===>
-- (e (f |> (arg g :: t1~s1))
-- |> (res g :: s2->t2)
--
-- t1t2 must be a function type, t1->t2, because it's applied
-- to something but s1s2 might conceivably not be
--
-- When we build the ApplyTo we can't mix the out-types
-- with the InExpr in the argument, so we simply substitute
-- to make it all consistent. It's a bit messy.
-- But it isn't a common case.
--
-- Example of use: Trac #995
= do { (dup', arg_se', arg') <- simplArg env dup arg_se arg
; cont' <- addCoerce co2 cont
; return (ApplyToVal { sc_arg = mkCast arg' (mkSymCo co1)
, sc_env = arg_se'
, sc_dup = dup'
, sc_cont = cont' }) }
where
-- we split coercion t1->t2 ~ s1->s2 into t1 ~ s1 and
-- t2 ~ s2 with left and right on the curried form:
-- (->) t1 t2 ~ (->) s1 s2
[co1, co2] = decomposeCo 2 co
add_coerce co _ cont = return (CastIt co cont)
simplArg :: SimplEnv -> DupFlag -> StaticEnv -> CoreExpr
-> SimplM (DupFlag, StaticEnv, OutExpr)
simplArg env dup_flag arg_env arg
| isSimplified dup_flag
= return (dup_flag, arg_env, arg)
| otherwise
= do { arg' <- simplExpr (arg_env `setInScope` env) arg
; return (Simplified, zapSubstEnv arg_env, arg') }
{-
************************************************************************
* *
\subsection{Lambdas}
* *
************************************************************************
Note [Zap unfolding when beta-reducing]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Lambda-bound variables can have stable unfoldings, such as
$j = \x. \b{Unf=Just x}. e
See Note [Case binders and join points] below; the unfolding for b lets
us optimise e better. However when we beta-reduce it we want to
revert to using the actual value, otherwise we can end up in the
stupid situation of
let x = blah in
let b{Unf=Just x} = y
in ...b...
Here it'd be far better to drop the unfolding and use the actual RHS.
-}
simplLam :: SimplEnv -> [InId] -> InExpr -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplLam env [] body cont = simplExprF env body cont
-- Beta reduction
simplLam env (bndr:bndrs) body (ApplyToTy { sc_arg_ty = arg_ty, sc_cont = cont })
= do { tick (BetaReduction bndr)
; simplLam (extendTvSubst env bndr arg_ty) bndrs body cont }
simplLam env (bndr:bndrs) body (ApplyToVal { sc_arg = arg, sc_env = arg_se
, sc_cont = cont })
= do { tick (BetaReduction bndr)
; simplNonRecE env (zap_unfolding bndr) (arg, arg_se) (bndrs, body) cont }
where
zap_unfolding bndr -- See Note [Zap unfolding when beta-reducing]
| isId bndr, isStableUnfolding (realIdUnfolding bndr)
= setIdUnfolding bndr NoUnfolding
| otherwise = bndr
-- discard a non-counting tick on a lambda. This may change the
-- cost attribution slightly (moving the allocation of the
-- lambda elsewhere), but we don't care: optimisation changes
-- cost attribution all the time.
simplLam env bndrs body (TickIt tickish cont)
| not (tickishCounts tickish)
= simplLam env bndrs body cont
-- Not enough args, so there are real lambdas left to put in the result
simplLam env bndrs body cont
= do { (env', bndrs') <- simplLamBndrs env bndrs
; body' <- simplExpr env' body
; new_lam <- mkLam bndrs' body' cont
; rebuild env' new_lam cont }
simplLamBndrs :: SimplEnv -> [InBndr] -> SimplM (SimplEnv, [OutBndr])
simplLamBndrs env bndrs = mapAccumLM simplLamBndr env bndrs
-------------
simplLamBndr :: SimplEnv -> Var -> SimplM (SimplEnv, Var)
-- Used for lambda binders. These sometimes have unfoldings added by
-- the worker/wrapper pass that must be preserved, because they can't
-- be reconstructed from context. For example:
-- f x = case x of (a,b) -> fw a b x
-- fw a b x{=(a,b)} = ...
-- The "{=(a,b)}" is an unfolding we can't reconstruct otherwise.
simplLamBndr env bndr
| isId bndr && hasSomeUnfolding old_unf -- Special case
= do { (env1, bndr1) <- simplBinder env bndr
; unf' <- simplUnfolding env1 NotTopLevel bndr old_unf
; let bndr2 = bndr1 `setIdUnfolding` unf'
; return (modifyInScope env1 bndr2, bndr2) }
| otherwise
= simplBinder env bndr -- Normal case
where
old_unf = idUnfolding bndr
------------------
simplNonRecE :: SimplEnv
-> InBndr -- The binder
-> (InExpr, SimplEnv) -- Rhs of binding (or arg of lambda)
-> ([InBndr], InExpr) -- Body of the let/lambda
-- \xs.e
-> SimplCont
-> SimplM (SimplEnv, OutExpr)
-- simplNonRecE is used for
-- * non-top-level non-recursive lets in expressions
-- * beta reduction
--
-- It deals with strict bindings, via the StrictBind continuation,
-- which may abort the whole process
--
-- Precondition: rhs satisfies the let/app invariant
-- Note [CoreSyn let/app invariant] in CoreSyn
--
-- The "body" of the binding comes as a pair of ([InId],InExpr)
-- representing a lambda; so we recurse back to simplLam
-- Why? Because of the binder-occ-info-zapping done before
-- the call to simplLam in simplExprF (Lam ...)
-- First deal with type applications and type lets
-- (/\a. e) (Type ty) and (let a = Type ty in e)
simplNonRecE env bndr (Type ty_arg, rhs_se) (bndrs, body) cont
= ASSERT( isTyVar bndr )
do { ty_arg' <- simplType (rhs_se `setInScope` env) ty_arg
; simplLam (extendTvSubst env bndr ty_arg') bndrs body cont }
simplNonRecE env bndr (rhs, rhs_se) (bndrs, body) cont
= do dflags <- getDynFlags
case () of
_ | preInlineUnconditionally dflags env NotTopLevel bndr rhs
-> do { tick (PreInlineUnconditionally bndr)
; -- pprTrace "preInlineUncond" (ppr bndr <+> ppr rhs) $
simplLam (extendIdSubst env bndr (mkContEx rhs_se rhs)) bndrs body cont }
| isStrictId bndr -- Includes coercions
-> simplExprF (rhs_se `setFloats` env) rhs
(StrictBind bndr bndrs body env cont)
| otherwise
-> ASSERT( not (isTyVar bndr) )
do { (env1, bndr1) <- simplNonRecBndr env bndr
; (env2, bndr2) <- addBndrRules env1 bndr bndr1
; env3 <- simplLazyBind env2 NotTopLevel NonRecursive bndr bndr2 rhs rhs_se
; simplLam env3 bndrs body cont }
{-
************************************************************************
* *
Variables
* *
************************************************************************
-}
simplVar :: SimplEnv -> InVar -> SimplM OutExpr
-- Look up an InVar in the environment
simplVar env var
| isTyVar var = return (Type (substTyVar env var))
| isCoVar var = return (Coercion (substCoVar env var))
| otherwise
= case substId env var of
DoneId var1 -> return (Var var1)
DoneEx e -> return e
ContEx tvs cvs ids e -> simplExpr (setSubstEnv env tvs cvs ids) e
simplIdF :: SimplEnv -> InId -> SimplCont -> SimplM (SimplEnv, OutExpr)
simplIdF env var cont
= case substId env var of
DoneEx e -> simplExprF (zapSubstEnv env) e cont
ContEx tvs cvs ids e -> simplExprF (setSubstEnv env tvs cvs ids) e cont
DoneId var1 -> completeCall env var1 cont
-- Note [zapSubstEnv]
-- The template is already simplified, so don't re-substitute.
-- This is VITAL. Consider
-- let x = e in
-- let y = \z -> ...x... in
-- \ x -> ...y...
-- We'll clone the inner \x, adding x->x' in the id_subst
-- Then when we inline y, we must *not* replace x by x' in
-- the inlined copy!!
---------------------------------------------------------
-- Dealing with a call site
completeCall :: SimplEnv -> OutId -> SimplCont -> SimplM (SimplEnv, OutExpr)
completeCall env var cont
= do { ------------- Try inlining ----------------
dflags <- getDynFlags
; let (lone_variable, arg_infos, call_cont) = contArgs cont
n_val_args = length arg_infos
interesting_cont = interestingCallContext call_cont
unfolding = activeUnfolding env var
maybe_inline = callSiteInline dflags var unfolding
lone_variable arg_infos interesting_cont
; case maybe_inline of {
Just expr -- There is an inlining!
-> do { checkedTick (UnfoldingDone var)
; dump_inline dflags expr cont
; simplExprF (zapSubstEnv env) expr cont }
; Nothing -> do -- No inlining!
{ rule_base <- getSimplRules
; let info = mkArgInfo var (getRules rule_base var) n_val_args call_cont
; rebuildCall env info cont
}}}
where
dump_inline dflags unfolding cont
| not (dopt Opt_D_dump_inlinings dflags) = return ()
| not (dopt Opt_D_verbose_core2core dflags)
= when (isExternalName (idName var)) $
liftIO $ printOutputForUser dflags alwaysQualify $
sep [text "Inlining done:", nest 4 (ppr var)]
| otherwise
= liftIO $ printOutputForUser dflags alwaysQualify $
sep [text "Inlining done: " <> ppr var,
nest 4 (vcat [text "Inlined fn: " <+> nest 2 (ppr unfolding),
text "Cont: " <+> ppr cont])]
rebuildCall :: SimplEnv
-> ArgInfo
-> SimplCont
-> SimplM (SimplEnv, OutExpr)
rebuildCall env (ArgInfo { ai_fun = fun, ai_args = rev_args, ai_strs = [] }) cont
-- When we run out of strictness args, it means
-- that the call is definitely bottom; see SimplUtils.mkArgInfo
-- Then we want to discard the entire strict continuation. E.g.
-- * case (error "hello") of { ... }
-- * (error "Hello") arg
-- * f (error "Hello") where f is strict
-- etc
-- Then, especially in the first of these cases, we'd like to discard
-- the continuation, leaving just the bottoming expression. But the
-- type might not be right, so we may have to add a coerce.
| not (contIsTrivial cont) -- Only do this if there is a non-trivial
= return (env, castBottomExpr res cont_ty) -- continuation to discard, else we do it
where -- again and again!
res = argInfoExpr fun rev_args
cont_ty = contResultType cont
rebuildCall env info (CastIt co cont)
= rebuildCall env (addCastTo info co) cont
rebuildCall env info (ApplyToTy { sc_arg_ty = arg_ty, sc_cont = cont })
= rebuildCall env (info `addTyArgTo` arg_ty) cont
rebuildCall env info@(ArgInfo { ai_encl = encl_rules, ai_type = fun_ty
, ai_strs = str:strs, ai_discs = disc:discs })
(ApplyToVal { sc_arg = arg, sc_env = arg_se
, sc_dup = dup_flag, sc_cont = cont })
| isSimplified dup_flag -- See Note [Avoid redundant simplification]
= rebuildCall env (addValArgTo info' arg) cont
| str -- Strict argument
= -- pprTrace "Strict Arg" (ppr arg $$ ppr (seIdSubst env) $$ ppr (seInScope env)) $
simplExprF (arg_se `setFloats` env) arg
(StrictArg info' cci cont)
-- Note [Shadowing]
| otherwise -- Lazy argument
-- DO NOT float anything outside, hence simplExprC
-- There is no benefit (unlike in a let-binding), and we'd
-- have to be very careful about bogus strictness through
-- floating a demanded let.
= do { arg' <- simplExprC (arg_se `setInScope` env) arg
(mkLazyArgStop (funArgTy fun_ty) cci)
; rebuildCall env (addValArgTo info' arg') cont }
where
info' = info { ai_strs = strs, ai_discs = discs }
cci | encl_rules = RuleArgCtxt
| disc > 0 = DiscArgCtxt -- Be keener here
| otherwise = BoringCtxt -- Nothing interesting
rebuildCall env (ArgInfo { ai_fun = fun, ai_args = rev_args, ai_rules = rules }) cont
| null rules
= rebuild env (argInfoExpr fun rev_args) cont -- No rules, common case
| otherwise
= do { -- We've accumulated a simplified call in <fun,rev_args>
-- so try rewrite rules; see Note [RULES apply to simplified arguments]
-- See also Note [Rules for recursive functions]
; let env' = zapSubstEnv env -- See Note [zapSubstEnv];
-- and NB that 'rev_args' are all fully simplified
; mb_rule <- tryRules env' rules fun (reverse rev_args) cont
; case mb_rule of {
Just (rule_rhs, cont') -> simplExprF env' rule_rhs cont'
-- Rules don't match
; Nothing -> rebuild env (argInfoExpr fun rev_args) cont -- No rules
} }
{-
Note [RULES apply to simplified arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's very desirable to try RULES once the arguments have been simplified, because
doing so ensures that rule cascades work in one pass. Consider
{-# RULES g (h x) = k x
f (k x) = x #-}
...f (g (h x))...
Then we want to rewrite (g (h x)) to (k x) and only then try f's rules. If
we match f's rules against the un-simplified RHS, it won't match. This
makes a particularly big difference when superclass selectors are involved:
op ($p1 ($p2 (df d)))
We want all this to unravel in one sweep.
Note [Avoid redundant simplification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Because RULES apply to simplified arguments, there's a danger of repeatedly
simplifying already-simplified arguments. An important example is that of
(>>=) d e1 e2
Here e1, e2 are simplified before the rule is applied, but don't really
participate in the rule firing. So we mark them as Simplified to avoid
re-simplifying them.
Note [Shadowing]
~~~~~~~~~~~~~~~~
This part of the simplifier may break the no-shadowing invariant
Consider
f (...(\a -> e)...) (case y of (a,b) -> e')
where f is strict in its second arg
If we simplify the innermost one first we get (...(\a -> e)...)
Simplifying the second arg makes us float the case out, so we end up with
case y of (a,b) -> f (...(\a -> e)...) e'
So the output does not have the no-shadowing invariant. However, there is
no danger of getting name-capture, because when the first arg was simplified
we used an in-scope set that at least mentioned all the variables free in its
static environment, and that is enough.
We can't just do innermost first, or we'd end up with a dual problem:
case x of (a,b) -> f e (...(\a -> e')...)
I spent hours trying to recover the no-shadowing invariant, but I just could
not think of an elegant way to do it. The simplifier is already knee-deep in
continuations. We have to keep the right in-scope set around; AND we have
to get the effect that finding (error "foo") in a strict arg position will
discard the entire application and replace it with (error "foo"). Getting
all this at once is TOO HARD!
************************************************************************
* *
Rewrite rules
* *
************************************************************************
-}
tryRules :: SimplEnv -> [CoreRule]
-> Id -> [ArgSpec] -> SimplCont
-> SimplM (Maybe (CoreExpr, SimplCont))
-- The SimplEnv already has zapSubstEnv applied to it
tryRules env rules fn args call_cont
| null rules
= return Nothing
{- Disabled until we fix #8326
| fn `hasKey` tagToEnumKey -- See Note [Optimising tagToEnum#]
, [_type_arg, val_arg] <- args
, Select dup bndr ((_,[],rhs1) : rest_alts) se cont <- call_cont
, isDeadBinder bndr
= do { dflags <- getDynFlags
; let enum_to_tag :: CoreAlt -> CoreAlt
-- Takes K -> e into tagK# -> e
-- where tagK# is the tag of constructor K
enum_to_tag (DataAlt con, [], rhs)
= ASSERT( isEnumerationTyCon (dataConTyCon con) )
(LitAlt tag, [], rhs)
where
tag = mkMachInt dflags (toInteger (dataConTag con - fIRST_TAG))
enum_to_tag alt = pprPanic "tryRules: tagToEnum" (ppr alt)
new_alts = (DEFAULT, [], rhs1) : map enum_to_tag rest_alts
new_bndr = setIdType bndr intPrimTy
-- The binder is dead, but should have the right type
; return (Just (val_arg, Select dup new_bndr new_alts se cont)) }
-}
| otherwise
= do { dflags <- getDynFlags
; case lookupRule dflags (getUnfoldingInRuleMatch env) (activeRule env)
fn (argInfoAppArgs args) rules of {
Nothing -> return Nothing ; -- No rule matches
Just (rule, rule_rhs) ->
do { checkedTick (RuleFired (ru_name rule))
; let cont' = pushSimplifiedArgs env
(drop (ruleArity rule) args)
call_cont
-- (ruleArity rule) says how many args the rule consumed
; dump dflags rule rule_rhs
; return (Just (rule_rhs, cont')) }}}
where
dump dflags rule rule_rhs
| dopt Opt_D_dump_rule_rewrites dflags
= log_rule dflags Opt_D_dump_rule_rewrites "Rule fired" $ vcat
[ text "Rule:" <+> ftext (ru_name rule)
, text "Before:" <+> hang (ppr fn) 2 (sep (map ppr args))
, text "After: " <+> pprCoreExpr rule_rhs
, text "Cont: " <+> ppr call_cont ]
| dopt Opt_D_dump_rule_firings dflags
= log_rule dflags Opt_D_dump_rule_firings "Rule fired:" $
ftext (ru_name rule)
| otherwise
= return ()
log_rule dflags flag hdr details
= liftIO . dumpSDoc dflags alwaysQualify flag "" $
sep [text hdr, nest 4 details]
{-
Note [Optimising tagToEnum#]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have an enumeration data type:
data Foo = A | B | C
Then we want to transform
case tagToEnum# x of ==> case x of
A -> e1 DEFAULT -> e1
B -> e2 1# -> e2
C -> e3 2# -> e3
thereby getting rid of the tagToEnum# altogether. If there was a DEFAULT
alternative we retain it (remember it comes first). If not, the case must
be exhaustive, and we reflect that in the transformed version by adding
a DEFAULT. Otherwise Lint complains that the new case is not exhaustive.
See #8317.
Note [Rules for recursive functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
You might think that we shouldn't apply rules for a loop breaker:
doing so might give rise to an infinite loop, because a RULE is
rather like an extra equation for the function:
RULE: f (g x) y = x+y
Eqn: f a y = a-y
But it's too drastic to disable rules for loop breakers.
Even the foldr/build rule would be disabled, because foldr
is recursive, and hence a loop breaker:
foldr k z (build g) = g k z
So it's up to the programmer: rules can cause divergence.
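A purely illustrative example (not from GHC itself) of such divergence: with
Eqn:   f a y = a - y
RULE:  f x y = f y x
the RULE rewrites (f a b) to (f b a) and back again for ever, so the
simplifier never reaches a fixed point. The same thing can happen more
subtly when a RULE re-introduces a call to a recursive function that
the equation would have consumed.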
************************************************************************
* *
Rebuilding a case expression
* *
************************************************************************
Note [Case elimination]
~~~~~~~~~~~~~~~~~~~~~~~
The case-elimination transformation discards redundant case expressions.
Start with a simple situation:
case x# of ===> let y# = x# in e
y# -> e
(when x#, y# are of primitive type, of course). We can't (in general)
do this for algebraic cases, because we might turn bottom into
non-bottom!
The code in SimplUtils.prepareAlts has the effect of generalising this
idea to look for a case where we're scrutinising a variable, and we
know that only the default case can match. For example:
case x of
0# -> ...
DEFAULT -> ...(case x of
0# -> ...
DEFAULT -> ...) ...
Here the inner case is first trimmed to have only one alternative, the
DEFAULT, after which it's an instance of the previous case. This
really only shows up in eliminating error-checking code.
Note that SimplUtils.mkCase combines identical RHSs. So
case e of ===> case e of DEFAULT -> r
True -> r
False -> r
Now again the case may be eliminated by the CaseElim transformation.
This includes things like (==# a# b#)::Bool so that we simplify
case ==# a# b# of { True -> x; False -> x }
to just
x
This particular example shows up in default methods for
comparison operations (e.g. in (>=) for Int.Int32)
Note [Case elimination: lifted case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If a case over a lifted type has a single alternative, and is being used
as a strict 'let' (all isDeadBinder bndrs), we may want to do this
transformation:
case e of r ===> let r = e in ...r...
_ -> ...r...
(a) 'e' is already evaluated (it may be so if e is a variable)
Specifically we check (exprIsHNF e). In this case
we can just allocate the WHNF directly with a let.
or
(b) 'r' is not used at all and e is ok-for-speculation
The ok-for-spec bit checks that we don't lose any
exceptions or divergence.
NB: it'd be *sound* to switch from case to let if the
scrutinee was not yet WHNF but was guaranteed to
converge; but sticking with case means we won't build a
thunk
or
(c) 'r' is used strictly in the body, and 'e' is a variable
Then we can just substitute 'e' for 'r' in the body.
See Note [Eliminating redundant seqs]
For (b), the "not used at all" test is important. Consider
case (case a ># b of { True -> (p,q); False -> (q,p) }) of
r -> blah
The scrutinee is ok-for-speculation (it looks inside cases), but we do
not want to transform to
let r = case a ># b of { True -> (p,q); False -> (q,p) }
in blah
because that builds an unnecessary thunk.
Note [Eliminating redundant seqs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have this:
case x of r { _ -> ..r.. }
where 'r' is used strictly in (..r..), the case is effectively a 'seq'
on 'x', but since 'r' is used strictly anyway, we can safely transform to
(...x...)
Note that this can change the error behaviour. For example, we might
transform
case x of { _ -> error "bad" }
--> error "bad"
which might be puzzling if 'x' is currently lambda-bound, but later gets
let-bound to (error "good").
Nevertheless, the paper "A semantics for imprecise exceptions" allows
this transformation. If you want to fix the evaluation order, use
'pseq'. See Trac #8900 for an example where the loss of this
transformation bit us in practice.
See also Note [Empty case alternatives] in CoreSyn.
Just for reference, the original code (added Jan 13) looked like this:
|| case_bndr_evald_next rhs
case_bndr_evald_next :: CoreExpr -> Bool
-- See Note [Case binder next]
case_bndr_evald_next (Var v) = v == case_bndr
case_bndr_evald_next (Cast e _) = case_bndr_evald_next e
case_bndr_evald_next (App e _) = case_bndr_evald_next e
case_bndr_evald_next (Case e _ _ _) = case_bndr_evald_next e
case_bndr_evald_next _ = False
(This came up when fixing Trac #7542. See also Note [Eta reduction of
an eval'd function] in CoreUtils.)
Note [Case elimination: unlifted case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
case a +# b of r -> ...r...
Then we do case-elimination (to make a let) followed by inlining,
to get
.....(a +# b)....
If we have
case indexArray# a i of r -> ...r...
we might like to do the same, and inline the (indexArray# a i).
But indexArray# is not okForSpeculation, so we don't build a let
in rebuildCase (lest it get floated *out*), so the inlining doesn't
happen either.
This really isn't a big deal I think. The let can be
Further notes about case elimination
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider: test :: Integer -> IO ()
test = print
Turns out that this compiles to:
Print.test
= \ eta :: Integer
eta1 :: Void# ->
case PrelNum.< eta PrelNum.zeroInteger of wild { __DEFAULT ->
case hPutStr stdout
(PrelNum.jtos eta ($w[] @ Char))
eta1
of wild1 { (# new_s, a4 #) -> PrelIO.lvl23 new_s }}
Notice the strange '<' which has no effect at all. This is a funny one.
It started like this:
f x y = if x < 0 then jtos x
else if y==0 then "" else jtos x
At a particular call site we have (f v 1). So we inline to get
if v < 0 then jtos v
else if 1==0 then "" else jtos v
Now simplify the 1==0 conditional:
if v<0 then jtos v else jtos v
Now common-up the two branches of the case:
case (v<0) of DEFAULT -> jtos v
Why don't we drop the case? Because it's strict in v. It's technically
wrong to drop even unnecessary evaluations, and in practice they
may be a result of 'seq' so we *definitely* don't want to drop those.
I don't really know how to improve this situation.
-}
---------------------------------------------------------
-- Eliminate the case if possible
rebuildCase, reallyRebuildCase
:: SimplEnv
-> OutExpr -- Scrutinee
-> InId -- Case binder
-> [InAlt] -- Alternatives (increasing order)
-> SimplCont
-> SimplM (SimplEnv, OutExpr)
--------------------------------------------------
-- 1. Eliminate the case if there's a known constructor
--------------------------------------------------
rebuildCase env scrut case_bndr alts cont
| Lit lit <- scrut -- No need for same treatment as constructors
-- because literals are inlined more vigorously
, not (litIsLifted lit)
= do { tick (KnownBranch case_bndr)
; case findAlt (LitAlt lit) alts of
Nothing -> missingAlt env case_bndr alts cont
Just (_, bs, rhs) -> simple_rhs bs rhs }
| Just (con, ty_args, other_args) <- exprIsConApp_maybe (getUnfoldingInRuleMatch env) scrut
-- Works when the scrutinee is a variable with a known unfolding
-- as well as when it's an explicit constructor application
= do { tick (KnownBranch case_bndr)
; case findAlt (DataAlt con) alts of
Nothing -> missingAlt env case_bndr alts cont
Just (DEFAULT, bs, rhs) -> simple_rhs bs rhs
Just (_, bs, rhs) -> knownCon env scrut con ty_args other_args
case_bndr bs rhs cont
}
where
simple_rhs bs rhs = ASSERT( null bs )
do { env' <- simplNonRecX env case_bndr scrut
-- scrut is a constructor application,
-- hence satisfies let/app invariant
; simplExprF env' rhs cont }
--------------------------------------------------
-- 2. Eliminate the case if scrutinee is evaluated
--------------------------------------------------
rebuildCase env scrut case_bndr alts@[(_, bndrs, rhs)] cont
-- See if we can get rid of the case altogether
-- See Note [Case elimination]
-- mkCase made sure that if all the alternatives are equal,
-- then there is now only one (DEFAULT) rhs
-- 2a. Dropping the case altogether, if
-- a) it binds nothing (so it's really just a 'seq')
-- b) evaluating the scrutinee has no side effects
| is_plain_seq
, exprOkForSideEffects scrut
-- The entire case is dead, so we can drop it
-- if the scrutinee converges without having imperative
-- side effects or raising a Haskell exception
-- See Note [PrimOp can_fail and has_side_effects] in PrimOp
= simplExprF env rhs cont
-- 2b. Turn the case into a let, if
-- a) it binds only the case-binder
-- b) unlifted case: the scrutinee is ok-for-speculation
-- lifted case: the scrutinee is in HNF (or will later be demanded)
| all_dead_bndrs
, if is_unlifted
then exprOkForSpeculation scrut -- See Note [Case elimination: unlifted case]
else exprIsHNF scrut -- See Note [Case elimination: lifted case]
|| scrut_is_demanded_var scrut
= do { tick (CaseElim case_bndr)
; env' <- simplNonRecX env case_bndr scrut
; simplExprF env' rhs cont }
-- 2c. Try the seq rules if
-- a) it binds only the case binder
-- b) a rule for seq applies
-- See Note [User-defined RULES for seq] in MkId
| is_plain_seq
= do { let scrut_ty = exprType scrut
rhs_ty = substTy env (exprType rhs)
out_args = [ TyArg { as_arg_ty = scrut_ty
, as_hole_ty = seq_id_ty }
, TyArg { as_arg_ty = rhs_ty
, as_hole_ty = applyTy seq_id_ty scrut_ty }
, ValArg scrut]
rule_cont = ApplyToVal { sc_dup = NoDup, sc_arg = rhs
, sc_env = env, sc_cont = cont }
env' = zapSubstEnv env
-- Lazily evaluated, so we don't do most of this
; rule_base <- getSimplRules
; mb_rule <- tryRules env' (getRules rule_base seqId) seqId out_args rule_cont
; case mb_rule of
Just (rule_rhs, cont') -> simplExprF env' rule_rhs cont'
Nothing -> reallyRebuildCase env scrut case_bndr alts cont }
where
is_unlifted = isUnLiftedType (idType case_bndr)
all_dead_bndrs = all isDeadBinder bndrs -- bndrs are [InId]
is_plain_seq = all_dead_bndrs && isDeadBinder case_bndr -- Evaluation *only* for effect
seq_id_ty = idType seqId
scrut_is_demanded_var :: CoreExpr -> Bool
-- See Note [Eliminating redundant seqs]
scrut_is_demanded_var (Cast s _) = scrut_is_demanded_var s
scrut_is_demanded_var (Var _) = isStrictDmd (idDemandInfo case_bndr)
scrut_is_demanded_var _ = False
rebuildCase env scrut case_bndr alts cont
= reallyRebuildCase env scrut case_bndr alts cont
--------------------------------------------------
-- 3. Catch-all case
--------------------------------------------------
reallyRebuildCase env scrut case_bndr alts cont
= do { -- Prepare the continuation;
-- The new subst_env is in place
(env', dup_cont, nodup_cont) <- prepareCaseCont env alts cont
-- Simplify the alternatives
; (scrut', case_bndr', alts') <- simplAlts env' scrut case_bndr alts dup_cont
; dflags <- getDynFlags
; let alts_ty' = contResultType dup_cont
; case_expr <- mkCase dflags scrut' case_bndr' alts_ty' alts'
-- Notice that rebuild gets the in-scope set from env', not alt_env
-- (which in any case is only built in simplAlts)
-- The case binder does *not* scope over the whole returned case-expression
; rebuild env' case_expr nodup_cont }
{-
simplCaseBinder checks whether the scrutinee is a variable, v. If so,
try to eliminate uses of v in the RHSs in favour of case_bndr; that
way, there's a chance that v will now only be used once, and hence
inlined.
Historical note: we used to do the "case binder swap" in the Simplifier
so there were additional complications if the scrutinee was a variable.
Now the binder-swap stuff is done in the occurrence analyser; see
OccurAnal Note [Binder swap].
Note [knownCon occ info]
~~~~~~~~~~~~~~~~~~~~~~~~
If the case binder is not dead, then neither are the pattern bound
variables:
case <any> of x { (a,b) ->
case x of { (p,q) -> p } }
Here (a,b) both look dead, but come alive after the inner case is eliminated.
The point is that we bring into the envt a binding
let x = (a,b)
after the outer case, and that makes (a,b) alive. At least we do unless
the case binder is guaranteed dead.
Note [Case alternative occ info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we are simply reconstructing a case (the common case), we always
zap the occurrence info on the binders in the alternatives. Even
if the case binder is dead, the scrutinee is usually a variable, and *that*
can bring the case-alternative binders back to life.
See Note [Add unfolding for scrutinee]
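A purely illustrative example:
case x of y { (a,b) -> ...x... }
Even if 'a' and 'b' look dead in the RHS, the scrutinee 'x' gets the
unfolding (a,b) (see Note [Add unfolding for scrutinee]), so inlining
'x' in the RHS can bring 'a' and 'b' back to life. Hence we zap their
occurrence info rather than trust it.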
Note [Improving seq]
~~~~~~~~~~~~~~~~~~~
Consider
type family F :: * -> *
type instance F Int = Int
... case e of x { DEFAULT -> rhs } ...
where x::F Int. Then we'd like to rewrite (F Int) to Int, getting
case e `cast` co of x'::Int
I# x# -> let x = x' `cast` sym co
in rhs
so that 'rhs' can take advantage of the form of x'.
Notice that Note [Case of cast] (in OccurAnal) may then apply to the result.
Nota Bene: We only do the [Improving seq] transformation if the
case binder 'x' is actually used in the rhs; that is, if the case
is *not* a *pure* seq.
a) There is no point in adding the cast to a pure seq.
b) There is a good reason not to: doing so would interfere
with seq rules (Note [Built-in RULES for seq] in MkId).
In particular, this [Improving seq] thing *adds* a cast
while [Built-in RULES for seq] *removes* one, so they
just flip-flop.
You might worry about
case v of x { __DEFAULT ->
... case (v `cast` co) of y { I# -> ... }}
This is a pure seq (since x is unused), so [Improving seq] won't happen.
But it's ok: the simplifier will replace 'v' by 'x' in the rhs to get
case v of x { __DEFAULT ->
... case (x `cast` co) of y { I# -> ... }}
Now the outer case is not a pure seq, so [Improving seq] will happen,
and then the inner case will disappear.
The need for [Improving seq] showed up in Roman's experiments. Example:
foo :: F Int -> Int -> Int
foo t n = t `seq` bar n
where
bar 0 = 0
bar n = bar (n - case t of TI i -> i)
Here we'd like to avoid repeatedly evaluating t inside the loop, by
taking advantage of the `seq`.
At one point I did this transformation in LiberateCase, but it's more
robust here. (Otherwise, there's a danger that we'll simply drop the
'seq' altogether, before LiberateCase gets to see it.)
-}
simplAlts :: SimplEnv
-> OutExpr
-> InId -- Case binder
-> [InAlt] -- Non-empty
-> SimplCont
-> SimplM (OutExpr, OutId, [OutAlt]) -- Includes the continuation
-- Like simplExpr, this just returns the simplified alternatives;
-- it does not return an environment
-- The returned alternatives can be empty, if none are possible
simplAlts env scrut case_bndr alts cont'
= do { let env0 = zapFloats env
; (env1, case_bndr1) <- simplBinder env0 case_bndr
; fam_envs <- getFamEnvs
; (alt_env', scrut', case_bndr') <- improveSeq fam_envs env1 scrut
case_bndr case_bndr1 alts
; (imposs_deflt_cons, in_alts) <- prepareAlts scrut' case_bndr' alts
-- NB: it's possible that the returned in_alts is empty: this is handled
-- by the caller (rebuildCase) in the missingAlt function
; alts' <- mapM (simplAlt alt_env' (Just scrut') imposs_deflt_cons case_bndr' cont') in_alts
; -- pprTrace "simplAlts" (ppr case_bndr $$ ppr alts_ty $$ ppr alts_ty' $$ ppr alts $$ ppr cont') $
return (scrut', case_bndr', alts') }
------------------------------------
improveSeq :: (FamInstEnv, FamInstEnv) -> SimplEnv
-> OutExpr -> InId -> OutId -> [InAlt]
-> SimplM (SimplEnv, OutExpr, OutId)
-- Note [Improving seq]
improveSeq fam_envs env scrut case_bndr case_bndr1 [(DEFAULT,_,_)]
| not (isDeadBinder case_bndr) -- Not a pure seq! See Note [Improving seq]
, Just (co, ty2) <- topNormaliseType_maybe fam_envs (idType case_bndr1)
= do { case_bndr2 <- newId (fsLit "nt") ty2
; let rhs = DoneEx (Var case_bndr2 `Cast` mkSymCo co)
env2 = extendIdSubst env case_bndr rhs
; return (env2, scrut `Cast` co, case_bndr2) }
improveSeq _ env scrut _ case_bndr1 _
= return (env, scrut, case_bndr1)
------------------------------------
simplAlt :: SimplEnv
-> Maybe OutExpr -- The scrutinee
-> [AltCon] -- These constructors can't be present when
-- matching the DEFAULT alternative
-> OutId -- The case binder
-> SimplCont
-> InAlt
-> SimplM OutAlt
simplAlt env _ imposs_deflt_cons case_bndr' cont' (DEFAULT, bndrs, rhs)
= ASSERT( null bndrs )
do { let env' = addBinderUnfolding env case_bndr'
(mkOtherCon imposs_deflt_cons)
-- Record the constructors that the case-binder *can't* be.
; rhs' <- simplExprC env' rhs cont'
; return (DEFAULT, [], rhs') }
simplAlt env scrut' _ case_bndr' cont' (LitAlt lit, bndrs, rhs)
= ASSERT( null bndrs )
do { env' <- addAltUnfoldings env scrut' case_bndr' (Lit lit)
; rhs' <- simplExprC env' rhs cont'
; return (LitAlt lit, [], rhs') }
simplAlt env scrut' _ case_bndr' cont' (DataAlt con, vs, rhs)
= do { -- Deal with the pattern-bound variables
-- Mark the ones that are in ! positions in the
-- data constructor as certainly-evaluated.
-- NB: simplLamBndrs preserves this eval info
; let vs_with_evals = add_evals (dataConRepStrictness con)
; (env', vs') <- simplLamBndrs env vs_with_evals
-- Bind the case-binder to (con args)
; let inst_tys' = tyConAppArgs (idType case_bndr')
con_app :: OutExpr
con_app = mkConApp2 con inst_tys' vs'
; env'' <- addAltUnfoldings env' scrut' case_bndr' con_app
; rhs' <- simplExprC env'' rhs cont'
; return (DataAlt con, vs', rhs') }
where
-- add_evals records the evaluated-ness of the bound variables of
-- a case pattern. This is *important*. Consider
-- data T = T !Int !Int
--
-- case x of { T a b -> T (a+1) b }
--
-- We really must record that b is already evaluated so that we don't
-- go and re-evaluate it when constructing the result.
-- See Note [Data-con worker strictness] in MkId.hs
add_evals the_strs
= go vs the_strs
where
go [] [] = []
go (v:vs') strs | isTyVar v = v : go vs' strs
go (v:vs') (str:strs)
| isMarkedStrict str = evald_v : go vs' strs
| otherwise = zapped_v : go vs' strs
where
zapped_v = zapIdOccInfo v -- See Note [Case alternative occ info]
evald_v = zapped_v `setIdUnfolding` evaldUnfolding
go _ _ = pprPanic "cat_evals" (ppr con $$ ppr vs $$ ppr the_strs)
addAltUnfoldings :: SimplEnv -> Maybe OutExpr -> OutId -> OutExpr -> SimplM SimplEnv
addAltUnfoldings env scrut case_bndr con_app
= do { dflags <- getDynFlags
; let con_app_unf = mkSimpleUnfolding dflags con_app
env1 = addBinderUnfolding env case_bndr con_app_unf
-- See Note [Add unfolding for scrutinee]
env2 = case scrut of
Just (Var v) -> addBinderUnfolding env1 v con_app_unf
Just (Cast (Var v) co) -> addBinderUnfolding env1 v $
mkSimpleUnfolding dflags (Cast con_app (mkSymCo co))
_ -> env1
; traceSmpl "addAltUnf" (vcat [ppr case_bndr <+> ppr scrut, ppr con_app])
; return env2 }
addBinderUnfolding :: SimplEnv -> Id -> Unfolding -> SimplEnv
addBinderUnfolding env bndr unf
| debugIsOn, Just tmpl <- maybeUnfoldingTemplate unf
= WARN( not (eqType (idType bndr) (exprType tmpl)),
ppr bndr $$ ppr (idType bndr) $$ ppr tmpl $$ ppr (exprType tmpl) )
modifyInScope env (bndr `setIdUnfolding` unf)
| otherwise
= modifyInScope env (bndr `setIdUnfolding` unf)
zapBndrOccInfo :: Bool -> Id -> Id
-- Consider case e of b { (a,b) -> ... }
-- Then if we bind b to (a,b) in "...", and b is not dead,
-- then we must zap the deadness info on a,b
zapBndrOccInfo keep_occ_info pat_id
| keep_occ_info = pat_id
| otherwise = zapIdOccInfo pat_id
{-
Note [Add unfolding for scrutinee]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general it's unlikely that a variable scrutinee will appear
in the case alternatives: case x of { ...x unlikely to appear... }
because the binder-swap in OccAnal has got rid of all such occurrences.
See Note [Binder swap] in OccAnal.
BUT it is still VERY IMPORTANT to add a suitable unfolding for a
variable scrutinee, in simplAlt. Here's why
case x of y
(a,b) -> case b of c
I# v -> ...(f y)...
There is no occurrence of 'b' in the (...(f y)...). But y gets
the unfolding (a,b), and *that* mentions b. If f has a RULE
RULE f (p, I# q) = ...
we want that rule to match, so we must extend the in-scope env with a
suitable unfolding for 'y'. It's *essential* for rule matching; but
it's also good for case-elimination -- suppose that 'f' was inlined
and did multi-level case analysis, then we'd solve it in one
simplifier sweep instead of two.
Exactly the same issue arises in SpecConstr;
see Note [Add scrutinee to ValueEnv too] in SpecConstr
HOWEVER, given
case x of y { Just a -> r1; Nothing -> r2 }
we do not want to add the unfolding x -> y to 'x', which might seem cool,
since 'y' itself has different unfoldings in r1 and r2. Reason: if we
did that, we'd have to zap y's deadness info and that is a very useful
piece of information.
So instead we add the unfolding x -> Just a, and x -> Nothing in the
respective RHSs.
************************************************************************
* *
\subsection{Known constructor}
* *
************************************************************************
We are a bit careful with occurrence info. Here's an example
(\x* -> case x of (a*, b) -> f a) (h v, e)
where the * means "occurs once". This effectively becomes
case (h v, e) of (a*, b) -> f a)
and then
let a* = h v; b = e in f a
and then
f (h v)
All this should happen in one sweep.
-}
knownCon :: SimplEnv
-> OutExpr -- The scrutinee
-> DataCon -> [OutType] -> [OutExpr] -- The scrutinee (in pieces)
-> InId -> [InBndr] -> InExpr -- The alternative
-> SimplCont
-> SimplM (SimplEnv, OutExpr)
knownCon env scrut dc dc_ty_args dc_args bndr bs rhs cont
= do { env' <- bind_args env bs dc_args
; env'' <- bind_case_bndr env'
; simplExprF env'' rhs cont }
where
zap_occ = zapBndrOccInfo (isDeadBinder bndr) -- bndr is an InId
-- Ugh!
bind_args env' [] _ = return env'
bind_args env' (b:bs') (Type ty : args)
= ASSERT( isTyVar b )
bind_args (extendTvSubst env' b ty) bs' args
bind_args env' (b:bs') (arg : args)
= ASSERT( isId b )
do { let b' = zap_occ b
-- Note that the binder might be "dead", because it doesn't
-- occur in the RHS; and simplNonRecX may therefore discard
-- it via postInlineUnconditionally.
-- Nevertheless we must keep it if the case-binder is alive,
-- because it may be used in the con_app. See Note [knownCon occ info]
; env'' <- simplNonRecX env' b' arg -- arg satisfies let/app invariant
; bind_args env'' bs' args }
bind_args _ _ _ =
pprPanic "bind_args" $ ppr dc $$ ppr bs $$ ppr dc_args $$
text "scrut:" <+> ppr scrut
-- It's useful to bind bndr to scrut, rather than to a fresh
-- binding x = Con arg1 .. argn
-- because very often the scrut is a variable, so we avoid
-- creating, and then subsequently eliminating, a let-binding
-- BUT, if scrut is not a variable, we must be careful
-- about duplicating the arg redexes; in that case, make
-- a new con-app from the args
bind_case_bndr env
| isDeadBinder bndr = return env
| exprIsTrivial scrut = return (extendIdSubst env bndr (DoneEx scrut))
| otherwise = do { dc_args <- mapM (simplVar env) bs
-- dc_ty_args are already OutTypes,
-- but bs are InBndrs
; let con_app = Var (dataConWorkId dc)
`mkTyApps` dc_ty_args
`mkApps` dc_args
; simplNonRecX env bndr con_app }
-------------------
missingAlt :: SimplEnv -> Id -> [InAlt] -> SimplCont -> SimplM (SimplEnv, OutExpr)
-- This isn't strictly an error, although it is unusual.
-- It's possible that the simplifier might "see" that
-- an inner case has no accessible alternatives before
-- it "sees" that the entire branch of an outer case is
-- inaccessible. So we simply put an error case here instead.
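-- A purely illustrative example: suppose inlining leaves us with
--      ...(case x of { (y:ys) -> e })...
-- in a context where x's unfolding says it is [].  Then findAlt finds
-- no alternative that can match, and rather than treating this as an
-- error we replace the case with (mkImpossibleExpr ty), where ty is
-- the result type of the continuation.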
missingAlt env case_bndr _ cont
= WARN( True, ptext (sLit "missingAlt") <+> ppr case_bndr )
return (env, mkImpossibleExpr (contResultType cont))
{-
************************************************************************
* *
\subsection{Duplicating continuations}
* *
************************************************************************
-}
prepareCaseCont :: SimplEnv
-> [InAlt] -> SimplCont
-> SimplM (SimplEnv,
SimplCont, -- Dupable part
SimplCont) -- Non-dupable part
-- We are considering
-- K[case _ of { p1 -> r1; ...; pn -> rn }]
-- where K is some enclosing continuation for the case
-- Goal: split K into two pieces Kdup,Knodup so that
-- a) Kdup can be duplicated
-- b) Knodup[Kdup[e]] = K[e]
-- The idea is that we'll transform thus:
-- Knodup[ (case _ of { p1 -> Kdup[r1]; ...; pn -> Kdup[rn] }
--
-- We may also return some extra bindings in SimplEnv (that scope over
-- the entire continuation)
--
-- When case-of-case is off, just make the entire continuation non-dupable
prepareCaseCont env alts cont
| not (sm_case_case (getMode env)) = return (env, mkBoringStop (contHoleType cont), cont)
| not (many_alts alts) = return (env, cont, mkBoringStop (contResultType cont))
| otherwise = mkDupableCont env cont
where
many_alts :: [InAlt] -> Bool -- True iff strictly > 1 non-bottom alternative
many_alts [] = False -- See Note [Bottom alternatives]
many_alts [_] = False
many_alts (alt:alts)
| is_bot_alt alt = many_alts alts
| otherwise = not (all is_bot_alt alts)
is_bot_alt (_,_,rhs) = exprIsBottom rhs
{-
Note [Bottom alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~
When we have
case (case x of { A -> error .. ; B -> e; C -> error ..)
of alts
then we can just duplicate those alts because the A and C cases
will disappear immediately. This is more direct than creating
join points and inlining them away; and in some cases we would
not even create the join points (see Note [Single-alternative case])
and we would keep the case-of-case which is silly. See Trac #4930.
-}
mkDupableCont :: SimplEnv -> SimplCont
-> SimplM (SimplEnv, SimplCont, SimplCont)
mkDupableCont env cont
| contIsDupable cont
= return (env, cont, mkBoringStop (contResultType cont))
mkDupableCont _ (Stop {}) = panic "mkDupableCont" -- Handled by previous eqn
mkDupableCont env (CastIt ty cont)
= do { (env', dup, nodup) <- mkDupableCont env cont
; return (env', CastIt ty dup, nodup) }
-- Duplicating ticks for now, not sure if this is good or not
mkDupableCont env cont@(TickIt{})
= return (env, mkBoringStop (contHoleType cont), cont)
mkDupableCont env cont@(StrictBind {})
= return (env, mkBoringStop (contHoleType cont), cont)
-- See Note [Duplicating StrictBind]
mkDupableCont env (StrictArg info cci cont)
-- See Note [Duplicating StrictArg]
= do { (env', dup, nodup) <- mkDupableCont env cont
; (env'', args') <- mapAccumLM makeTrivialArg env' (ai_args info)
; return (env'', StrictArg (info { ai_args = args' }) cci dup, nodup) }
mkDupableCont env cont@(ApplyToTy { sc_cont = tail })
= do { (env', dup_cont, nodup_cont) <- mkDupableCont env tail
; return (env', cont { sc_cont = dup_cont }, nodup_cont ) }
mkDupableCont env (ApplyToVal { sc_arg = arg, sc_dup = dup, sc_env = se, sc_cont = cont })
= -- e.g. [...hole...] (...arg...)
-- ==>
-- let a = ...arg...
-- in [...hole...] a
do { (env', dup_cont, nodup_cont) <- mkDupableCont env cont
; (_, se', arg') <- simplArg env' dup se arg
; (env'', arg'') <- makeTrivial NotTopLevel env' arg'
; let app_cont = ApplyToVal { sc_arg = arg'', sc_env = se'
, sc_dup = OkToDup, sc_cont = dup_cont }
; return (env'', app_cont, nodup_cont) }
mkDupableCont env cont@(Select _ case_bndr [(_, bs, _rhs)] _ _)
-- See Note [Single-alternative case]
-- | not (exprIsDupable rhs && contIsDupable case_cont)
-- | not (isDeadBinder case_bndr)
| all isDeadBinder bs -- InIds
&& not (isUnLiftedType (idType case_bndr))
-- Note [Single-alternative-unlifted]
= return (env, mkBoringStop (contHoleType cont), cont)
mkDupableCont env (Select _ case_bndr alts se cont)
= -- e.g. (case [...hole...] of { pi -> ei })
-- ===>
-- let ji = \xij -> ei
-- in case [...hole...] of { pi -> ji xij }
do { tick (CaseOfCase case_bndr)
; (env', dup_cont, nodup_cont) <- prepareCaseCont env alts cont
-- NB: We call prepareCaseCont here. If there is only one
-- alternative, then dup_cont may be big, but that's ok
-- because we push it into the single alternative, and then
-- use mkDupableAlt to turn that simplified alternative into
-- a join point if it's too big to duplicate.
-- And this is important: see Note [Fusing case continuations]
; let alt_env = se `setInScope` env'
; (alt_env', case_bndr') <- simplBinder alt_env case_bndr
; alts' <- mapM (simplAlt alt_env' Nothing [] case_bndr' dup_cont) alts
-- Safe to say that there are no handled-cons for the DEFAULT case
-- NB: simplBinder does not zap deadness occ-info, so
-- a dead case_bndr' will still advertise its deadness
-- This is really important because in
-- case e of b { (# p,q #) -> ... }
-- b is always dead, and indeed we are not allowed to bind b to (# p,q #),
-- which might happen if e was an explicit unboxed pair and b wasn't marked dead.
-- In the new alts we build, we have the new case binder, so it must retain
-- its deadness.
-- NB: we don't use alt_env further; it has the substEnv for
-- the alternatives, and we don't want that
; (env'', alts'') <- mkDupableAlts env' case_bndr' alts'
; return (env'', -- Note [Duplicated env]
Select OkToDup case_bndr' alts'' (zapSubstEnv env'')
(mkBoringStop (contHoleType nodup_cont)),
nodup_cont) }
mkDupableAlts :: SimplEnv -> OutId -> [InAlt]
-> SimplM (SimplEnv, [InAlt])
-- Absorbs the continuation into the new alternatives
mkDupableAlts env case_bndr' the_alts
= go env the_alts
where
go env0 [] = return (env0, [])
go env0 (alt:alts)
= do { (env1, alt') <- mkDupableAlt env0 case_bndr' alt
; (env2, alts') <- go env1 alts
; return (env2, alt' : alts' ) }
mkDupableAlt :: SimplEnv -> OutId -> (AltCon, [CoreBndr], CoreExpr)
-> SimplM (SimplEnv, (AltCon, [CoreBndr], CoreExpr))
mkDupableAlt env case_bndr (con, bndrs', rhs') = do
dflags <- getDynFlags
if exprIsDupable dflags rhs' -- Note [Small alternative rhs]
then return (env, (con, bndrs', rhs'))
else
do { let rhs_ty' = exprType rhs'
scrut_ty = idType case_bndr
case_bndr_w_unf
= case con of
DEFAULT -> case_bndr
DataAlt dc -> setIdUnfolding case_bndr unf
where
-- See Note [Case binders and join points]
unf = mkInlineUnfolding Nothing rhs
rhs = mkConApp2 dc (tyConAppArgs scrut_ty) bndrs'
LitAlt {} -> WARN( True, ptext (sLit "mkDupableAlt")
<+> ppr case_bndr <+> ppr con )
case_bndr
-- The case binder is alive but trivial, so why has
-- it not been substituted away?
used_bndrs' | isDeadBinder case_bndr = filter abstract_over bndrs'
| otherwise = bndrs' ++ [case_bndr_w_unf]
abstract_over bndr
| isTyVar bndr = True -- Abstract over all type variables just in case
| otherwise = not (isDeadBinder bndr)
-- The deadness info on the new Ids is preserved by simplBinders
; (final_bndrs', final_args) -- Note [Join point abstraction]
<- if (any isId used_bndrs')
then return (used_bndrs', varsToCoreExprs used_bndrs')
else do { rw_id <- newId (fsLit "w") voidPrimTy
; return ([setOneShotLambda rw_id], [Var voidPrimId]) }
; join_bndr <- newId (fsLit "$j") (mkPiTypes final_bndrs' rhs_ty')
-- Note [Funky mkPiTypes]
; let -- We make the lambdas into one-shot-lambdas. The
-- join point is sure to be applied at most once, and doing so
-- prevents the body of the join point being floated out by
-- the full laziness pass
really_final_bndrs = map one_shot final_bndrs'
one_shot v | isId v = setOneShotLambda v
| otherwise = v
join_rhs = mkLams really_final_bndrs rhs'
join_arity = exprArity join_rhs
join_call = mkApps (Var join_bndr) final_args
; env' <- addPolyBind NotTopLevel env (NonRec (join_bndr `setIdArity` join_arity) join_rhs)
; return (env', (con, bndrs', join_call)) }
-- See Note [Duplicated env]
{-
Note [Fusing case continuations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's important to fuse two successive case continuations when the
first has one alternative. That's why we call prepareCaseCont here.
Consider this, which arises from thunk splitting (see Note [Thunk
splitting] in WorkWrap):
let
x* = case (case v of {pn -> rn}) of
I# a -> I# a
in body
The simplifier will find
(Var v) with continuation
Select (pn -> rn) (
Select [I# a -> I# a] (
StrictBind body Stop
So we'll call mkDupableCont on
Select [I# a -> I# a] (StrictBind body Stop)
There is just one alternative in the first Select, so we want to
simplify the rhs (I# a) with continuation (StrictBind body Stop)
Supposing that body is big, we end up with
let $j a = <let x = I# a in body>
in case v of { pn -> case rn of
I# a -> $j a }
This is just what we want because the rn produces a box that
the case rn cancels with.
See Trac #4957 for a fuller example.
Note [Case binders and join points]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
case (case .. ) of c {
I# c# -> ....c....
If we make a join point with c but not c# we get
$j = \c -> ....c....
But if later inlining scrutinises the c, thus
$j = \c -> ... case c of { I# y -> ... } ...
we won't see that 'c' has already been scrutinised. This actually
happens in the 'tabulate' function in wave4main, and makes a significant
difference to allocation.
An alternative plan is this:
$j = \c# -> let c = I# c# in ...c....
but that is bad if 'c' is *not* later scrutinised.
So instead we do both: we pass 'c' and 'c#' , and record in c's inlining
(a stable unfolding) that it's really I# c#, thus
$j = \c# -> \c[=I# c#] -> ...c....
Absence analysis may later discard 'c'.
NB: take great care when doing strictness analysis;
see Note [Lamba-bound unfoldings] in DmdAnal.
Also note that we can still end up passing stuff that isn't used. Before
strictness analysis we have
let $j x y c{=(x,y)} = (h c, ...)
in ...
After strictness analysis we see that h is strict, we end up with
let $j x y c{=(x,y)} = ($wh x y, ...)
and c is unused.
Note [Duplicated env]
~~~~~~~~~~~~~~~~~~~~~
Some of the alternatives are simplified, but have not been turned into a join point.
So they *must* have a zapped subst-env. So we can't use completeNonRecX to
bind the join point, because it might do PostInlineUnconditionally, and
we'd lose that when zapping the subst-env. We could have a per-alt subst-env,
but zapping it (as we do in mkDupableCont, the Select case) is safe, and
at worst delays the join-point inlining.
Note [Small alternative rhs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is worth checking for a small RHS because otherwise we
get extra let bindings that may cause an extra iteration of the simplifier to
inline back in place. Quite often the rhs is just a variable or constructor.
The Ord instance of Maybe in PrelMaybe.hs, for example, took several extra
iterations because the version with the let bindings looked big, and so wasn't
inlined, but after the join points had been inlined it looked smaller, and so
was inlined.
NB: we have to check the size of rhs', not rhs.
Duplicating a small InAlt might invalidate occurrence information.
However, if it *is* dupable, we return the *un*simplified alternative,
because otherwise we'd need to pair it up with an empty subst-env....
but we only have one env shared between all the alts.
(Remember we must zap the subst-env before re-simplifying something).
Rather than do this we simply agree to re-simplify the original (small) thing later.
Note [Funky mkPiTypes]
~~~~~~~~~~~~~~~~~~~~~~
Notice the funky mkPiTypes. If the constructor has existentials
it's possible that the join point will be abstracted over
type variables as well as term variables.
Example: Suppose we have
data T = forall t. C [t]
Then faced with
case (case e of ...) of
C t xs::[t] -> rhs
We get the join point
let j :: forall t. [t] -> ...
j = /\t \xs::[t] -> rhs
in
case (case e of ...) of
C t xs::[t] -> j t xs
Note [Join point abstraction]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Join points always have at least one value argument,
for several reasons
* If we try to lift a primitive-typed something out
for let-binding-purposes, we will *caseify* it (!),
with potentially-disastrous strictness results. So
instead we turn it into a function: \v -> e
where v::Void#. The value passed to this function is void,
which generates (almost) no code.
* CPR. We used to say "&& isUnLiftedType rhs_ty'" here, but now
we make the join point into a function whenever used_bndrs'
is empty. This makes the join-point more CPR friendly.
Consider: let j = if .. then I# 3 else I# 4
in case .. of { A -> j; B -> j; C -> ... }
Now CPR doesn't w/w j because it's a thunk, so
that means that the enclosing function can't w/w either,
which is a lose. Here's the example that happened in practice:
kgmod :: Int -> Int -> Int
kgmod x y = if x > 0 && y < 0 || x < 0 && y > 0
then 78
else 5
* Let-no-escape. We want a join point to turn into a let-no-escape
so that it is implemented as a jump, and one of the conditions
for LNE is that it's not updatable. In CoreToStg, see
Note [What is a non-escaping let]
* Floating. Since a join point will be entered once, no sharing is
gained by floating out, but something might be lost by doing
so because it might be allocated.
I have seen a case alternative like this:
True -> \v -> ...
It's a bit silly to add the realWorld dummy arg in this case, making
$j = \s v -> ...
True -> $j s
(the \v alone is enough to make CPR happy) but I think it's rare
There's a slight infelicity here: we pass the overall
case_bndr to all the join points if it's used in *any* RHS,
because we don't know its usage in each RHS separately
Note [Duplicating StrictArg]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The original plan had (where E is a big argument)
e.g. f E [..hole..]
==> let $j = \a -> f E a
in $j [..hole..]
But this is terrible! Here's an example:
&& E (case x of { T -> F; F -> T })
Now, && is strict so we end up simplifying the case with
an ArgOf continuation. If we let-bind it, we get
let $j = \v -> && E v
in simplExpr (case x of { T -> F; F -> T })
(ArgOf (\r -> $j r))
And after simplifying more we get
let $j = \v -> && E v
in case x of { T -> $j F; F -> $j T }
Which is a Very Bad Thing
What we do now is this
f E [..hole..]
==> let a = E
in f a [..hole..]
Now if the thing in the hole is a case expression (which is when
we'll call mkDupableCont), we'll push the function call into the
branches, which is what we want. Now RULES for f may fire, and
call-pattern specialisation. Here's an example from Trac #3116
go (n+1) (case l of
1 -> bs'
_ -> Chunk p fpc (o+1) (l-1) bs')
If we can push the call for 'go' inside the case, we get
call-pattern specialisation for 'go', which is *crucial* for
this program.
Here is the (&&) example:
&& E (case x of { T -> F; F -> T })
==> let a = E in
case x of { T -> && a F; F -> && a T }
Much better!
Notice that
* Arguments to f *after* the strict one are handled by
the ApplyToVal case of mkDupableCont. Eg
f [..hole..] E
* We can only do the let-binding of E because the function
part of a StrictArg continuation is an explicit syntax
tree. In earlier versions we represented it as a function
(CoreExpr -> CoreExpr) which we couldn't take apart.
Do *not* duplicate StrictBind and StrictArg continuations. We gain
nothing by propagating them into the expressions, and we do lose a
lot.
The desire not to duplicate is the entire reason that
mkDupableCont returns a pair of continuations.
Note [Duplicating StrictBind]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Unlike StrictArg, there doesn't seem to be anything to gain from
duplicating a StrictBind continuation, so we don't.
Note [Single-alternative cases]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This case is just like the ArgOf case. Here's an example:
data T a = MkT !a
...(MkT (abs x))...
Then we get
case (case x of I# x' ->
case x' <# 0# of
True -> I# (negate# x')
False -> I# x') of y {
DEFAULT -> MkT y
Because the (case x) has only one alternative, we'll transform to
case x of I# x' ->
case (case x' <# 0# of
True -> I# (negate# x')
False -> I# x') of y {
DEFAULT -> MkT y
But now we do *NOT* want to make a join point etc, giving
case x of I# x' ->
let $j = \y -> MkT y
in case x' <# 0# of
True -> $j (I# (negate# x'))
False -> $j (I# x')
In this case the $j will inline again, but suppose there was a big
strict computation enclosing the original call to MkT. Then, it won't
"see" the MkT any more, because it's big and won't get duplicated.
And, what is worse, nothing was gained by the case-of-case transform.
So, in circumstances like these, we don't want to build join points
and push the outer case into the branches of the inner one. Instead,
don't duplicate the continuation.
When should we use this strategy? We should not use it on *every*
single-alternative case:
e.g. case (case ....) of (a,b) -> (# a,b #)
Here we must push the outer case into the inner one!
Other choices:
* Match [(DEFAULT,_,_)], but in the common case of Int,
the alternative-filling-in code turned the outer case into
case (...) of y { I# _ -> MkT y }
* Match on single alternative plus (not (isDeadBinder case_bndr))
Rationale: pushing the case inwards won't eliminate the construction.
But there's a risk of
case (...) of y { (a,b) -> let z=(a,b) in ... }
Now y looks dead, but it'll come alive again. Still, this
seems like the best option at the moment.
* Match on single alternative plus (all (isDeadBinder bndrs))
Rationale: this is essentially seq.
* Match when the rhs is *not* duplicable, and hence would lead to a
join point. This catches the disaster-case above. We can test
the *un-simplified* rhs, which is fine. It might get bigger or
smaller after simplification; if it gets smaller, this case might
fire next time round. NB also that we must test contIsDupable
case_cont *too*, because case_cont might be big!
HOWEVER: I found that this version doesn't work well, because
we can get let x = case (...) of { small } in ...case x...
When x is inlined into its full context, we find that it was a bad
idea to have pushed the outer case inside the (...) case.
Note [Single-alternative-unlifted]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here's another single-alternative where we really want to do case-of-case:
data Mk1 = Mk1 Int# | Mk2 Int#
M1.f =
\r [x_s74 y_s6X]
case
case y_s6X of tpl_s7m {
M1.Mk1 ipv_s70 -> ipv_s70;
M1.Mk2 ipv_s72 -> ipv_s72;
}
of
wild_s7c
{ __DEFAULT ->
case
case x_s74 of tpl_s7n {
M1.Mk1 ipv_s77 -> ipv_s77;
M1.Mk2 ipv_s79 -> ipv_s79;
}
of
wild1_s7b
{ __DEFAULT -> ==# [wild1_s7b wild_s7c];
};
};
So the outer case is doing *nothing at all*, other than serving as a
join-point. In this case we really want to do case-of-case and decide
whether to use a real join point or just duplicate the continuation:
let $j s7c = case x of
Mk1 ipv77 -> (==) s7c ipv77
Mk2 ipv79 -> (==) s7c ipv79
in
case y of
Mk1 ipv70 -> $j ipv70
Mk2 ipv72 -> $j ipv72
Hence: check whether the case binder's type is unlifted, because then
the outer case is *not* a seq.
************************************************************************
* *
Unfoldings
* *
************************************************************************
-}
simplLetUnfolding :: SimplEnv-> TopLevelFlag
-> InId
-> OutExpr
-> Unfolding -> SimplM Unfolding
simplLetUnfolding env top_lvl id new_rhs unf
| isStableUnfolding unf
= simplUnfolding env top_lvl id unf
| otherwise
= bottoming `seq` -- See Note [Force bottoming field]
do { dflags <- getDynFlags
; return (mkUnfolding dflags InlineRhs (isTopLevel top_lvl) bottoming new_rhs) }
-- We make an unfolding *even for loop-breakers*.
-- Reason: (a) It might be useful to know that they are WHNF
-- (b) In TidyPgm we currently assume that, if we want to
-- expose the unfolding then indeed we *have* an unfolding
-- to expose. (We could instead use the RHS, but currently
-- we don't.) The simple thing is always to have one.
where
bottoming = isBottomingId id
simplUnfolding :: SimplEnv-> TopLevelFlag -> InId -> Unfolding -> SimplM Unfolding
-- Note [Setting the new unfolding]
simplUnfolding env top_lvl id unf
= case unf of
NoUnfolding -> return unf
OtherCon {} -> return unf
DFunUnfolding { df_bndrs = bndrs, df_con = con, df_args = args }
-> do { (env', bndrs') <- simplBinders rule_env bndrs
; args' <- mapM (simplExpr env') args
; return (mkDFunUnfolding bndrs' con args') }
CoreUnfolding { uf_tmpl = expr, uf_src = src, uf_guidance = guide }
| isStableSource src
-> do { expr' <- simplExpr rule_env expr
; case guide of
UnfWhen { ug_arity = arity, ug_unsat_ok = sat_ok } -- Happens for INLINE things
-> let guide' = UnfWhen { ug_arity = arity, ug_unsat_ok = sat_ok
, ug_boring_ok = inlineBoringOk expr' }
-- Refresh the boring-ok flag, in case expr'
-- has got small. This happens, notably in the inlinings
-- for dfuns for single-method classes; see
-- Note [Single-method classes] in TcInstDcls.
-- A test case is Trac #4138
in return (mkCoreUnfolding src is_top_lvl expr' guide')
-- See Note [Top-level flag on inline rules] in CoreUnfold
_other -- Happens for INLINABLE things
-> bottoming `seq` -- See Note [Force bottoming field]
do { dflags <- getDynFlags
; return (mkUnfolding dflags src is_top_lvl bottoming expr') } }
-- If the guidance is UnfIfGoodArgs, this is an INLINABLE
-- unfolding, and we need to make sure the guidance is kept up
-- to date with respect to any changes in the unfolding.
| otherwise -> return noUnfolding -- Discard unstable unfoldings
where
bottoming = isBottomingId id
is_top_lvl = isTopLevel top_lvl
act = idInlineActivation id
rule_env = updMode (updModeForStableUnfoldings act) env
-- See Note [Simplifying inside stable unfoldings] in SimplUtils
{-
Note [Force bottoming field]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to force bottoming, or the new unfolding holds
on to the old unfolding (which is part of the id).
Note [Setting the new unfolding]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* If there's an INLINE pragma, we simplify the RHS gently. Maybe we
should do nothing at all, but simplifying gently might get rid of
more crap.
* If not, we make an unfolding from the new RHS. But *only* for
non-loop-breakers. Making loop breakers not have an unfolding at all
means that we can avoid tests in exprIsConApp, for example. This is
important: if exprIsConApp says 'yes' for a recursive thing, then we
can get into an infinite loop
If there's a stable unfolding on a loop breaker (which happens for
INLINEABLE), we hang on to the inlining. It's pretty dodgy, but the
user did say 'INLINE'. May need to revisit this choice.
************************************************************************
* *
Rules
* *
************************************************************************
Note [Rules in a letrec]
~~~~~~~~~~~~~~~~~~~~~~~~
After creating fresh binders for the binders of a letrec, we
substitute the RULES and add them back onto the binders; this is done
*before* processing any of the RHSs. This is important. Manuel found
cases where he really, really wanted a RULE for a recursive function
to apply in that function's own right-hand side.
See Note [Loop breaking and RULES] in OccAnal.
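A tiny source-level illustration (added here, not part of the original
Note; the rule and definitions are made up):

    {-# RULES "f/g" forall x. f (g x) = x #-}
    g n = n + 1
    f 0 = 0
    f n = f (g n)

For the "f/g" rule to be able to rewrite the call 'f (g n)' in f's own
right-hand side, the freshly-substituted binder for f must already carry
the rule when that RHS is simplified -- which is why the rules are added
back onto the binders first.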
-}
addBndrRules :: SimplEnv -> InBndr -> OutBndr -> SimplM (SimplEnv, OutBndr)
-- Rules are added back onto the binder
addBndrRules env in_id out_id
| null old_rules
= return (env, out_id)
| otherwise
= do { new_rules <- mapM (simplRule env (Just (idName out_id))) old_rules
; let final_id = out_id `setIdSpecialisation` mkSpecInfo new_rules
; return (modifyInScope env final_id, final_id) }
where
old_rules = specInfoRules (idSpecialisation in_id)
simplRule :: SimplEnv -> Maybe Name -> CoreRule -> SimplM CoreRule
simplRule _ _ rule@(BuiltinRule {}) = return rule
simplRule env mb_new_nm rule@(Rule { ru_bndrs = bndrs, ru_args = args
, ru_fn = fn_name, ru_rhs = rhs
, ru_act = act })
= do { (env, bndrs') <- simplBinders env bndrs
; let rule_env = updMode (updModeForStableUnfoldings act) env
; args' <- mapM (simplExpr rule_env) args
; rhs' <- simplExpr rule_env rhs
; return (rule { ru_bndrs = bndrs'
, ru_fn = mb_new_nm `orElse` fn_name
, ru_args = args'
, ru_rhs = rhs' }) }
| fmthoma/ghc | compiler/simplCore/Simplify.hs | bsd-3-clause | 122,648 | 20 | 25 | 37,505 | 15,132 | 8,004 | 7,128 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
#ifdef TRUSTWORTHY
{-# LANGUAGE Trustworthy #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Data.Text.Lens
-- Copyright : (C) 2012-14 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : non-portable
--
----------------------------------------------------------------------------
module Data.Text.Lens
( IsText(..), unpacked
) where
import Control.Lens
import Data.Text as Strict
import qualified Data.Text.Strict.Lens as Strict
import Data.Text.Lazy as Lazy
import qualified Data.Text.Lazy.Lens as Lazy
import Data.Text.Lazy.Builder
-- | Traversals for strict or lazy 'Text'
class IsText t where
-- | This isomorphism can be used to 'pack' (or 'unpack') strict or lazy 'Text'.
--
-- @
-- 'pack' x ≡ x '^.' 'packed'
-- 'unpack' x ≡ x '^.' 'from' 'packed'
-- 'packed' ≡ 'from' 'unpacked'
-- @
packed :: Iso' String t
-- | Convert between strict or lazy 'Text' and a 'Builder'.
--
-- @
-- 'fromText' x ≡ x '^.' 'builder'
-- @
builder :: Iso' t Builder
-- | Traverse the individual characters in strict or lazy 'Text'.
--
-- @
-- 'text' = 'unpacked' . 'traversed'
-- @
text :: IndexedTraversal' Int t Char
text = unpacked . traversed
{-# INLINE text #-}
-- | This isomorphism can be used to 'unpack' (or 'pack') both strict or lazy 'Text'.
--
-- @
-- 'unpack' x ≡ x '^.' 'unpacked'
-- 'pack' x ≡ x '^.' 'from' 'unpacked'
-- @
--
-- This 'Iso' is provided for notational convenience rather than out of great need, since
--
-- @
-- 'unpacked' ≡ 'from' 'packed'
-- @
--
unpacked :: IsText t => Iso' t String
unpacked = from packed
{-# INLINE unpacked #-}
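-- A couple of usage sketches (added illustrations; the literals below are
-- assumptions, not part of the original module):
--
-- >>> ("lens" :: String) ^. packed :: Strict.Text
-- "lens"
--
-- >>> Lazy.pack "lens" ^. unpacked
-- "lens"
--
-- >>> lengthOf text (Strict.pack "lens")
-- 4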
instance IsText Strict.Text where
packed = Strict.packed
{-# INLINE packed #-}
builder = Strict.builder
{-# INLINE builder #-}
text = Strict.text
{-# INLINE text #-}
instance IsText Lazy.Text where
packed = Lazy.packed
{-# INLINE packed #-}
builder = Lazy.builder
{-# INLINE builder #-}
text = Lazy.text
{-# INLINE text #-}
| hvr/lens | src/Data/Text/Lens.hs | bsd-3-clause | 2,239 | 0 | 7 | 473 | 257 | 173 | 84 | 33 | 1 |
module Main where
import ABS
(a:b:tmp:n:n1:res:the_end:_)=[1..]
main_ :: Method
main_ [] this wb k =
Assign n (Val (I 2)) $
Assign a (Val (I 1)) $
Assign b (Val (I 3)) $
Assign tmp (Val (I 2)) $
Assign res (Sync hanoi [a,b,tmp,n]) k
hanoi :: Method
hanoi [a,b,tmp,n] this wb k =
Assign res (Val (I 0)) $
If (IGT (I n) (I 0))
(\ k' -> Assign n1 (Val (Param (n-1))) $
Assign res (Sync hanoi [a,tmp,b,n1]) $
Assign n1 (Val (Param (n-1))) $
Assign res (Sync hanoi [tmp,b,a,n1]) k')
Skip $
Return res wb k -- dummy
main :: IO ()
main = printHeap =<< run 9999999999999999 main_ the_end
| abstools/abs-haskell-formal | benchmarks/6_hanoi/progs/2.hs | bsd-3-clause | 652 | 0 | 19 | 185 | 416 | 216 | 200 | 22 | 1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
module Database.Persist.Sql.Raw where
import Database.Persist
import Database.Persist.Sql.Types
import Database.Persist.Sql.Class
import qualified Data.Map as Map
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Reader (ReaderT, ask, MonadReader)
import Data.Acquire (allocateAcquire, Acquire, mkAcquire, with)
import Data.IORef (writeIORef, readIORef, newIORef)
import Control.Exception (throwIO)
import Control.Monad (when, liftM)
import Data.Text (Text, pack)
import Control.Monad.Logger (logDebugNS, runLoggingT)
import Data.Int (Int64)
import qualified Data.Text as T
import Data.Conduit
import Control.Monad.Trans.Resource (MonadResource,release)
rawQuery :: (MonadResource m, MonadReader env m, HasPersistBackend env, BaseBackend env ~ SqlBackend)
=> Text
-> [PersistValue]
-> ConduitM () [PersistValue] m ()
rawQuery sql vals = do
srcRes <- liftPersist $ rawQueryRes sql vals
(releaseKey, src) <- allocateAcquire srcRes
src
release releaseKey
rawQueryRes
:: (MonadIO m1, MonadIO m2, IsSqlBackend env)
=> Text
-> [PersistValue]
-> ReaderT env m1 (Acquire (ConduitM () [PersistValue] m2 ()))
rawQueryRes sql vals = do
conn <- persistBackend `liftM` ask
let make = do
runLoggingT (logDebugNS (pack "SQL") $ T.append sql $ pack $ "; " ++ show vals)
(connLogFunc conn)
getStmtConn conn sql
return $ do
stmt <- mkAcquire make stmtReset
stmtQuery stmt vals
-- | Execute a raw SQL statement
rawExecute :: (MonadIO m, BackendCompatible SqlBackend backend)
=> Text -- ^ SQL statement, possibly with placeholders.
-> [PersistValue] -- ^ Values to fill the placeholders.
-> ReaderT backend m ()
rawExecute x y = liftM (const ()) $ rawExecuteCount x y
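-- A small usage sketch (added illustration; the table and column names are
-- assumptions, not part of this module):
--
-- > bumpAge :: MonadIO m => ReaderT SqlBackend m ()
-- > bumpAge = rawExecute
-- >     "UPDATE person SET age = age + 1 WHERE name = ?"
-- >     [PersistText "john"]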
-- | Execute a raw SQL statement and return the number of
-- rows it has modified.
rawExecuteCount :: (MonadIO m, BackendCompatible SqlBackend backend)
=> Text -- ^ SQL statement, possibly with placeholders.
-> [PersistValue] -- ^ Values to fill the placeholders.
-> ReaderT backend m Int64
rawExecuteCount sql vals = do
conn <- projectBackend `liftM` ask
runLoggingT (logDebugNS (pack "SQL") $ T.append sql $ pack $ "; " ++ show vals)
(connLogFunc conn)
stmt <- getStmt sql
res <- liftIO $ stmtExecute stmt vals
liftIO $ stmtReset stmt
return res
getStmt
:: (MonadIO m, BackendCompatible SqlBackend backend)
=> Text -> ReaderT backend m Statement
getStmt sql = do
conn <- projectBackend `liftM` ask
liftIO $ getStmtConn conn sql
getStmtConn :: SqlBackend -> Text -> IO Statement
getStmtConn conn sql = do
smap <- liftIO $ readIORef $ connStmtMap conn
case Map.lookup sql smap of
Just stmt -> return stmt
Nothing -> do
stmt' <- liftIO $ connPrepare conn sql
iactive <- liftIO $ newIORef True
let stmt = Statement
{ stmtFinalize = do
active <- readIORef iactive
if active
then do
stmtFinalize stmt'
writeIORef iactive False
else return ()
, stmtReset = do
active <- readIORef iactive
when active $ stmtReset stmt'
, stmtExecute = \x -> do
active <- readIORef iactive
if active
then stmtExecute stmt' x
else throwIO $ StatementAlreadyFinalized sql
, stmtQuery = \x -> do
active <- liftIO $ readIORef iactive
if active
then stmtQuery stmt' x
else liftIO $ throwIO $ StatementAlreadyFinalized sql
}
liftIO $ writeIORef (connStmtMap conn) $ Map.insert sql stmt smap
return stmt
-- | Execute a raw SQL statement and return its results as a
-- list.
--
-- If you're using 'Entity'@s@ (which is quite likely), then you
-- /must/ use entity selection placeholders (double question
-- mark, @??@). These @??@ placeholders are then replaced for
-- the names of the columns that we need for your entities.
-- You'll receive an error if you don't use the placeholders.
-- Please see the 'Entity'@s@ documentation for more details.
--
-- You may put value placeholders (question marks, @?@) in your
-- SQL query. These placeholders are then replaced by the values
-- you pass on the second parameter, already correctly escaped.
-- You may want to use 'toPersistValue' to help you constructing
-- the placeholder values.
--
-- Since you're giving a raw SQL statement, you don't get any
-- guarantees regarding safety. If 'rawSql' is not able to parse
-- the results of your query back, then an exception is raised.
-- However, most common problems are mitigated by using the
-- entity selection placeholder @??@, and you shouldn't see any
-- error at all if you're not using 'Single'.
--
-- Some example of 'rawSql' based on this schema:
--
-- @
-- share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
-- Person
-- name String
-- age Int Maybe
-- deriving Show
-- BlogPost
-- title String
-- authorId PersonId
-- deriving Show
-- |]
-- @
--
-- Examples based on the above schema:
--
-- @
-- getPerson :: MonadIO m => ReaderT SqlBackend m [Entity Person]
-- getPerson = rawSql "select ?? from person where name=?" [PersistText "john"]
--
-- getAge :: MonadIO m => ReaderT SqlBackend m [Single Int]
-- getAge = rawSql "select person.age from person where name=?" [PersistText "john"]
--
-- getAgeName :: MonadIO m => ReaderT SqlBackend m [(Single Int, Single Text)]
-- getAgeName = rawSql "select person.age, person.name from person where name=?" [PersistText "john"]
--
-- getPersonBlog :: MonadIO m => ReaderT SqlBackend m [(Entity Person, Entity BlogPost)]
-- getPersonBlog = rawSql "select ??,?? from person,blog_post where person.id = blog_post.author_id" []
-- @
--
-- Minimal working program for PostgreSQL backend based on the above concepts:
--
-- > {-# LANGUAGE EmptyDataDecls #-}
-- > {-# LANGUAGE FlexibleContexts #-}
-- > {-# LANGUAGE GADTs #-}
-- > {-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- > {-# LANGUAGE MultiParamTypeClasses #-}
-- > {-# LANGUAGE OverloadedStrings #-}
-- > {-# LANGUAGE QuasiQuotes #-}
-- > {-# LANGUAGE TemplateHaskell #-}
-- > {-# LANGUAGE TypeFamilies #-}
-- >
-- > import Control.Monad.IO.Class (liftIO)
-- > import Control.Monad.Logger (runStderrLoggingT)
-- > import Database.Persist
-- > import Control.Monad.Reader
-- > import Data.Text
-- > import Database.Persist.Sql
-- > import Database.Persist.Postgresql
-- > import Database.Persist.TH
-- >
-- > share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
-- > Person
-- > name String
-- > age Int Maybe
-- > deriving Show
-- > |]
-- >
-- > conn = "host=localhost dbname=new_db user=postgres password=postgres port=5432"
-- >
-- > getPerson :: MonadIO m => ReaderT SqlBackend m [Entity Person]
-- > getPerson = rawSql "select ?? from person where name=?" [PersistText "sibi"]
-- >
-- > liftSqlPersistMPool y x = liftIO (runSqlPersistMPool y x)
-- >
-- > main :: IO ()
-- > main = runStderrLoggingT $ withPostgresqlPool conn 10 $ liftSqlPersistMPool $ do
-- > runMigration migrateAll
-- > xs <- getPerson
-- > liftIO (print xs)
-- >
rawSql :: (RawSql a, MonadIO m)
=> Text -- ^ SQL statement, possibly with placeholders.
-> [PersistValue] -- ^ Values to fill the placeholders.
-> ReaderT SqlBackend m [a]
rawSql stmt = run
where
getType :: (x -> m [a]) -> a
getType = error "rawSql.getType"
x = getType run
process = rawSqlProcessRow
withStmt' colSubsts params sink = do
srcRes <- rawQueryRes sql params
liftIO $ with srcRes (\src -> runConduit $ src .| sink)
where
sql = T.concat $ makeSubsts colSubsts $ T.splitOn placeholder stmt
placeholder = "??"
makeSubsts (s:ss) (t:ts) = t : s : makeSubsts ss ts
makeSubsts [] [] = []
makeSubsts [] ts = [T.intercalate placeholder ts]
makeSubsts ss [] = error (concat err)
where
err = [ "rawsql: there are still ", show (length ss)
, "'??' placeholder substitutions to be made "
, "but all '??' placeholders have already been "
, "consumed. Please read 'rawSql's documentation "
, "on how '??' placeholders work."
]
run params = do
conn <- ask
let (colCount, colSubsts) = rawSqlCols (connEscapeName conn) x
withStmt' colSubsts params $ firstRow colCount
firstRow colCount = do
mrow <- await
case mrow of
Nothing -> return []
Just row
| colCount == length row -> getter mrow
| otherwise -> fail $ concat
[ "rawSql: wrong number of columns, got "
, show (length row), " but expected ", show colCount
, " (", rawSqlColCountReason x, ")." ]
getter = go id
where
go acc Nothing = return (acc [])
go acc (Just row) =
case process row of
Left err -> fail (T.unpack err)
Right r -> await >>= go (acc . (r:))
| bitemyapp/persistent | persistent/Database/Persist/Sql/Raw.hs | mit | 10,124 | 0 | 22 | 3,109 | 1,793 | 956 | 837 | 141 | 7 |
{-# LANGUAGE OverloadedStrings #-}
{- |
Module : Network.MPD.Commands.Status
Copyright : (c) Ben Sinclair 2005-2009, Joachim Fasting 2012
License : MIT (see LICENSE)
Maintainer : [email protected]
Stability : stable
Portability : unportable
Querying MPD's status.
-}
module Network.MPD.Commands.Status
( clearError
, currentSong
, idle
, noidle
, stats
, status
) where
import qualified Network.MPD.Applicative.Internal as A
import qualified Network.MPD.Applicative.Status as A
import Network.MPD.Commands.Types
import Network.MPD.Core
-- | Clear the current error message in status.
clearError :: MonadMPD m => m ()
clearError = A.runCommand A.clearError
-- | Get the currently playing song.
currentSong :: MonadMPD m => m (Maybe Song)
currentSong = A.runCommand A.currentSong
-- | Wait until there is a noteworthy change in one or more of MPD's
-- subsystems.
--
-- The first argument is a list of subsystems that should be considered. An
-- empty list specifies that all subsystems should be considered.
--
-- A list of subsystems that have noteworthy changes is returned.
--
-- Note that running this command will block until either 'idle' returns or is
-- cancelled by 'noidle'.
idle :: MonadMPD m => [Subsystem] -> m [Subsystem]
idle = A.runCommand . A.idle
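-- A small usage sketch (added illustration; it assumes the 'withMPD' runner
-- and the 'PlayerS' and 'MixerS' constructors exported by "Network.MPD"):
--
-- > withMPD (idle [PlayerS, MixerS]) >>= print
--
-- This blocks until the player or mixer state changes, then prints the list
-- of subsystems that actually changed.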
-- | Cancel 'idle'.
noidle :: MonadMPD m => m ()
noidle = A.runCommand A.noidle
-- | Get server statistics.
stats :: MonadMPD m => m Stats
stats = A.runCommand A.stats
-- | Get the server's status.
status :: MonadMPD m => m Status
status = A.runCommand A.status
| sol/libmpd-haskell | src/Network/MPD/Commands/Status.hs | mit | 1,608 | 0 | 8 | 314 | 261 | 151 | 110 | 24 | 1 |
{-- snippet doblock --}
useAsCString str $ \cstr -> do
... operate on the C string
... return a result
{-- /snippet doblock --}
{-- snippet alloc --}
alloca $ \stringptr -> do
... call some Ptr CString function
peek stringptr
{-- /snippet alloc --}
| binesiyu/ifl | examples/ch17/DoBlock.hs | mit | 262 | 2 | 10 | 58 | 77 | 37 | 40 | -1 | -1 |
import Control.Monad
import Data.Maybe
import Data.Functor
import System.Posix.Env
import Text.XkbCommon
import Text.XkbCommon.Constants
import Text.XkbCommon.KeycodeList
import Common
setRmlvoEnv :: RMLVO -> IO ()
setRmlvoEnv rmlvo = do
procEnv "XKB_DEFAULT_RULES" rules
procEnv "XKB_DEFAULT_MODEL" model
procEnv "XKB_DEFAULT_LAYOUT" layout
procEnv "XKB_DEFAULT_VARIANT" variant
procEnv "XKB_DEFAULT_OPTIONS" options
where
procEnv :: String -> (RMLVO -> Maybe String) -> IO ()
procEnv envName getter = case getter rmlvo of
Just x -> setEnv envName x True
Nothing -> unsetEnv envName
main = do
ctx <- getTestContext
envCtx <- liftM fromJust $ newContext contextNoDefaultIncludes
appendIncludePath envCtx datadir
setRmlvoEnv noPrefs
km <- liftM fromJust $ newKeymapFromNames ctx (RMLVO
(Just "evdev")
(Just "pc105")
(Just "us,il,ru,ca")
(Just ",,,multix")
(Just "grp:alts_toggle,ctrl:nocaps,compose:rwin"))
testKeySeq km [
(keycode_q, Both, keysym_q),
(keycode_leftalt, Down, keysym_Alt_L),
(keycode_rightalt, Down, keysym_ISO_Next_Group),
(keycode_rightalt, Up, keysym_ISO_Level3_Shift),
(keycode_leftalt, Up, keysym_Alt_L),
(keycode_q, Both, keysym_slash),
(keycode_leftshift, Down, keysym_Shift_L),
(keycode_q, Both, keysym_Q),
(keycode_rightmeta, Both, keysym_Multi_key)]
km <- liftM fromJust $ newKeymapFromNames ctx (RMLVO
(Just "evdev")
(Just "pc105")
(Just "us,in")
Nothing
(Just "grp:alts_toggle"))
testKeySeq km [
(keycode_a, Both, keysym_a),
(keycode_leftalt, Down, keysym_Alt_L),
(keycode_rightalt, Down, keysym_ISO_Next_Group),
(keycode_rightalt, Up, keysym_ISO_Level3_Shift),
(keycode_leftalt, Up, keysym_Alt_L),
(keycode_a, Both, ks "U094b")]
km <- liftM fromJust $ newKeymapFromNames ctx (RMLVO
(Just "evdev")
(Just "pc105")
(Just "us")
(Just "intl")
Nothing)
testKeySeq km [
(keycode_grave, Both, keysym_dead_grave)]
km <- liftM fromJust $ newKeymapFromNames ctx (RMLVO
(Just "evdev")
(Just "pc105")
(Just "us")
(Just "intl")
(Just "grp:alts_toggle"))
testKeySeq km [
(keycode_grave, Both, keysym_dead_grave)]
km <- liftM fromJust $ newKeymapFromNames ctx (RMLVO
(Just "evdev")
Nothing
(Just "us:20")
Nothing
Nothing)
testKeySeq km [
(keycode_a, Both, keysym_a)]
km <- liftM fromJust $ newKeymapFromNames ctx (RMLVO
(Just "evdev")
Nothing
(Just "us,,ca")
Nothing
(Just "grp:alts_toggle"))
testKeySeq km [
(keycode_a, Both, keysym_a),
(keycode_leftalt, Down, keysym_Alt_L),
(keycode_rightalt, Down, keysym_ISO_Next_Group),
(keycode_rightalt, Up, keysym_ISO_Next_Group),
(keycode_leftalt, Up, keysym_Alt_L),
(keycode_leftalt, Down, keysym_Alt_L),
(keycode_rightalt, Down, keysym_ISO_Next_Group),
(keycode_rightalt, Up, keysym_ISO_Level3_Shift),
(keycode_leftalt, Up, keysym_Alt_L),
(keycode_apostrophe, Both, keysym_dead_grave)]
km <- liftM fromJust $ newKeymapFromNames ctx noPrefs
testKeySeq km [
(keycode_a, Both, keysym_a)]
km <- newKeymapFromNames ctx (RMLVO
(Just "does-not-exist")
Nothing
Nothing
Nothing
Nothing)
assert (isNothing km) "compiled nonexistent keymap"
setRmlvoEnv (RMLVO
(Just "evdev")
Nothing
(Just "us")
Nothing
Nothing)
km <- liftM fromJust $ newKeymapFromNames envCtx noPrefs
testKeySeq km [
(keycode_a, Both, keysym_a)]
setRmlvoEnv (RMLVO
(Just "evdev")
Nothing
(Just "us")
Nothing
(Just "ctrl:nocaps"))
km <- liftM fromJust $ newKeymapFromNames envCtx noPrefs
testKeySeq km [
(keycode_capslock, Both, keysym_Control_L)]
setRmlvoEnv (RMLVO
(Just "evdev")
Nothing
(Just "us,ca")
(Just ",,,multix")
(Just "grp:alts_toggle"))
km <- liftM fromJust $ newKeymapFromNames envCtx noPrefs
testKeySeq km [
(keycode_a, Both, keysym_a),
(keycode_leftalt, Down, keysym_Alt_L),
(keycode_rightalt, Down, keysym_ISO_Next_Group),
(keycode_rightalt, Up, keysym_ISO_Level3_Shift),
(keycode_leftalt, Up, keysym_Alt_L),
(keycode_grave, Up, keysym_numbersign)]
setRmlvoEnv (RMLVO
(Just "broken")
(Just "what-on-earth")
(Just "invalid")
Nothing
Nothing)
km <- newKeymapFromNames envCtx noPrefs
assert (isNothing km) "compiled nonexistent keymap"
| tulcod/haskell-xkbcommon | tests/rulescomp.hs | mit | 5,090 | 0 | 13 | 1,560 | 1,382 | 726 | 656 | 144 | 2 |
module Phone where
import Data.List (elemIndex, findIndex)
import Data.Maybe (fromJust)
import Data.Char (isUpper, toLower, isLetter, toUpper, ord, chr)
convo :: [String]
convo = [
"Hope is the beginning of despair",
"Ok Marneas calm down",
"We were betrayed at Calth",
"True enough",
"Only the Emperor can save us now",
"Since when do Ultramarines say that",
"Put on your fancy gauntlets and fight"]
type Digit = Char
type Presses = Int
-- assuming the default phone definition
-- 'a' -> [('2', 1)]
-- 'A' -> [('*', 1), ('2', 1)]
-- 1. Create a data structure that captures the
-- phone layout on page 457.
data Phone = Phone String [String]
deriving (Eq, Show)
standardPhone :: Phone
standardPhone
= Phone "123456789*0#"
["", "abc", "def",
"ghi", "jkl", "mno",
"pqrs", "tuv", "wxyz",
"^", "+ ", ".,"]
standardCharToTaps :: Char
-> [(Digit, Presses)]
standardCharToTaps = charToTaps standardPhone
charToTaps :: Phone
-> Char
-> [(Digit, Presses)]
charToTaps (Phone keys values) c =
if isUpper c
then ('*', 1) : (charToTaps (Phone keys values) $ toLower c)
else thePress
where
f (k, v) acc =
if elem c (v ++ [k])
then (k, pressCount c v): acc
else acc
pressCount :: Char -> String -> Presses
pressCount k v = (fromJust $ elemIndex c (v ++ [k])) + 1
thePress = foldr f [] $ zip keys values
tapToChar :: Phone
-> (Digit, Presses)
-> Char
tapToChar (Phone keys values) (digit, presses) =
let
kv = zip keys values
filtered = filter (\(c, str) -> c == digit) kv
x = head filtered
makeTheStr (key, values) =
values ++ [key]
in
(makeTheStr x) !! (presses - 1)
standardTapsToString :: [(Digit, Presses)]
-> String
standardTapsToString = tapsToString standardPhone
tapsToString :: Phone
-> [(Digit, Presses)]
-> String
tapsToString phone taps = reverse result
where
(_, _, result) = foldr processor (phone, False, "") $ reverse taps
processor :: (Digit, Presses)
-> (Phone, Bool, String)
-> (Phone, Bool, String)
processor tap (p, upCase, acc) =
let
tapChar = tapToChar p tap
capitalize = tap == ('*', 1)
outputChar =
if upCase && isLetter tapChar
then toUpper tapChar
else tapChar
in
if capitalize
then (p, capitalize, acc)
else (p, capitalize, outputChar : acc)
tapsa = [('2' :: Digit, 1 :: Presses)]
tapsA = [('*' :: Digit, 1 :: Presses), ('2', 1 :: Presses)]
(Phone keys values) = standardPhone
kv = zip keys values
standardStringToTaps :: String
-> [(Digit, Presses)]
standardStringToTaps = stringToTaps standardPhone
stringToTaps :: Phone
-> String
-> [(Digit, Presses)]
stringToTaps p s = foldr (++) [] $ map (\c -> charToTaps p c) s
-- standardStringToTaps $ convo !! 1
-- [('*',1),('6',3),('5',2),('0',2),('*',1),('6',1),('2',1),('7',3),('6',2),('3',2),('2',1),('7',4),('0',2),('2',3),('2',1),('5',3),('6',1),('0',2),('3',1),('6',3),('9',1),('6',2)]
hibd = "Hope is the beginning of despair."
hibd' = standardStringToTaps hibd
rhibd = standardTapsToString hibd'
-- 3. How many times do digits need to be pressed
-- for each message?
fingerTapCount :: [(Digit, Presses)] -> Presses
fingerTapCount = foldr (\(d, p) acc -> p + acc) 0
-- 4. What was the most popular letter for each message?
-- What was its cost? You'll want to combine reverseTaps
-- and fingerTaps to figure out what it cost in taps.
-- ReverseTaps is a list because you need to press a
-- different button in order to get capitals
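-- A small sketch combining the pieces for exercise 4 (added illustration;
-- 'letterCost' is a new name, and reading "cost" as occurrences times the
-- presses needed per occurrence is an assumption):
letterCost :: String -> (Char, Presses)
letterCost s = (c, count * fingerTapCount (standardStringToTaps [c]))
  where
    (count, c) = mostPopularLetterAndMagnitude s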
beforeSlice :: Int -> [a] -> [a]
beforeSlice index acc =
if index == 0
then []
else take index acc
afterSlice :: Int -> [a] -> [a]
afterSlice index acc =
if (index - 1) == (length acc)
then []
else drop (index + 1) acc
largestLetter :: Int
-> (Int, (Int, Int))
-> (Int, (Int, Int))
largestLetter count (index, t@(largestIndex, largestCount)) =
if count > largestCount
then (newIndex, (index, count))
else (newIndex, t)
where
newIndex = index + 1
mostFrequentLetter :: [Int] -> Char
mostFrequentLetter counts = winnerChar
where
(_, winnerChar) = mostFrequentLetterAndMagnitude counts
mostPopularLetter :: String -> Char
mostPopularLetter s = winnerChar
where
(_, winnerChar) = mostPopularLetterAndMagnitude s
mostFrequentLetterAndMagnitude :: [Int] -> (Int, Char)
mostFrequentLetterAndMagnitude counts = (magnitude, chr (winnerOrd + 97))
where
(_, (winnerOrd, magnitude)) = foldr largestLetter (0, (0, 0)) $ reverse counts
mostPopularLetterAndMagnitude :: String -> (Int, Char)
mostPopularLetterAndMagnitude s =
mostFrequentLetterAndMagnitude $ letterCounts s
letterCounts :: String
-> [Int]
letterCounts s = counts sentenceAllLowerLetters
where
sentenceAllLowerLetters =
foldr (\c acc -> if isLetter c then (toLower c) : acc else acc) "" s
counts sent =
foldr
(\c acc ->
let
index = (ord c) - 97
indexValue = acc !! index
newValue = indexValue + 1
in
(beforeSlice index acc) ++ [newValue] ++ (afterSlice index acc))
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
sent
-- 5. What was the most popular letter overall? What was the
-- most popular word?
acc :: [Int]
acc = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
folder :: String
-> [Int]
-> [Int]
folder s acc = combinedLetterCounts
where
thisLetterCount = letterCounts s
combinedLetterCounts = map (\(x, y) -> x + y) $ zip acc thisLetterCount
maxFolder :: Int
-> (Int, Int, Int)
-> (Int, Int, Int)
maxFolder count (index, curMax, curIx) =
if count > curMax
then (index + 1, count, index)
else (index + 1, curMax, curIx)
maxOfList :: [Int] -> (Int, Int, Int)
maxOfList = foldr maxFolder (0, 0, 0)
x = overallLetterCounts convo
overallLetterCounts :: [String] -> [Int]
overallLetterCounts sentences =
foldr
folder
[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
sentences
mostPopularLetterOverall :: [String] -> (Int, Char)
mostPopularLetterOverall sentences = (count, actualChar)
where
(_, count, ordinal) = maxOfList $ reverse $ overallLetterCounts sentences
actualChar = chr (ordinal + 97)
convoMostPopularLetter = mostPopularLetterOverall convo
sentenceWithTheMostOfAParticularLetter :: [String] -> (Int, Char, String)
sentenceWithTheMostOfAParticularLetter sentences =
foldr
(\s t@(magnitude, largestLetter, sLeader) ->
let
(sMag, sChar) = mostPopularLetterAndMagnitude s
in
if sMag > magnitude
then (sMag, sChar, s)
else t)
(0, 'a', "")
sentences
data Trie a =
Node a Int [Trie a]
deriving (Eq, Show)
insert' :: Eq a
=> [a]
-> Trie a
-> Trie a
insert' [] (Node item count children) =
(Node item (count + 1) children)
insert' (x:xs) (Node item count children) =
case maybeIndex x children of
Just ix -> (Node item count ((before ix) ++ [(insert' xs (toUpdate ix))] ++ (after ix)))
_ -> (Node item count ((insert' xs (Node x 0 [])) : children))
where
before ix = beforeSlice ix children
after ix = afterSlice ix children
toUpdate ix = children !! ix
convoInTrie :: [String]
-> Trie Char
convoInTrie sentences =
foldr (\w acc -> insert' w acc) (Node '_' 0 []) $ allWordsLower sentences
allWordsLower :: [String]
-> [String]
allWordsLower sentences =
foldr (\w acc -> (foldr (\c cacc -> toLower c : cacc) "" w) : acc) [] $ allTheWords sentences
where
allTheWords :: [String]
-> [String]
allTheWords = foldr (\s acc -> words s ++ acc) []
maybeIndex :: Eq a
=> a
-> [Trie a]
-> Maybe Int
maybeIndex _ [] = Nothing
maybeIndex x children = findIndex (\(Node c _ _) -> c == x) children
frequencyOfWord :: String
-> Trie Char
-> Maybe Int
frequencyOfWord [] (Node _ count _) = Just count
frequencyOfWord (x:xs) (Node item count children) =
case maybeIndex x children of
Just ix -> frequencyOfWord xs (children !! ix)
_ -> Nothing
countOfThe = frequencyOfWord "the" $ convoInTrie convo
frequencyOfWords :: [String]
-> [(Maybe Int, String)]
frequencyOfWords sentences = wordOccurences
where
allLower = allWordsLower sentences
filledTrie = convoInTrie allLower
wordOccurences =
map (\w -> (frequencyOfWord w filledTrie, w)) allLower
mostPopularWord :: [String] -> (Maybe Int, String)
mostPopularWord sentences = winner
where
winner = foldr folder (Just 0, "") $ frequencyOfWords sentences
folder (thisMag, thisWord) t@(largestMag, _) =
if thisMag > largestMag
then (thisMag, thisWord)
else t
winnerWord = mostPopularWord convo | brodyberg/Notes | ProjectRosalind.hsproj/LearnHaskell/lib/HaskellBook/PhoneExercise.hs | mit | 9,097 | 0 | 17 | 2,354 | 3,124 | 1,753 | 1,371 | 232 | 3 |
module Run.Shrink (runShrink) where
import Control.Monad
import Test.QuickFuzz.Gen.FormatInfo
import Args
import Debug
-- Run shrink subcommand
runShrink :: QFCommand -> FormatInfo base actions -> IO ()
runShrink cmd fmt = do
when (hasActions fmt)
(putStrLn "Selected format supports actions base generation/shrinking!")
| elopez/QuickFuzz | app/Run/Shrink.hs | gpl-3.0 | 340 | 0 | 9 | 60 | 83 | 45 | 38 | 9 | 1 |
module Lamdu.Data.Export.JSON.Migration.ToVersion10 (migrate) where
import qualified Control.Lens as Lens
import Control.Lens.Extended ((~~>))
import qualified Data.Aeson as Aeson
import qualified Data.Text as Text
import qualified Lamdu.CharClassification as Chars
import Lamdu.Data.Export.JSON.Migration.Common (migrateToVer)
import Lamdu.Prelude
isOperator :: Text -> Bool
isOperator = Lens.allOf Lens.each (`elem` Chars.operator)
migrateVal :: Aeson.Value -> Either Text Aeson.Value
migrateVal (Aeson.Object val) =
case val ^. Lens.at "name" of
Nothing -> Right val
Just (Aeson.String x) ->
val
& Lens.at "name" .~ Nothing
& addName
& Right
where
addName
| isOperator x = Lens.at "op" ?~ Aeson.String x
| otherwise =
Lens.at "names" ?~
Aeson.Object ("english" ~~> Aeson.String x)
Just x -> Left ("Unexpected name: " <> Text.pack (show x))
<&> Aeson.Object
migrateVal x = Right x
migrate :: Aeson.Value -> Either Text Aeson.Value
migrate = migrateToVer 10 (traverse migrateVal)
| lamdu/lamdu | src/Lamdu/Data/Export/JSON/Migration/ToVersion10.hs | gpl-3.0 | 1,160 | 0 | 17 | 310 | 350 | 188 | 162 | 29 | 3 |
module IrcServer (
IrcServer(..),
IrcConnection(..),
connect,
listen,
send
) where
import Config
import Prelude hiding ()
import IrcMessage
import Network (connectTo, PortID(PortNumber))
import System.IO (Handle, hSetBuffering, BufferMode(..), hGetLine, hPutStrLn,
mkTextEncoding, hSetEncoding, stdout, hSetNewlineMode,
NewlineMode(..), Newline(..))
import Control.Concurrent (forkIO, forkFinally, ThreadId)
import Control.Concurrent.Chan (Chan(..), newChan, writeChan, readChan, getChanContents)
import Control.Concurrent.STM (atomically)
import Control.Concurrent.STM.TVar (TVar(..), newTVar, readTVar, writeTVar)
import Control.Exception (IOException, handle)
import Control.Monad (forever, ap)
import Control.Monad.Reader (ReaderT(..), runReaderT, ask, reader, liftIO)
import Control.Conditional (if', ifM)
data IrcServer = IrcServer {
host :: String,
port :: Int,
ssl :: Bool
}
data IrcConnection = IrcConnection {
socket :: Handle,
readQ :: Chan Message,
sendQ :: Chan Message,
connected :: TVar Bool
}
connect :: String -> Int -> IO Handle
connect server port = connectTo server portno >>=
\h ->
mkTextEncoding "UTF-8//IGNORE" >>=
hSetEncoding h >>
hSetBuffering h NoBuffering >>
hSetNewlineMode h nlCRLFMode >>
return h
where
portno = PortNumber . fromIntegral $ port
nlCRLFMode = NewlineMode CRLF CRLF
listen :: ReaderT Handle IO IrcConnection
listen = reader (flip (,)) `ap` makeConn >>= \(ic, h) ->
(loopWithState (connected ic) $ readOnto h (readQ ic)) >>
(loopWhile (connected ic) $ writeOnto h (sendQ ic)) >>
return ic
where
readOnto :: Handle -> Chan Message -> IO ()
readOnto h inp = hGetLine h >>=
writeChan inp . read
writeOnto :: Handle -> Chan Message -> IO ()
writeOnto h out = readChan out >>=
hPutStrLn h . show
makeConn :: ReaderT Handle IO IrcConnection
makeConn = liftIO newChan >>= \input ->
liftIO newChan >>= \output ->
liftIO (atomically $ newTVar True) >>= \state ->
ask >>= \sock ->
return (IrcConnection sock input output state)
loopWithState :: TVar Bool -> IO a -> ReaderT Handle IO ThreadId
loopWithState tv io = liftIO $ forkFinally (forever io) (const . atomically $ writeTVar tv False)
loopWhile :: TVar Bool -> IO () -> ReaderT Handle IO ThreadId
loopWhile tv io = liftIO . forkIO . forever $ ifM (atomically $ readTVar tv) io (return ())
send :: Message -> ReaderT Handle IO ()
send msg = ReaderT (flip hPutStrLn . show $ msg)
| bqv/mako | IrcServer.hs | mpl-2.0 | 3,125 | 0 | 17 | 1,105 | 902 | 492 | 410 | 63 | 1 |
--------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module NumberSix.Handlers.Sup
( handler
) where
--------------------------------------------------------------------------------
import Control.Applicative ((<$>))
import Control.Monad (when)
import qualified Data.Text as T
--------------------------------------------------------------------------------
import NumberSix.Irc
import NumberSix.Message
--------------------------------------------------------------------------------
handler :: UninitializedHandler
handler = makeHandler "Sup" [supHook]
--------------------------------------------------------------------------------
supHook :: Irc ()
supHook = onCommand "PRIVMSG" $ do
text <- getMessageText
expected <- ("sup " <>) <$> getNick
when (expected `T.isPrefixOf` text) $ do
sender <- getSender
write $ "sup " <> sender
| itkovian/number-six | src/NumberSix/Handlers/Sup.hs | bsd-3-clause | 997 | 0 | 12 | 173 | 159 | 91 | 68 | 17 | 1 |
module Numeric.Coalgebra.Trigonometric.Class
( Trigonometric(..)
) where
import Prelude (return)
import Numeric.Covector
class Trigonometric r where
cos :: r
sin :: r
instance Trigonometric a => Trigonometric (Covector r a) where
cos = return cos
sin = return sin
| athanclark/algebra | src/Numeric/Coalgebra/Trigonometric/Class.hs | bsd-3-clause | 279 | 0 | 7 | 53 | 88 | 49 | 39 | 10 | 0 |
{-# LANGUAGE CPP, DeriveDataTypeable #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
module Language.Java.Paragon.TypeCheck.NullAnalysis
(
NullType, NullAnnot(..), NullModif(..),
nullable, committed, free,
joinNT
) where
import Language.Java.Paragon.Pretty
#ifdef BASE4
import Data.Data
#else
import Data.Generics (Data(..),Typeable(..))
#endif
data NullAnnot = NotNull | MaybeNull
deriving (Eq, Show, Data, Typeable)
data NullModif = Free | Committed | Unclassified
deriving (Eq, Show, Data, Typeable)
type NullType = (NullAnnot, NullModif)
instance Ord NullAnnot where
MaybeNull <= NotNull = False
_ <= _ = True
instance Ord NullModif where
_ <= Unclassified = True
nm1 <= nm2 = nm1 == nm2
instance Pretty NullAnnot where
pretty = text . show
instance Pretty NullModif where
pretty = text . show
instance Pretty NullType where
pretty (na, nm) = text "(" <> pretty na <> text ", " <> pretty nm <> text ")"
nullable :: NullType -> Bool
nullable (MaybeNull, _) = True
nullable _ = False
committed :: NullType -> Bool
committed (_, Committed) = True
committed _ = False
free :: NullType -> Bool
free (_, Free) = True
free _ = False
joinNA :: NullAnnot -> NullAnnot -> NullAnnot
joinNA MaybeNull _ = MaybeNull
joinNA NotNull a = a
joinNM :: NullModif -> NullModif -> NullModif
joinNM Committed Committed = Committed
joinNM _ _ = Free
joinNT :: NullType -> NullType -> NullType
joinNT (an1, mod1) (an2, mod2) = (an1 `joinNA` an2, mod1 `joinNM` mod2)
| bvdelft/parac2 | src/Language/Java/Paragon/TypeCheck/NullAnalysis.hs | bsd-3-clause | 1,609 | 0 | 10 | 365 | 494 | 274 | 220 | 44 | 1 |
-- | The Github issue comments API from
-- <http://developer.github.com/v3/issues/comments/>.
module Github.Issues.Comments (
comment
,comments
,comments'
-- * Modifying Comments
-- |
-- Only authenticated users may create and edit comments.
,GithubAuth(..)
,createComment
,editComment
,module Github.Data
) where
import Github.Data
import Github.Private
-- | A specific comment, by ID.
--
-- > comment "thoughtbot" "paperclip" 1468184
comment :: String -> String -> Int -> IO (Either Error IssueComment)
comment user reqRepoName reqCommentId =
githubGet ["repos", user, reqRepoName, "issues", "comments", show reqCommentId]
-- | All comments on an issue, by the issue's number.
--
-- > comments "thoughtbot" "paperclip" 635
comments :: String -> String -> Int -> IO (Either Error [IssueComment])
comments user reqRepoName reqIssueNumber =
githubGet ["repos", user, reqRepoName, "issues", show reqIssueNumber, "comments"]
-- | All comments on an issue, by the issue's number, using authentication.
--
-- > comments' (GithubUser (user, password)) "thoughtbot" "paperclip" 635
comments' :: Maybe GithubAuth -> String -> String -> Int -> IO (Either Error [IssueComment])
comments' auth user reqRepoName reqIssueNumber =
githubGet' auth ["repos", user, reqRepoName, "issues", show reqIssueNumber, "comments"]
-- |
-- Create a new comment.
--
-- > createComment (GithubUser (user, password)) user repo issue
-- > "some words"
createComment :: GithubAuth -> String -> String -> Int -> String
-> IO (Either Error Comment)
createComment auth user repo iss body =
githubPost auth
["repos", user, repo, "issues", show iss, "comments"] (NewComment body)
-- |
-- Edit a comment.
--
-- > editComment (GithubUser (user, password)) user repo commentid
-- > "new words"
editComment :: GithubAuth -> String -> String -> Int -> String
-> IO (Either Error Comment)
editComment auth user repo commid body =
githubPatch auth ["repos", user, repo, "issues", "comments", show commid]
(EditComment body)
| deckool/my-hs-github | Github/Issues/Comments.hs | bsd-3-clause | 2,032 | 0 | 12 | 338 | 452 | 256 | 196 | 29 | 1 |
module Main where
import Criterion.Main
import CV.Image
import qualified CV.ImageMath as IM
import System.IO.Unsafe
import CV.ColourUtils
import CV.Pixelwise
import qualified CV.Transforms as T
pwFlip :: Image GrayScale D32 -> Image GrayScale D32
pwFlip i = unsafePerformIO $ withClone i (return . toImage . remap flip . fromImage)
where flip f = \(x,y) -> f (w-x,y)
(w,h) = getSize i
main = do
Just x <- loadImage "smallLena.jpg"
Just xc <- loadColorImage"smallLena.jpg"
print ("getpixel", getPixelOld (100,100) x,getPixel (100,100) x)
print ("RGB",getPixelOldRGB (100,100) xc,getPixel (100,100) xc)
let nAtan :: Image GrayScale D32 -> Image GrayScale D32
nAtan x = unsafePerformIO $ withClone x (mapImageInplace atan)
nSqrt :: Image GrayScale D32 -> Image GrayScale D32
nSqrt x = unsafePerformIO $ withClone x (mapImageInplace sqrt)
nSqrtPw :: Image GrayScale D32 -> Image GrayScale D32
nSqrtPw x = unsafePerformIO $ withClone x (return . toImage . fmap sqrt . fromImage)
nSqrtPP :: Image GrayScale D32 -> Image GrayScale D32
nSqrtPP x = unsafePerformIO $ withClone x (return . toImageP . fmap sqrt . fromImage)
print ("Atan-eq", getPixel (100,100) (IM.atan x), getPixel (100,100) (nAtan x))
saveImage "A.png" ( nSqrt $ x)
saveImage "B.png" ( IM.sqrt $ x)
let gp = getPixel (100,100)
defaultMain [
bgroup "getPixel" [
bench "old" $ nf ((flip getPixelOld) x :: (Int,Int) -> Float) (105,105)
, bench "new" $ nf ((flip getPixel) x) (105,105)
, bench "old3" $ nf ((flip getPixelOldRGB) xc :: (Int,Int) -> (Float,Float,Float)) (105,105)
, bench "new3" $ nf ((flip getPixel) xc) (105,105)]
,bgroup "setPixel" [
bench "Old" $ setPixelOld (105,105) 1 x
, bench "new" $ setPixelOld (105,105) 1 x
]
,bgroup "transformations" [
bench "Old" $ nf (T.flip T.Horizontal) x
, bench "new" $ nf pwFlip x
]
,bgroup "sqrt" [
bench "im-sqrt" $ saveImage "sqrt-im.png" $ (IM.sqrt) x
, bench "map-sqrt" $ saveImage "sqrt-m.png" $ (nSqrt) x
, bench "fmap-sqrt" $ saveImage "sqrt-f.png" $ (nSqrtPw) x
, bench "p-fmap-sqrt" $ saveImage "sqrt-p.png" $ (nSqrtPP) x
]
,bgroup "mapping" [
bench "map-atan" $ nf (gp . nAtan) x
, bench "im-atan" $ nf (gp . IM.atan) x
, bench "map-sqrt" $ nf (gp . nSqrt) x
, bench "im-sqrt" $ nf (gp . IM.sqrt) x
] ]
| TomMD/CV | performance/Pixelwise.hs | bsd-3-clause | 2,833 | 0 | 16 | 965 | 1,036 | 527 | 509 | 51 | 1 |
-- |
-- Module : Template
-- License : BSD-style
-- Maintainer : Vincent Hanquez <[email protected]>
-- Stability : experimental
-- Portability : portable
--
-- A very simple template engine
--
module Template
(
-- * Types
Template
, Attrs
-- * methods
, parseTemplate
, renderTemplate
) where
import Data.Char (isDigit, isAlpha)
import Data.List (isPrefixOf)
import Control.Applicative
import Control.Monad
data TAtom =
Text String
| Var String
| Tpl String Template
deriving (Show)
type Template = [TAtom]
type Attrs = [(String, String)]
renderTemplate :: Template
-> Attrs
-> [(String, [Attrs])]
-> String
renderTemplate template attrs multiAttrs =
concat $ map renderAtom template
where
renderAtom :: TAtom -> String
renderAtom (Text b) = b
renderAtom (Var s) = maybe "" id $ lookup s attrs
renderAtom (Tpl n t) =
case lookup n multiAttrs of
Nothing -> error ("cannot find inner template attributes for: " ++ n)
Just [] -> error ("empty multiattrs for: " ++ n)
Just (i:is) ->
renderTemplate t (i ++ attrs) [] ++
concatMap (\inAttrs -> renderTemplate t (inAttrs ++ attrs ++ [("COMMA", ",")]) []) is
parseTemplate :: String -> Template
parseTemplate = parseTemplateFromTokens . tokenize
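-- A small usage sketch (added illustration; the template string, attribute
-- names and values are assumptions). Variables are written %%name%%, a
-- repeated group is delimited by a matching pair of %{name%} markers, and
-- every group instance after the first gets an implicit COMMA attribute
-- bound to ",":
--
-- > let tpl = parseTemplate "hello %%who%%: %{item%}%%COMMA%%%%x%%%{item%}!"
-- > renderTemplate tpl [("who","world")] [("item", [[("x","a")], [("x","b")]])]
-- "hello world: a,b!"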
parseTemplateFromTokens :: [Token] -> Template
parseTemplateFromTokens toks =
case runStreamParser parse toks of
Left err -> error ("template parse error: " ++ err)
Right (tatoms, []) -> tatoms
Right (_, over) -> error ("template left over: " ++ show over)
where parse = do
done <- isDone
if done
then return []
else do next <- getTemplate <|> getVariable <|> getOther
liftM (next:) parse
------------------------------------------------------------------------
-- parser methods
------------------------------------------------------------------------
getVariable :: StreamParser TAtom
getVariable = StreamParser $ \toks ->
case toks of
[] -> Left "variable: end of stream"
TokVariableMarker:TokText t:TokVariableMarker:rest
| isVariable t -> Right (Var t, rest)
| otherwise -> Left "not a variable, variable name invalid"
_ -> Left "not a variable: not starting by %%"
getTemplate :: StreamParser TAtom
getTemplate = StreamParser $ \toks ->
case toks of
[] -> Left "template: end of stream"
TokGroupStart:TokText t:TokGroupEnd:rest
| isVariable t ->
case break (== TokGroupStart) rest of
(_, []) -> Left "template: no end found"
(inner, TokGroupStart:TokText t2:TokGroupEnd:rest2)
| isVariable t2 ->
if t == t2
then Right (Tpl t (parseTemplateFromTokens inner), rest2)
else Left ("template: end name " ++ show t2 ++ " not matching start name " ++ show t)
| otherwise -> Left "template: end sequence: invalid name"
(_, _) -> Left "template: end sequence: not found"
| otherwise -> Left "template: start sequence: invalid name"
_ -> Left "template: not right starting sequence"
getOther :: StreamParser TAtom
getOther = StreamParser $ \toks ->
case toks of
(x:xs) -> Right (Text (show x), xs)
[] -> Left "getOther: end of string"
isVariable :: String -> Bool
isVariable = and . map isVariableChar
where isVariableChar :: Char -> Bool
isVariableChar c = isAlpha c || isDigit c || c == '_'
isDone :: StreamParser Bool
isDone = StreamParser $ \s -> Right (null s, s)
------------------------------------------------------------------------
-- parser subsystem
------------------------------------------------------------------------
newtype StreamParser a = StreamParser { runStreamParser :: [Token] -> Either String (a, [Token]) }
instance Functor StreamParser where
fmap f x = StreamParser $ \s ->
case (runStreamParser x) s of
Right (a, s') -> Right (f a, s')
Left err -> Left err
instance Applicative StreamParser where
pure = return
(<*>) fm m = StreamParser $ \s1 ->
case runStreamParser m s1 of
Left err -> Left err
Right (a, s2) ->
case runStreamParser fm s2 of
Left err -> Left err
Right (f, s3) -> Right (f a, s3)
instance Alternative StreamParser where
empty = mzero
(<|>) = mplus
instance Monad StreamParser where
return a = StreamParser $ \s -> Right (a, s)
(>>=) m1 m2 = StreamParser $ \s1 ->
case (runStreamParser m1) s1 of
Left err -> Left err
Right (a, s2) -> runStreamParser (m2 a) s2
instance MonadPlus StreamParser where
mzero = StreamParser $ \_ -> Left "empty"
mplus m1 m2 = StreamParser $ \s ->
case (runStreamParser m1) s of
Left _ -> (runStreamParser m2) s
Right (a, s2) -> Right (a, s2)
------------------------------------------------------------------------
-- token parsing
------------------------------------------------------------------------
data Token = TokVariableMarker
| TokGroupStart
| TokGroupEnd
| TokText String
deriving (Eq)
instance Show Token where
show TokVariableMarker = "%%"
show TokGroupStart = "%{"
show TokGroupEnd = "%}"
show (TokText t) = t
tokenize :: String -> [Token]
tokenize s
| "%%" `isPrefixOf` s = TokVariableMarker : tokenize (drop 2 s)
| "%{" `isPrefixOf` s = TokGroupStart : tokenize (drop 2 s)
| "%}" `isPrefixOf` s = TokGroupEnd : tokenize (drop 2 s)
| otherwise =
case break (== '%') s of
(t, "") -> [TokText t]
(t1, t2) -> TokText t1 : tokenize t2
| tekul/cryptonite | gen/Template.hs | bsd-3-clause | 6,160 | 0 | 21 | 1,935 | 1,728 | 897 | 831 | 132 | 6 |