code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
{-|
Module : Numeric.AERN.Poly.IntPoly.Addition
Description : out-rounded polynomial addition
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : portable
Out-rounded polynomial addition, subtraction and negation.
-}
module Numeric.AERN.Poly.IntPoly.Addition
(
addTerms,
negTerms,
addTermsConst
)
where
import Prelude hiding ((+))
import Numeric.AERN.Poly.IntPoly.Config
import Numeric.AERN.Poly.IntPoly.IntPoly
import Numeric.AERN.Poly.IntPoly.Reduction
import Numeric.AERN.Poly.IntPoly.New
--import Numeric.AERN.RmToRn.New
--import Numeric.AERN.RmToRn.Domain
--import qualified Numeric.AERN.RealArithmetic.NumericOrderRounding as ArithUpDn
import qualified Numeric.AERN.RealArithmetic.RefinementOrderRounding as ArithInOut
import Numeric.AERN.RealArithmetic.RefinementOrderRounding
(AddEffortIndicator, MixedAddEffortIndicator)
import Numeric.AERN.RealArithmetic.ExactOps
import Numeric.AERN.RealArithmetic.Measures
--import Numeric.AERN.RealArithmetic.Auxiliary
import qualified Numeric.AERN.NumericOrder as NumOrd
import qualified Numeric.AERN.RefinementOrder as RefOrd
import Numeric.AERN.Basics.Interval
import Numeric.AERN.Basics.Effort
import Numeric.AERN.Basics.Consistency
--import Numeric.AERN.Basics.SizeLimits (SizeLimits)
--import Numeric.AERN.Misc.Debug
import qualified Data.IntMap as IntMap
import Test.QuickCheck (Arbitrary)
-- | The addition-effort indicator of a polynomial is simply the effort
-- indicator stored in its configuration.
instance
    (RefOrd.IntervalLike cf,
     ArithInOut.RoundedReal cf)
    =>
    ArithInOut.RoundedAddEffort (IntPoly var cf)
    where
    type AddEffortIndicator (IntPoly var cf) =
        IntPolyEffort cf
    -- Default effort is read straight off the polynomial's configuration.
    addDefaultEffort (IntPoly cfg _) =
        ipolycfg_effort cfg
-- | Outwards-rounded addition of two polynomials.  The term count of the
-- result is reduced afterwards so it stays within the configured limits.
instance
    (RefOrd.IntervalLike cf,
     ArithInOut.RoundedReal cf,
     HasAntiConsistency cf,
     Arbitrary cf,
     NumOrd.PartialComparison (Imprecision cf),
     Ord var,
     Show var, Show cf)
    =>
    ArithInOut.RoundedAdd (IntPoly var cf)
    where
    addOutEff eff (IntPoly cfg1 terms1) (IntPoly cfg2 terms2) =
        reducePolyTermCountOut effCf $
            IntPoly cfg $ addTerms (<+>) terms1 terms2
        where
        -- The two configurations may differ; combine them into one that
        -- accommodates both operands.
        cfg = combineIntPolyCfgs cfg1 cfg2
        -- Coefficient addition, out-rounded, at the effort extracted below.
        (<+>) = ArithInOut.addOutEff effAdd
        effAdd = ArithInOut.fldEffortAdd sampleCf $ ArithInOut.rrEffortField sampleCf effCf
        effCf = ipolyeff_cfRoundedRealEffort eff
        sampleCf = ipolycfg_sample_cf cfg
    -- Inwards rounding is deliberately unsupported; see the retained
    -- commented-out draft below for a sketch of what it would look like.
    addInEff =
        error "aern-poly: IntPoly does not support inwards-rounded addition"
-- addInEff eff (IntPoly cfg terms1) (IntPoly _ terms2) =
-- reducePolyTermCountIn eff $
-- IntPoly cfg $
-- let ?addInOutEffort = effAdd in
-- addTerms (<+>) terms1 terms2
-- where
-- effAdd = ArithInOut.fldEffortAdd sample $ ArithInOut.rrEffortField sample eff
-- sample = ipolycfg_sample_cf cfg
-- | Subtraction comes for free from the class's default methods,
-- presumably combining 'ArithInOut.RoundedAdd' with 'Neg' -- the instance
-- body is intentionally empty.
instance
    (RefOrd.IntervalLike cf,
     ArithInOut.RoundedReal cf,
     HasAntiConsistency cf,
     NumOrd.PartialComparison cf,
     Arbitrary cf,
     NumOrd.PartialComparison (Imprecision cf),
     Ord var,
     Show var, Show cf)
    =>
    ArithInOut.RoundedSubtr (IntPoly var cf)
-- | Add two term trees point-wise, combining coefficients with the given
-- coefficient addition.  Both trees must have the same shape (same variable
-- nesting); mixing a constant with a variable node is a program error.
addTerms ::
    (Show var, Show cf) =>
    (cf -> cf -> cf) ->
    IntPolyTerms var cf -> IntPolyTerms var cf -> IntPolyTerms var cf
addTerms plus (IntPolyC val1) (IntPolyC val2) = IntPolyC (val1 `plus` val2)
addTerms plus (IntPolyV _xName1 powers1) (IntPolyV xName2 powers2) =
    -- Terms present in only one operand are kept as they are; shared powers
    -- are added recursively.
    IntPolyV xName2 (IntMap.unionWith (addTerms plus) powers1 powers2)
addTerms _ t1 t2 =
    error $ "addTerms: cannot add t1=" ++ show t1 ++ " and t2=" ++ show t2
{----- negation -----}
-- | Negation of a polynomial is exact: negate every coefficient.
instance
    (Neg cf) => Neg (IntPoly var cf)
    where
    neg (IntPoly cfg terms) = IntPoly cfg $ negTerms terms
-- | Negate every coefficient in a term tree.
negTerms ::
    Neg cf =>
    IntPolyTerms var cf -> IntPolyTerms var cf
negTerms (IntPolyC val) =
    IntPolyC $ neg val
negTerms (IntPolyV x polys) =
    IntPolyV x $ IntMap.map negTerms polys
-- The following four pairs of instances provide mixed addition of a
-- polynomial with a scalar (Integer, Int, Rational, Double).  Each pair
-- follows the same pattern: the effort instance reuses the polynomial's
-- configured effort, and the operation instance delegates to
-- 'mixedAddOutEffGeneric' with the appropriate mixed-field effort selector.
instance
    (ArithInOut.RoundedMixedAddEffort cf Integer,
     EffortIndicator (IntPolyEffort cf))
    =>
    ArithInOut.RoundedMixedAddEffort (IntPoly var cf) Integer
    where
    type MixedAddEffortIndicator (IntPoly var cf) Integer =
        IntPolyEffort cf
    mixedAddDefaultEffort (IntPoly cfg _) _c =
        ipolycfg_effort cfg
instance
    (Ord var, Show var, Show cf,
     ArithInOut.RoundedMixedAdd cf Integer,
     ArithInOut.RoundedReal cf,
     HasConsistency cf,
     RefOrd.IntervalLike cf)
    =>
    ArithInOut.RoundedMixedAdd (IntPoly var cf) Integer
    where
    mixedAddOutEff = mixedAddOutEffGeneric ArithInOut.rrEffortIntegerMixedField 0
    mixedAddInEff = mixedAddInEffGeneric ArithInOut.rrEffortIntegerMixedField 0
instance
    (ArithInOut.RoundedMixedAddEffort cf Int,
     EffortIndicator (IntPolyEffort cf))
    =>
    ArithInOut.RoundedMixedAddEffort (IntPoly var cf) Int
    where
    type MixedAddEffortIndicator (IntPoly var cf) Int =
        IntPolyEffort cf
    mixedAddDefaultEffort (IntPoly cfg _) _c =
        ipolycfg_effort cfg
instance
    (Ord var, Show var, Show cf,
     ArithInOut.RoundedMixedAdd cf Int,
     ArithInOut.RoundedReal cf,
     HasConsistency cf,
     RefOrd.IntervalLike cf)
    =>
    ArithInOut.RoundedMixedAdd (IntPoly var cf) Int
    where
    mixedAddOutEff = mixedAddOutEffGeneric ArithInOut.rrEffortIntMixedField 0
    mixedAddInEff = mixedAddInEffGeneric ArithInOut.rrEffortIntMixedField 0
instance
    (ArithInOut.RoundedMixedAddEffort cf Rational,
     EffortIndicator (IntPolyEffort cf))
    =>
    ArithInOut.RoundedMixedAddEffort (IntPoly var cf) Rational
    where
    type MixedAddEffortIndicator (IntPoly var cf) Rational =
        IntPolyEffort cf
    mixedAddDefaultEffort (IntPoly cfg _) _c =
        ipolycfg_effort cfg
instance
    (Ord var, Show var, Show cf,
     ArithInOut.RoundedMixedAdd cf Rational,
     ArithInOut.RoundedReal cf,
     HasConsistency cf,
     RefOrd.IntervalLike cf)
    =>
    ArithInOut.RoundedMixedAdd (IntPoly var cf) Rational
    where
    mixedAddOutEff = mixedAddOutEffGeneric ArithInOut.rrEffortRationalMixedField 0
    mixedAddInEff = mixedAddInEffGeneric ArithInOut.rrEffortRationalMixedField 0
instance
    (ArithInOut.RoundedMixedAddEffort cf Double,
     EffortIndicator (IntPolyEffort cf)) =>
    ArithInOut.RoundedMixedAddEffort (IntPoly var cf) Double
    where
    type MixedAddEffortIndicator (IntPoly var cf) Double =
        IntPolyEffort cf
    mixedAddDefaultEffort (IntPoly cfg _) _c =
        ipolycfg_effort cfg
instance
    (Ord var, Show var, Show cf,
     ArithInOut.RoundedMixedAdd cf Double,
     ArithInOut.RoundedReal cf,
     HasConsistency cf,
     RefOrd.IntervalLike cf)
    =>
    ArithInOut.RoundedMixedAdd (IntPoly var cf) Double
    where
    mixedAddOutEff = mixedAddOutEffGeneric ArithInOut.rrEffortDoubleMixedField 0
    mixedAddInEff = mixedAddInEffGeneric ArithInOut.rrEffortDoubleMixedField 0
-- | Shared implementation for the scalar mixed-add instances above.
-- The first argument selects the mixed-field effort for the scalar type @t@;
-- the second is a sample value of @t@ used only for effort extraction.
mixedAddOutEffGeneric, mixedAddInEffGeneric ::
    (Ord var, Show cf, Show var, ArithInOut.RoundedReal cf,
     RefOrd.IntervalLike cf, HasConsistency cf,
     ArithInOut.RoundedMixedField cf t)
    =>
    (cf -> ArithInOut.RoundedRealEffortIndicator cf -> ArithInOut.MixedFieldOpsEffortIndicator cf t)
    -> t
    -> IntPolyEffort cf
    -> IntPoly var cf -> t -> IntPoly var cf
mixedAddOutEffGeneric rrEffortMixedField sampleT eff (IntPoly cfg terms) a =
    IntPoly cfg $ addTermsConst (+|) cfg terms a
    where
    -- Out-rounded mixed addition of a coefficient and the scalar.
    (+|) = ArithInOut.mixedAddOutEff effMixedAdd
    effMixedAdd = ArithInOut.mxfldEffortAdd sampleCf sampleT effMixedField
    effMixedField = rrEffortMixedField sampleCf effCf
    effCf = ipolyeff_cfRoundedRealEffort eff
    sampleCf = ipolycfg_sample_cf cfg
-- Inwards rounding is not supported for mixed addition.
mixedAddInEffGeneric =
    error "aern-poly: IntPoly does not support inwards-rounded mixed addition"
-- | Mixed addition of a polynomial with a value of its own interval
-- coefficient type: effort again comes from the configuration.
instance
    (EffortIndicator (IntPolyEffort (Interval e)))
    =>
    ArithInOut.RoundedMixedAddEffort (IntPoly var (Interval e)) (Interval e)
    where
    type MixedAddEffortIndicator (IntPoly var (Interval e)) (Interval e) =
        IntPolyEffort (Interval e)
    mixedAddDefaultEffort (IntPoly cfg _) _c =
        ipolycfg_effort cfg
instance
    (Ord var, Show var,
     cf ~ Interval e,
     Show cf,
     ArithInOut.RoundedReal cf,
     HasConsistency cf)
    =>
    ArithInOut.RoundedMixedAdd (IntPoly var (Interval e)) (Interval e)
    where
    mixedAddOutEff eff (IntPoly cfg terms) a =
        IntPoly cfg $ addTermsConst (+|) cfg terms a
        where
        -- The scalar is a coefficient, so plain out-rounded coefficient
        -- addition is used rather than a mixed-field operation.
        (+|) = ArithInOut.addOutEff effAddCf
        effAddCf = ArithInOut.fldEffortAdd sampleCf effFieldCf
        effFieldCf = ArithInOut.rrEffortField sampleCf effCf
        effCf = ipolyeff_cfRoundedRealEffort eff
        sampleCf = ipolycfg_sample_cf cfg
    mixedAddInEff =
        error "aern-poly: IntPoly does not support inwards-rounded mixed addition"
-- | Add a constant of type @t@ to a term tree using the supplied
-- coefficient-plus-constant operation.  The constant is folded into the
-- power-0 sub-polynomial at every level of the variable nesting.
addTermsConst ::
    (Ord var, Show var, Show cf,
     RefOrd.IntervalLike cf,
     HasConsistency cf,
     ArithInOut.RoundedReal cf)
    =>
    (cf -> t -> cf) ->
    IntPolyCfg var cf ->
    IntPolyTerms var cf ->
    t ->
    IntPolyTerms var cf
addTermsConst addC _ (IntPolyC val) constant =
    IntPolyC $ val `addC` constant
addTermsConst addC cfg (IntPolyV x polys) constant =
    IntPolyV x $ IntMap.insert 0 updatedConstTerm polys
    where
    -- The sub-polynomial at power 0 carries the constant part; when it is
    -- absent, start from a zero polynomial (over cfgVars -- presumably the
    -- remaining variables; TODO confirm against 'mkConstTerms').
    currentConstTerm =
        IntMap.findWithDefault (mkConstTerms (zero sampleCf) cfgVars) 0 polys
    updatedConstTerm = addTermsConst addC innerCfg currentConstTerm constant
    cfgVars = ipolycfg_vars cfg
    -- Recurse with the first variable stripped from the configuration.
    innerCfg = cfgRemFirstVar cfg
    sampleCf = ipolycfg_sample_cf cfg
-- | "Mixed" addition of a polynomial with another polynomial is just
-- ordinary polynomial addition; the effort and operation are delegated
-- to the 'ArithInOut.RoundedAdd' instance above.
instance
    (EffortIndicator (IntPolyEffort (Interval e)),
     ArithInOut.RoundedReal (Interval e))
    =>
    ArithInOut.RoundedMixedAddEffort
        (IntPoly var (Interval e))
        (IntPoly var (Interval e))
    where
    type MixedAddEffortIndicator
            (IntPoly var (Interval e))
            (IntPoly var (Interval e)) =
        IntPolyEffort (Interval e)
    mixedAddDefaultEffort _ sample =
        ArithInOut.addDefaultEffort sample
instance
    (EffortIndicator (IntPolyEffort (Interval e)),
     ArithInOut.RoundedReal (Interval e),
     ArithInOut.RoundedAdd (IntPoly var (Interval e)))
    =>
    ArithInOut.RoundedMixedAdd
        (IntPoly var (Interval e))
        (IntPoly var (Interval e))
    where
    mixedAddOutEff = ArithInOut.addOutEff
    mixedAddInEff =
        error "aern-poly: IntPoly does not support inwards-rounded mixed addition"
| michalkonecny/aern | aern-poly/src/Numeric/AERN/Poly/IntPoly/Addition.hs | bsd-3-clause | 10,915 | 47 | 15 | 2,538 | 2,611 | 1,349 | 1,262 | 236 | 2 |
{-# LANGUAGE DataKinds #-}
module ElmFormat.Render.Text where
import Data.Coapplicative
import Elm.Utils ((|>))
import ElmVersion (ElmVersion)
import AST.Structure
import AST.V0_16
import AST.Module (Module)
import qualified Box
import qualified Data.Text as Text
import qualified ElmFormat.Render.Box as Render
-- | Format a parsed Elm module and render it to 'Text.Text'.
-- The literal @True@ and @2@ are options forwarded to
-- 'Render.formatModule' -- presumably a flag and a spacing level;
-- TODO confirm against ElmFormat.Render.Box.
render :: Coapplicative annf => ElmVersion -> Module [UppercaseIdentifier] (ASTNS annf [UppercaseIdentifier] 'TopLevelNK) -> Text.Text
render elmVersion modu =
    renderBox $ Render.formatModule elmVersion True 2 modu
-- | Render a layout 'Box.Box' to text.
renderBox :: Box.Box -> Text.Text
renderBox box =
    box
        |> Box.render
-- TODO: remove this and convert the Integration test to a test fixture
renderLiteral :: ElmVersion -> LiteralValue -> Text.Text
renderLiteral elmVersion literal =
    renderBox $ Render.formatLiteral elmVersion literal
| avh4/elm-format | elm-format-lib/src/ElmFormat/Render/Text.hs | bsd-3-clause | 839 | 0 | 11 | 129 | 208 | 118 | 90 | 21 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Snap.Internal.Http.Parser.Benchmark
( benchmarks )
where
import Control.Monad
import Criterion.Main hiding (run)
import qualified Data.ByteString as S
import Snap.Internal.Http.Parser.Data
import Snap.Internal.Http.Server.Parser
import qualified System.IO.Streams as Streams
-- | Parse one HTTP request from an in-memory byte string, forcing the
-- result (bang pattern) so the parse is not deferred past the benchmark.
parseGet :: S.ByteString -> IO ()
parseGet s = do
    !_ <- Streams.fromList [s] >>= parseRequest
    return $! ()
-- | Criterion group: parse the canned Firefox GET request 1000 times
-- per sample so per-parse overhead dominates timer noise.
benchmarks :: Benchmark
benchmarks = bgroup "parser"
    [ bench "firefoxget" $ whnfIO $! replicateM_ 1000
                         $! parseGet parseGetData
    ]
| k-bx/snap-server | benchmark/Snap/Internal/Http/Parser/Benchmark.hs | bsd-3-clause | 843 | 0 | 10 | 270 | 159 | 93 | 66 | 20 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Cloud.AWS.EC2.Types.Subnets
( CreateSubnetRequest(..)
, Subnet(..)
, SubnetState
) where
import Cloud.AWS.EC2.Types.Common (ResourceTag)
import Cloud.AWS.Lib.FromText (deriveFromText)
import Data.IP (AddrRange, IPv4)
import Data.Text (Text)
-- | Parameters for the EC2 CreateSubnet call: the owning VPC, the CIDR
-- block to carve out, and an optional availability zone.
data CreateSubnetRequest = CreateSubnetRequest
    { createSubnetRequestVpcId :: Text
    , createSubnetRequestCidrBlock :: AddrRange IPv4
    , createSubnetRequestAvailabilityZone :: Maybe Text
    }
  deriving (Show, Read, Eq)
-- | An EC2 subnet as returned by DescribeSubnets.
data Subnet = Subnet
    { subnetId :: Text
    , subnetState :: SubnetState
    , subnetVpcId :: Text
    , subnetCidrBlock :: AddrRange IPv4
    , subnetAvailableIpAddressCount :: Int
    , subnetAvailabilityZone :: Text
    , subnetDefaultForAz :: Maybe Bool
    , subnetMapPublicIpOnLaunch :: Maybe Bool
    , subnetTagSet :: [ResourceTag]
    }
  deriving (Show, Read, Eq)
-- | Lifecycle state of a subnet.
data SubnetState
    = SubnetStatePending
    | SubnetStateAvailable
  deriving (Show, Read, Eq)
-- Template Haskell: derive a FromText instance mapping the API strings
-- "pending" and "available" onto the constructors above (in order).
deriveFromText "SubnetState" ["pending", "available"]
| worksap-ate/aws-sdk | Cloud/AWS/EC2/Types/Subnets.hs | bsd-3-clause | 1,061 | 0 | 9 | 204 | 254 | 154 | 100 | 30 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Network.Google.Drive.UploadSpec
( main
, spec
) where
import SpecHelper
import Data.Conduit (($$+-))
import Data.Conduit.Binary (sinkLbs)
import qualified Data.ByteString.Lazy.Char8 as C8
main :: IO ()
main = hspec spec
-- | Integration specs for Drive uploads.  'runApiSpec' (from SpecHelper)
-- presumably provides a scratch folder against the live API and cleans
-- up afterwards -- these are not pure unit tests.
spec :: Spec
spec = describe "Network.Google.Drive.Upload" $ do
    it "can upload new content" $ do
        runApiSpec $ \folder -> do
            let fd = setParent folder $ newFile "test-file" Nothing
            -- Create the file with content streamed from the fixture on disk.
            file <- createFileWithContent fd (fSize fixture) $
                uploadSourceFile $ fPath fixture
            fileTitle (fileData file) `shouldBe` "test-file"
            -- Downloading it back should yield exactly the fixture's bytes.
            downloadFile file ($$+- sinkLbs)
                `shouldReturn` Just (C8.pack $ fContent fixture)
    it "can update existing content" $ do
        runApiSpec $ \folder -> do
            let fd = setParent folder $ newFile "test-file" Nothing
            file <- createFile fd
            -- Update both metadata (title) and content in one call.
            file' <- updateFileWithContent
                (fileId file)
                (fd { fileTitle = "test-file-updated" })
                (fSize fixture)
                (uploadSourceFile $ fPath fixture)
            fileTitle (fileData file') `shouldBe` "test-file-updated"
            downloadFile file ($$+- sinkLbs)
                `shouldReturn` Just (C8.pack $ fContent fixture)
| pbrisbin/google-drive | test/Network/Google/Drive/UploadSpec.hs | mit | 1,344 | 0 | 20 | 413 | 364 | 187 | 177 | 32 | 1 |
{- Copyright © 2012, Vincent Elisha Lee Frey. All rights reserved.
- This is open source software distributed under a MIT license.
- See the file 'LICENSE' for further information.
-}
module System.Console.CmdTheLine.Util
(
-- * File path validation
-- ** Existing path check
fileExists, dirExists, pathExists
-- ** Existing paths check
, filesExist, dirsExist, pathsExist
-- ** Valid path
, validPath
) where
import Control.Applicative
import Text.PrettyPrint
import System.Console.CmdTheLine.Common
import System.Console.CmdTheLine.Err
import Control.Monad.IO.Class ( liftIO )
import System.Directory ( doesFileExist, doesDirectoryExist )
import System.FilePath ( isValid )
-- | Check whether the given path exists as a file or as a directory.
-- Both checks are always performed.
doesFileOrDirExist :: String -> IO Bool
doesFileOrDirExist path = (||) <$> doesFileExist path <*> doesDirectoryExist path
-- | Run an IO predicate on a path inside the 'Err' monad; return the path
-- unchanged on success, otherwise fail with a message built by 'no'
-- (presumably "no <errStr> <path>"; confirm against CmdTheLine.Err).
check :: (String -> IO Bool) -> String -> String -> Err String
check test errStr path = do
    -- NOTE(review): the name isDir is misleading; it is simply the result
    -- of whatever predicate was supplied.
    isDir <- liftIO $ test path
    if isDir
       then return path
       else msgFail $ no errStr path
-- | Lift 'check' over a 'Term' producing a single path.
validate :: (String -> IO Bool) -> String -> Term String -> Term String
validate test errStr = ret . fmap (check test errStr)
-- | Lift 'check' over a 'Term' producing a list of paths; all must pass.
validates :: (String -> IO Bool) -> String -> Term [String] -> Term [String]
validates test errStr = ret . fmap (mapM $ check test errStr)
-- | 'fileExists' @term@ checks that 'String' in @term@ is a path to an existing
-- /file/. If it is not, exit with an explanatory message for the user.
fileExists :: Term String -> Term String
fileExists = validate doesFileExist "file"
-- | 'dirExists' @term@ checks that 'String' in @term@ is a path to an existing
-- /directory/. If it is not, exit with an explanatory message for the user.
dirExists :: Term String -> Term String
dirExists = validate doesDirectoryExist "directory"
-- | 'pathExists' @term@ checks that 'String' in @term@ is a path to an existing
-- /file or directory/. If it is not, exit with an explanatory message for the
-- user.
pathExists :: Term String -> Term String
pathExists = validate doesFileOrDirExist "file or directory"
-- | 'filesExist' @term@ is as 'fileExists' but for a @term@ containing a list
-- of file paths.
filesExist :: Term [String] -> Term [String]
filesExist = validates doesFileExist "file"
-- | 'dirsExist' @term@ is as 'dirExists' but for a @term@ containing a list
-- of directory paths.
dirsExist :: Term [String] -> Term [String]
dirsExist = validates doesDirectoryExist "directory"
-- | 'pathsExist' @term@ is as 'pathExists' but for a @term@ containing a list
-- of paths.
pathsExist :: Term [String] -> Term [String]
pathsExist = validates doesFileOrDirExist "file or directory"
-- | 'validPath' @term@ checks that the 'String' in @term@ is a
-- syntactically valid path for the current operating system (via
-- 'System.FilePath.isValid'); on failure the user gets an explanatory
-- message.  Unlike the @*Exists@ checks, this never touches the disk.
validPath :: Term String -> Term String
validPath = ret . fmap check
    where
    check str
        | isValid str = return str
        | otherwise   = msgFail $ failDoc str
    failDoc str = quotes (text str) <+> text "is not a valid file path."
| glutamate/cmdtheline | src/System/Console/CmdTheLine/Util.hs | mit | 3,036 | 0 | 10 | 559 | 596 | 319 | 277 | 40 | 2 |
--------------------------------------------------------------------------------
{-| Module : Events
Copyright : (c) Daan Leijen 2003
(c) Shelarcy ([email protected]) 2006
License : wxWindows
Maintainer : [email protected]
Stability : provisional
Portability : portable
Define event handling. Events are parametrised by the widget that can
correspond to a certain event and the type of the event handler.
For example, the 'resize' event has type:
> Reactive w => Event w (IO ())
This means that all widgets in the 'Reactive' class can respond to
'resize' events. (and since 'Window' is an instance of this class, this
means that basically all visible widgets are reactive).
An @Event w a@ can be transformed into an attribute of type 'Attr' @w a@
using the 'on' function.
> do f <- frame [text := "test"]
> set f [on resize := set f [text := "resizing"]]
For convenience, the 'mouse' and 'keyboard' have a serie of /event filters/:
'click', 'drag', 'enterKey', 'charKey', etc. These filters are write-only
and do not overwrite any previous mouse or keyboard handler but all stay
active at the same time. However, all filter will be overwritten again
when 'mouse' or 'keyboard' is set again. For example, the following program
makes sense:
> set w [on click := ..., on drag := ...]
But in the following program, only the handler for 'mouse' will be called:
> set w [on click := ..., on mouse := ...]
If you want to set the 'mouse' later but retain the old event filters,
you can first read the current 'mouse' handler and call it in the
new handler (and the same for the 'keyboard' of course). This implemenation
technique is used to implement event filters themselves and is also
very useful when setting an event handler for a 'closing' event:
> set w [on closing :~ \previous -> do{ ...; previous }]
Note that you should call 'propagateEvent' (or 'Graphics.UI.WXCore.Events.skipCurrentEvent') whenever
you do not process the event yourself in an event handler. This propagates
the event to the parent event handlers and give them a chance to
handle the event in an appropiate way. This gives another elegant way to install
a 'closing' event handler:
> set w [on closing := do{ ...; propagateEvent }]
-}
--------------------------------------------------------------------------------
module Graphics.UI.WX.Events
( -- * Event
Event
, on
, mapEvent
, propagateEvent
-- * Basic events
-- ** Selecting
, Selecting, select
-- ** Commanding
, Commanding, command
-- ** Reactive
, Reactive
, mouse, keyboard
, closing, idle, resize, focus, activate
, Paint
, paint, paintRaw, repaint
-- * Event filters
-- ** Mouse filters
, enter, leave, motion, drag
, click, unclick, doubleClick
, clickRight, unclickRight
-- * Keyboard event filters
, anyKey, key, charKey
, enterKey,tabKey,escKey,helpKey
, delKey,homeKey,endKey
, pgupKey,pgdownKey
, downKey,upKey,leftKey,rightKey
, rebind
-- * Types
-- ** Modifiers
, Modifiers(..)
, showModifiers
, noneDown, justShift, justAlt, justControl, justMeta, isNoneDown
, isNoShiftAltControlDown
-- ** Mouse events
, EventMouse (..)
, showMouse
, mousePos, mouseModifiers
-- ** Calender event
, EventCalendar(..)
, calendarEvent
-- ** Keyboard events
, EventKey (..), Key(..)
, keyKey, keyModifiers, keyPos
, showKey, showKeyModifiers
-- * Internal
, newEvent
) where
import Graphics.UI.WXCore hiding (Event)
import Graphics.UI.WX.Types
import Graphics.UI.WX.Attributes
import Graphics.UI.WX.Layout
import Graphics.UI.WX.Classes
{--------------------------------------------------------------------
Basic events
--------------------------------------------------------------------}
-- | An event for a widget @w@ that expects an event handler of type @a@.
-- Internally it is just an attribute whose value is the handler.
data Event w a = Event (Attr w a)
-- | Transform an event to an attribute.
on :: Event w a -> Attr w a
on (Event attr)
  = attr
-- | Change the event type.  @get@ converts a stored handler of type @a@
-- to the exposed type @b@; @set@ combines the previous handler with a
-- newly supplied one.
mapEvent :: (a -> b) -> (a -> b -> a) -> Event w a -> Event w b
mapEvent get set (Event attr)
  = Event (mapAttr get set attr)
{--------------------------------------------------------------------
Event classes
--------------------------------------------------------------------}
-- | 'Selecting' widgets fire a 'select' event when an item is selected.
class Selecting w where
  -- | A 'select' event is fired when an item is selected.
  select :: Event w (IO ())
-- | 'Commanding' widgets fire a 'command' event.
class Commanding w where
  -- | A commanding event, for example a button press.
  command :: Event w (IO ())
-- | 'Reactive' widgets are almost all visible widgets on the screen.
class Reactive w where
  -- | Raw mouse events; see the filters ('click', 'drag', ...) for
  -- pre-filtered variants.
  mouse     :: Event w (EventMouse -> IO ())
  -- | Raw keyboard events; see 'key', 'charKey', etc. for filters.
  keyboard  :: Event w (EventKey -> IO ())
  closing   :: Event w (IO ())
  idle      :: Event w (IO Bool)
  resize    :: Event w (IO ())
  -- | Handler receives 'True' on focus gained, 'False' on focus lost.
  focus     :: Event w (Bool -> IO ())
  activate  :: Event w (Bool -> IO ())
-- | 'Paint' widgets can serve as a canvas.
-- /Note:/ it is illegal to use both a 'paint' and 'paintRaw'
-- event handler at the same widget.
class Paint w where
  -- | Paint double buffered to a device context. The context is always
  -- cleared before drawing. Takes the current view rectangle (adjusted
  -- for scrolling) as an argument.
  paint    :: Event w (DC () -> Rect -> IO ())
  -- | Paint directly to the on-screen device context. Takes the current
  -- view rectangle and a list of dirty rectangles as arguments.\
  paintRaw :: Event w (DC () -> Rect -> [Rect] -> IO ())
  -- | Emit a paint event to the specified widget.
  repaint  :: w -> IO ()
{--------------------------------------------------------------------
Mouse event filters
--------------------------------------------------------------------}
-- Pre-filtered mouse events.  Each filter matches one constructor of
-- 'EventMouse'; the button filters additionally require that no
-- shift/alt/control modifier is held, while 'enter'/'leave' accept any
-- modifier state.
click :: Reactive w => Event w (Point -> IO ())
click = mouseFilter "click" match
    where
    match (MouseLeftDown _pt mods) = isNoShiftAltControlDown mods
    match _                        = False
unclick :: Reactive w => Event w (Point -> IO ())
unclick = mouseFilter "unclick" match
    where
    match (MouseLeftUp _pt mods) = isNoShiftAltControlDown mods
    match _                      = False
doubleClick :: Reactive w => Event w (Point -> IO ())
doubleClick = mouseFilter "doubleClick" match
    where
    match (MouseLeftDClick _pt mods) = isNoShiftAltControlDown mods
    match _                          = False
drag :: Reactive w => Event w (Point -> IO ())
drag = mouseFilter "drag" match
    where
    match (MouseLeftDrag _pt mods) = isNoShiftAltControlDown mods
    match _                        = False
motion :: Reactive w => Event w (Point -> IO ())
motion = mouseFilter "motion" match
    where
    match (MouseMotion _pt mods) = isNoShiftAltControlDown mods
    match _                      = False
clickRight :: Reactive w => Event w (Point -> IO ())
clickRight = mouseFilter "clickRight" match
    where
    match (MouseRightDown _pt mods) = isNoShiftAltControlDown mods
    match _                         = False
unclickRight :: Reactive w => Event w (Point -> IO ())
unclickRight = mouseFilter "unclickRight" match
    where
    match (MouseRightUp _pt mods) = isNoShiftAltControlDown mods
    match _                       = False
enter :: Reactive w => Event w (Point -> IO ())
enter = mouseFilter "enter" match
    where
    match (MouseEnter _pt _mods) = True
    match _                      = False
leave :: Reactive w => Event w (Point -> IO ())
leave = mouseFilter "leave" match
    where
    match (MouseLeave _pt _mods) = True
    match _                      = False
-- | Build a write-only filtered mouse event: reading it is an error, and
-- setting it installs a handler that runs on matching events (receiving
-- the mouse position) while delegating non-matching events to the
-- previously installed handler -- this is what lets multiple filters
-- coexist on the same widget.
mouseFilter :: Reactive w => String -> (EventMouse -> Bool) -> Event w (Point -> IO ())
mouseFilter name filter
  = mapEvent get set mouse
  where
    get prev x
      = ioError (userError ("WX.Events: the " ++ name ++ " event is write-only."))
    set prev new mouseEvent
      = if (filter mouseEvent)
         then new (mousePos mouseEvent)
         else prev mouseEvent
{--------------------------------------------------------------------
Keyboard filter events
--------------------------------------------------------------------}
-- | Re-expose an event with its getter and setter passed through:
-- the getter returns the stored handler unchanged and the setter keeps
-- its first argument.
-- NOTE(review): the local names suggest the setter returns the /new/
-- handler, but whether the first setter argument is the new or the
-- previous handler depends on 'mapAttr''s argument order -- compare
-- with 'mouseFilter', whose setter names its arguments (prev, new).
-- Confirm against "Graphics.UI.WX.Attributes".
rebind :: Event w (IO ()) -> Event w (IO ())
rebind event
  = mapEvent get set event
  where
    get prev
      = prev
    set new prev
      = new
-- Convenience filters for individual keys; each is 'key' applied to the
-- corresponding 'Key' constructor.
enterKey,tabKey,escKey,helpKey,delKey,homeKey,endKey :: Reactive w => Event w (IO ())
pgupKey,pgdownKey,downKey,upKey,leftKey,rightKey :: Reactive w => Event w (IO ())
enterKey  = key KeyReturn
tabKey    = key KeyTab
escKey    = key KeyEscape
helpKey   = key KeyHelp
delKey    = key KeyDelete
homeKey   = key KeyHome
endKey    = key KeyEnd
pgupKey   = key KeyPageUp
pgdownKey = key KeyPageDown
downKey   = key KeyDown
upKey     = key KeyUp
leftKey   = key KeyLeft
rightKey  = key KeyRight
-- | Filter for a specific character key.
charKey :: Reactive w => Char -> Event w (IO ())
charKey c
  = key (KeyChar c)
-- | Filter for a specific key.  Note that only the key itself is
-- compared; modifier state and position are ignored.
key :: Reactive w => Key -> Event w (IO ())
key k
  = keyboardFilter "key" filter
  where
    filter (EventKey x mod pt) = k==x
-- | Fires on every key press, passing the pressed key to the handler.
anyKey :: Reactive w => Event w (Key -> IO ())
anyKey
  = keyboardFilter1 "anyKey" (const True)
-- | Build a write-only filtered keyboard event.  Unlike 'mouseFilter',
-- the previous handler is invoked on /every/ event (after the new one,
-- when the predicate matches), so filters accumulate rather than shadow.
keyboardFilter :: Reactive w => String -> (EventKey -> Bool) -> Event w (IO ())
keyboardFilter name filter
  = mapEvent get set keyboard
  where
    get prev
      = ioError (userError ("WX.Events: the " ++ name ++ " event is write-only."))
    set prev new keyboardEvent
      = do when (filter keyboardEvent) new
           prev keyboardEvent
-- | As 'keyboardFilter', but the installed handler receives the pressed
-- key; on a match the previous handler is /not/ called (mouse-filter
-- style chaining).
keyboardFilter1 :: Reactive w => String -> (EventKey -> Bool) -> Event w (Key -> IO ())
keyboardFilter1 name filter
  = mapEvent get set keyboard
  where
    get prev key
      = ioError (userError ("WX.Events: the " ++ name ++ " event is write-only."))
    set prev new keyboardEvent
      = if (filter keyboardEvent)
         then new (keyKey keyboardEvent)
         else prev keyboardEvent
{--------------------------------------------------------------------
Calender event filters
--------------------------------------------------------------------}
-- | Calendar-control events, wrapping the WXCore get/set pair.
calendarEvent :: Event (CalendarCtrl a) (EventCalendar -> IO ())
calendarEvent
  = newEvent "calendarEvent" calendarCtrlGetOnCalEvent calendarCtrlOnCalEvent
{--------------------------------------------------------------------
Generic event creators
-------------------------------------------------------------------}
-- | Create a new event from a get and set function.
-- | Create a new event from a get and set function (via 'newAttr').
newEvent :: String -> (w -> IO a) -> (w -> a -> IO ()) -> Event w a
newEvent name getter setter
  = Event (newAttr name getter setter) | ekmett/wxHaskell | wx/src/Graphics/UI/WX/Events.hs | lgpl-2.1 | 11,138 | 0 | 13 | 2,802 | 2,244 | 1,185 | 1,059 | 172 | 2 |
{- |
Module : $Header$
Description : MatchCAD program
Copyright : (c) Ewaryst Schulz, DFKI Bremen 2010
License : similar to LGPL, see HetCATS/LICENSE.txt or LIZENZ.txt
Maintainer : [email protected]
Stability : experimental
Portability : non-portable (via imports)
Program for matching to HasCASL exported CAD designs against design patterns
-}
import System.Environment
import System.Console.GetOpt
import HasCASL.InteractiveTests
import Data.Bits
import Data.Maybe
import Data.List
-- | Parse the command line; on error print the message plus usage,
-- otherwise run the matcher and print its result.
main :: IO ()
main = do
  args <- getArgs
  case processArgs args of
    Left msg -> putStrLn $ "Design Matching: " ++ msg ++ "\n\n" ++ dmUsage
    Right st -> runProg st >>= putStrLn
-- | Dispatch to the translating or plain matcher depending on the
-- @--translate@ flag.
runProg :: ProgSettings -> IO String
runProg st
    | translate st = matchTranslate (lib st) (spec st) (pattern st) $ design st
    | otherwise = matchDesign (lib st) (spec st) (pattern st) $ design st
-- ----------------------- Input Arguments -------------------------
-- | Parse the raw arguments into 'ProgSettings'.  A bare (non-option)
-- argument is treated as the library file ('ReturnInOrder' 'PFLib').
-- All problems -- leftover arguments, unrecognised flags, getopt errors
-- and missing mandatory flags -- are collected into one message.
processArgs :: [String] -> Either String ProgSettings
processArgs args =
    let (flags, noopts, unrecopts, errs) = getOpt' (ReturnInOrder PFLib) options args
        msgl = checkFlags flags
        -- Append each non-empty problem list, prefixed by its description.
        f str (l, s) = if null l then str else str ++ "\n" ++ s ++ unlines l
        msg = foldl f ""
              [ (noopts, "non-handled extra arguments encountered ")
              , (unrecopts, "unrecognized flags encountered ")
              , (errs, "")
              , (msgl, "")
              ]
    in if null msg then Right $ getSettings flags else Left msg
-- | Usage header shown above the option descriptions, including an
-- example invocation.
dmHeader :: String
dmHeader = unlines
    [ "Usage: matchcad [OPTION...] [file]"
    , ""
    , "matchcad /tmp/flange.het -sMatch -pFlangePattern -dComponent"
    , ""
    ]
-- | Full usage text: header plus generated option documentation.
dmUsage :: String
dmUsage = usageInfo dmHeader options
{- | 'options' describes all available options and is used to generate usage
information -}
options :: [OptDescr ProgFlag]
-- Option [Char] [String] (ArgDescr a) String
-- Each tuple is (long name, description, argument descriptor); the short
-- name is derived from the first character of the long name by f below.
options = map f
    [ ( "lib", "Path to the hets file", ReqArg PFLib "FILE")
    , ( "spec"
      , "Name of specification importing both, the pattern and the design specification"
      , ReqArg PFSpec "SPECNAME")
    , ( "pattern", "Name of the pattern specification"
      , ReqArg PFPattern "SPECNAME")
    , ( "design", "Name of the design specification"
      , ReqArg PFDesign "SPECNAME")
    , ( "translate"
      , "If this flag is set the match is further translated to an EnCL specification"
      , NoArg PFTrans)
    , ( "verbosity"
      , "A value from 0=quiet to 4=print out all information during processing"
      , OptArg (PFVerbosity . read . fromMaybe "4") "0-4")
    , ( "quiet", "Equal to -v0", NoArg PFQuiet)
    ] where
        f (fs, descr, arg) = Option [head fs] [fs] arg descr
-- | Report which of the four mandatory flags (lib, spec, pattern, design)
-- are missing.  A bitmask accumulates which flags were seen (bit 0 = lib,
-- 1 = spec, 2 = pattern, 3 = design); unset bits become messages.
checkFlags :: [ProgFlag] -> [String]
checkFlags = g . mapAccumL f (0 :: Int) where
    f i (PFLib _) = (setBit i 0, ())
    f i (PFSpec _) = (setBit i 1, ())
    f i (PFPattern _) = (setBit i 2, ())
    f i (PFDesign _) = (setBit i 3, ())
    f i _ = (i, ())
    g (i, _) = mapMaybe (h i) [ (0, "lib")
                              , (1, "spec")
                              , (2, "pattern")
                              , (3, "design") ]
    h i (j, s)
        | testBit i j = Nothing
        | otherwise = Just $ s ++ " argument is missing"
-- | Fully resolved program configuration.
data ProgSettings =
    ProgSettings
    { lib :: String        -- path to the hets file
    , spec :: String       -- spec importing both pattern and design
    , pattern :: String    -- pattern specification name
    , design :: String     -- design specification name
    , translate :: Bool    -- also translate the match to EnCL
    , verbosity :: Int }   -- 0 = quiet .. 4 = verbose
-- | Starting point for folding in flags.  The four mandatory fields are
-- error thunks: forcing them before they were set is a bug, but
-- 'checkFlags' guarantees they are present before settings are used.
defaultSettings :: ProgSettings
defaultSettings = ProgSettings
                  { lib = error "uninitialized settings"
                  , spec = error "uninitialized settings"
                  , pattern = error "uninitialized settings"
                  , design = error "uninitialized settings"
                  , translate = False
                  , verbosity = 4 }
-- | One parsed command-line flag.
data ProgFlag =
    PFLib String
        | PFSpec String
        | PFPattern String
        | PFDesign String
        | PFVerbosity Int
        | PFQuiet
        | PFTrans
-- | Apply a single command-line flag to the accumulated settings.
makeSettings :: ProgSettings -> ProgFlag -> ProgSettings
makeSettings settings (PFLib s)       = settings { lib = s }
makeSettings settings (PFSpec s)      = settings { spec = s }
makeSettings settings (PFPattern s)   = settings { pattern = s }
makeSettings settings (PFDesign s)    = settings { design = s }
makeSettings settings (PFVerbosity i) = settings { verbosity = i }
makeSettings settings PFQuiet         = settings { verbosity = 0 }
makeSettings settings PFTrans         = settings { translate = True }
-- | Fold all parsed flags over the defaults, in order of appearance.
getSettings :: [ProgFlag] -> ProgSettings
getSettings = foldl makeSettings defaultSettings
| keithodulaigh/Hets | HasCASL/MatchCAD.hs | gpl-2.0 | 4,665 | 0 | 13 | 1,460 | 1,197 | 652 | 545 | 103 | 7 |
{-# LANGUAGE BangPatterns #-}
{- $Id: Parser.hs,v 1.2 2003/11/10 21:28:58 antony Exp $
******************************************************************************
* I N V A D E R S *
* *
* Module: Parser *
* Purpose: Parsing (mainly lexical analysis) of window event *
* stream. *
* Author: Henrik Nilsson *
* *
* Copyright (c) Yale University, 2003 *
* *
******************************************************************************
-}
-- Quick 'n dirty adaptation from old robot simulator. Could probably be
-- done better in the new AFRP framework.
module Parser (
GameInput, -- Abstract
parseWinInput, -- :: SF WinInput GameInput
command, -- :: SF GameInput (Event Command)
cmdString, -- :: SF GameInput (Event String)
ptrPos, -- :: SF GameInput Position2
lbp, -- :: SF GameInput (Event ())
lbpPos, -- :: SF GameInput (Event Position2)
lbDown, -- :: SF GameInput Bool
rbp, -- :: SF GameInput (Event ())
rbpPos, -- :: SF GameInput (Event Position2)
rbDown, -- :: SF GameInput Bool
dragStart, -- :: SF GameInput (Event ())
dragStop, -- :: SF GameInput (Event Distance2)
dragStartPos, -- :: SF GameInput Position2
dragVec, -- :: SF GameInput Distance2
dragging, -- :: SF GameInput Bool
keyStat,
getT,
getDt
) where
import Data.Maybe (isJust)
import qualified HGL as HGL (Event(..))
import qualified HGL as HGL (Point(..))
import AFRP
import AFRPUtilities
import AFRPGeometry
-- import AFRPMiscellany (mapFst)
import PhysicalDimensions
import Command
-- | Raw window input: an event stream of HGL window events.
type WinInput = Event HGL.Event
------------------------------------------------------------------------------
-- Exported entities
------------------------------------------------------------------------------
-- | Parsed game input, produced by 'parseWinInput'.
data GameInput = GameInput {
    giCmdStr :: !String,               -- command string typed so far
    giCmd    :: !(Event Command),      -- completed command, if any
    giPDS    :: !PDState,              -- current pointing-device state
    giKS     :: !(Event (Char, Bool)), -- key event: character and up/down state
    giDt     :: !Time,                 -- time step since previous sample (see gDt)
    giT      :: !Time                  -- current local time
}
-- | Parse a pair of window-event streams into 'GameInput'.  The first
-- stream feeds both the command scanner ('wiToCmd') and the keyboard
-- events ('wiToKs'); the second feeds the pointing-device state
-- ('wiToPDS').  Timing is derived independently by 'gDt'.
parseWinInput :: SF (WinInput,WinInput) GameInput
parseWinInput = (wiToCmd <<< (arr fst))&&&
                (wiToPDS <<<(arr snd)) &&&
                (wiToKs <<< (arr fst)) &&& (gDt)
                 >>^ \((cmdStr, cmd), (pds, (ks,(dt,t)))) ->
                        GameInput {giCmdStr = cmdStr,
                                   giCmd = cmd,
                                   giPDS = pds,
                                   giKS = ks,
                                   giDt = dt,
                                   giT = t}
-- All event sources below are defined such that they will NOT occur at local
-- time 0 (immediately after a switch). Sometimes explicitly using a "notYet".
-- Sometimes using through careful use of "edge" and relatives. Is this the
-- right approach?
-- A valid command has been read.
-- | Event source for completed commands.  'notYet' suppresses any
-- occurrence at local time 0, so a command is never delivered
-- immediately after a switch.
command :: SF GameInput (Event Command)
command = arr giCmd >>> notYet
-- | Time bookkeeping: integrating the constant 1 recovers the current
-- local time @t@; 'iPre' delays it one sample so the difference gives
-- the step since the previous sample.  The delta is padded by 0.005 --
-- presumably to keep it strictly positive; confirm against the
-- consumers of 'getDt'.  Output is @(dt, t)@.
gDt :: SF a (Double,Double)
gDt
  = (arr (\_ -> 1) >>>
       (imIntegral 0 >>> arr (\ t -> (t, t))) >>>
       (first ((iPre 0) <<< identity) >>> arr (\ (lt, t) -> (t - lt + 0.005,t))))
-- Continuous parser feed back.
-- | The command prefix typed so far (for echoing in the UI).
cmdString :: SF GameInput String
cmdString = arr giCmdStr
-- | Current time, delayed one sample by 'iPre' (initially 0).
getT :: SF GameInput Time
getT = (iPre 0) <<< (arr giT)
-- | Time delta, delayed one sample by 'iPre' (initially 0).
getDt :: SF GameInput Time
getDt = (iPre 0) <<< (arr giDt)
-- | Current pointer position.
ptrPos :: SF GameInput Position2
ptrPos = arr (pdsPos . giPDS)
-- | Left-button-press event, with the position discarded.
lbp :: SF GameInput (Event ())
lbp = lbpPos >>^ (`tag` ())
-- | Raw key up\/down events; 'notYet' suppresses local time 0.
keyStat :: SF GameInput (Event (Char, Bool))
keyStat = giKS ^>> notYet
-- | Left-button-press event carrying the press position.  (The @#@
-- operator appears to be forward composition from a utility module --
-- confirm in AFRPUtilities.)
lbpPos :: SF GameInput (Event Position2)
lbpPos = giPDS # pdsLeft ^>> edgeJust
-- | Whether the left button is currently down.
lbDown :: SF GameInput Bool
lbDown = arr (giPDS # pdsLeft # isJust)
-- | Right-button-press event, with the position discarded.
rbp :: SF GameInput (Event ())
rbp = rbpPos >>^ (`tag` ())
-- | Right-button-press event carrying the press position.
rbpPos :: SF GameInput (Event Position2)
rbpPos = giPDS # pdsRight ^>> edgeJust
-- | Whether the right button is currently down.
rbDown :: SF GameInput Bool
rbDown = arr (giPDS # pdsRight # isJust)
-- | Event marking the start of a drag.  The initial @Just undefined@
-- seed makes sure no start event fires at local time 0.
dragStart :: SF GameInput (Event ())
dragStart = giPDS # pdsDrag ^>> edgeBy detectStart (Just undefined)
    where
        detectStart Nothing  (Just _) = Just ()
        detectStart _        _        = Nothing
-- | Event marking the end of a drag, tagged with the final drag vector.
dragStop :: SF GameInput (Event Distance2)
dragStop = (giPDS # pdsDrag ^>> edgeBy detectStop Nothing) &&& dragVec
           >>^ \(e, dv) -> e `tag` dv
    where
        detectStop (Just _) Nothing = Just ()
        detectStop _        _       = Nothing
-- (Last) drag start position.
dragStartPos :: SF GameInput Position2
dragStartPos = arr (giPDS # pdsDragStartPos)
-- (Last) drag vector.
dragVec :: SF GameInput Distance2
dragVec = arr (giPDS # pdsDragVec)
-- | Whether a drag is currently in progress.
dragging :: SF GameInput Bool
dragging = arr (giPDS # pdsDrag # isJust)
------------------------------------------------------------------------------
-- Lexical analysis of character input
------------------------------------------------------------------------------
-- Currently overkill, but being able to enter multi-character commands
-- could possibly be useful.
-- | Extract key events from the window-event stream as
-- @(character, isDown)@ pairs; all other events are filtered out.
wiToKs :: SF WinInput (Event (Char,Bool))
wiToKs = arr (mapFilterE getKs)
    where
      getKs (HGL.Char {HGL.char=c, HGL.isDown = ks}) = Just (c,ks)
      getKs _ = Nothing
-- | Run key-down characters through the command scanner.  Output is the
-- current command prefix (held between keystrokes, initially empty)
-- paired with an event stream of completed commands.
wiToCmd :: SF WinInput (String, Event Command)
wiToCmd = arr (mapFilterE selChar)
          >>> (accumBy scanChar (undefined,scanCmds) >>^ fmap fst >>^ splitE)
          >>> hold "" *** arr (mapFilterE id)
    where
        -- Feed one character into the current scanner continuation.
        scanChar (_, S cont) c = cont c
        -- Only key-down character events drive the scanner.
        selChar (HGL.Char {HGL.char=c, HGL.isDown = True}) = Just c
        selChar _ = Nothing
-- This ought to be redone. Kont should probably be called Transition or
-- something.
-- We define a continuation to be the command recognized thus far (a String
-- and maybe a complete Command), and a scanner to be applied to the rest
-- of the input. (I.e., there's output at every step.)
type Kont = ((String, Maybe Command), Scanner)
type Cont a = a -> Kont
-- Since a scanner is applied to one character at a time (typically, on
-- Char events), we recursively define a scanner to be a character
-- continuation.
newtype Scanner = S (Cont Char)
-- Scan commands
-- | The top-level command scanner: recognizes the single-letter game
-- commands and restarts itself after each completed command.
scanCmds :: Scanner
scanCmds = scanCmd table
    where
        table = [ ("q", emitCmd scanCmds CmdQuit)    -- Discard inp.?
                , ("p", emitCmd scanCmds CmdNewGame)
                , ("f", emitCmd scanCmds CmdFreeze)
                , ("r", emitCmd scanCmds CmdResume)
                ]
-- Scan one command.
-- Looks for a valid command. Outputs prefix as long as the current
-- prefix is valid. Starts over on first invalid character. Invokes success
-- continuation on success.
-- cmds ....... List of pairs of valid command and corresponding success
-- continuation.
-- | Scan one command: a subcommand scan starting from the empty prefix.
scanCmd :: [(String, Cont String)] -> Scanner
scanCmd = scanSubCmd ""
-- Scan one subcommand/keyword argument.
-- Looks for a valid command. Outputs prefix as long as the current
-- prefix is valid. Starts over on first invalid character. Invokes success
-- continuation on success.
-- pfx0 ....... Initial prefix.
-- cmds ....... List of pairs of valid command and corresponding success
-- continuation.
scanSubCmd :: String -> [(String, Cont String)] -> Scanner
scanSubCmd pfx0 cmds = S (scHlp pfx0 cmds)
    where
        -- pfx ........ Command prefix.
        -- sfxconts ... Command suffixes paired with success continuations.
        -- c .......... Input character.
        scHlp pfx sfxconts c =
            case c of
              -- RETURN: commit if exactly this prefix is a complete
              -- command; otherwise keep scanning unchanged.
              '\r' ->
                  case [ cont | ("", cont) <- sfxconts ] of
                    [] -> emitPfx (S (scHlp pfx sfxconts)) pfx
                    (cont : _) -> cont pfx
              -- '.' requests completion: succeed if a unique suffix
              -- remains, restart if none, otherwise extend the prefix
              -- by the longest common prefix of the candidates.
              '.' ->
                  case sfxconts of
                    [] -> emitPfx (S (scHlp pfx0 cmds)) pfx0
                    [(sfx, cont)] -> cont (pfx ++ sfx)
                    _ ->
                        let
                            (sfxs, conts) = unzip sfxconts
                            cpfx = foldr1 lcp sfxs
                            sfxs' = map (drop (length cpfx)) sfxs
                            pfx' = pfx ++ cpfx
                            sfxconts' = zip sfxs' conts
                        in
                            emitPfx (S (scHlp pfx' sfxconts')) pfx'
              -- Ordinary character: narrow the candidate set to the
              -- suffixes starting with it; restart on no match, succeed
              -- on an exact unique match.
              _ ->
                  let
                      pfx' = pfx ++ [c]
                      sfxconts' = [ (tail sfx, cont) | (sfx, cont) <- sfxconts, not (null sfx) && head sfx == c]
                  in
                      case sfxconts' of
                        [] -> emitPfx (S (scHlp pfx0 cmds))
                                      pfx0
                                      -- ("Invalid: " ++ [c])
                        [("", cont)] -> cont pfx'
                        _ -> emitPfx (S (scHlp pfx' sfxconts'))
                                     pfx'
-- Emit a completed command (and its command string), then continue
-- scanning with the given scanner.
emitCmd :: Scanner -> Command -> String -> Kont
emitCmd scanner cmd cmdStr = ((cmdStr, Just cmd), scanner)
-- Emit the current prefix only (no completed command), then scan the
-- next character with the given scanner.
emitPfx :: Scanner -> String -> Kont
emitPfx scanner pfx = ((pfx, Nothing), scanner)
------------------------------------------------------------------------------
-- Pointing device processing
------------------------------------------------------------------------------
-- State of the pointing device.
-- The points for pdsLeft, pdsRight, and pdsDrag reflect where the button
-- was initially pressed.
data PDState = PDState {
    pdsPos :: !Position2, -- Current position.
    pdsDragStartPos :: !Position2, -- (Last) drag start position.
    pdsDragVec :: !Distance2, -- (Latest) drag vector.
    pdsLeft :: !(Maybe Position2), -- Press position while left button down.
    pdsRight :: !(Maybe Position2), -- Press position while right button down.
    pdsDrag :: !(Maybe Position2) -- Drag anchor position while dragging.
}
-- Initial state.
-- Pointer at the origin, no buttons down, not dragging.
initPDS :: PDState
initPDS = PDState {
    pdsPos = origin,
    pdsDragStartPos = origin,
    pdsDragVec = zeroVector,
    pdsLeft = Nothing,
    pdsRight = Nothing,
    pdsDrag = Nothing
}
-- | Accumulate the pointing-device state over the window-event stream,
-- starting from 'initPDS' and holding the latest state between events.
wiToPDS :: SF WinInput PDState
wiToPDS = accumHoldBy nextPDS initPDS
-- Compute next pointing device state.
-- State transition for one window event.  Button presses record the
-- press position; releases end a drag unless the other button is still
-- down; mouse moves start/continue a drag when a button is held.  The
-- drag vector is always measured from the drag anchor to the current
-- position.
nextPDS :: PDState -> HGL.Event -> PDState
--nextPDS pds (HGL.Key {}) = pds    -- Currently we ignore keys.
nextPDS pds (HGL.Button {HGL.pt = p, HGL.isLeft = True, HGL.isDown = True}) =
    -- Left button pressed.
    pds {pdsPos = p', pdsDragVec = dv, pdsLeft = Just p'}
    where
        p' = gPointToPosition2 p
        dv = maybe (pdsDragVec pds) (\dspos -> p' .-. dspos) (pdsDrag pds)
nextPDS pds (HGL.Button {HGL.pt = p, HGL.isLeft = True, HGL.isDown = False}) =
    -- Left button released.  Keep dragging only if the right button is
    -- still down.
    pds {pdsPos = p', pdsDragVec = dv, pdsLeft = Nothing, pdsDrag = md}
    where
        p' = gPointToPosition2 p
        md = maybe Nothing (const (pdsDrag pds)) (pdsRight pds)
        dv = maybe (pdsDragVec pds) (\dspos -> p' .-. dspos) md
nextPDS pds (HGL.Button {HGL.pt = p, HGL.isLeft = False, HGL.isDown = True}) =
    -- Right button pressed.
    pds {pdsPos = p', pdsDragVec = dv, pdsRight = Just p'}
    where
        p' = gPointToPosition2 p
        dv = maybe (pdsDragVec pds) (\dspos -> p' .-. dspos) (pdsDrag pds)
nextPDS pds (HGL.Button {HGL.pt = p, HGL.isLeft = False, HGL.isDown = False}) =
    -- Right button released.  Keep dragging only if the left button is
    -- still down.
    pds {pdsPos = p', pdsDragVec = dv, pdsRight = Nothing, pdsDrag = md}
    where
        p' = gPointToPosition2 p
        md = maybe Nothing (const (pdsDrag pds)) (pdsLeft pds)
        dv = maybe (pdsDragVec pds) (\dspos -> p' .-. dspos) md
nextPDS pds (HGL.MouseMove {HGL.pt = p}) =
    -- Mouse move.  A move with a button held (left takes precedence)
    -- starts or continues a drag anchored at the press position.
    pds {pdsPos = p', pdsDragStartPos = dsp, pdsDragVec = dv, pdsDrag = md}
    where
        p' = gPointToPosition2 p
        md = case pdsLeft pds of
                 mlp@(Just _) -> mlp
                 Nothing -> pdsRight pds
        dsp = maybe (pdsDragStartPos pds) id md
        dv = maybe (pdsDragVec pds) (\dspos -> p' .-. dspos) md
nextPDS pds _ = pds    -- Ignore unknown events.
-- | Convert an HGL pixel point to the game's 'Position2' type.
gPointToPosition2 :: HGL.Point -> Position2
gPointToPosition2 (HGL.Point (x, y)) =
    Point2 (fromIntegral x) (fromIntegral y)
------------------------------------------------------------------------------
-- General utilities
------------------------------------------------------------------------------
-- | Longest common prefix of two lists: keep elements from the front
-- for as long as the two lists agree.
lcp :: Eq a => [a] -> [a] -> [a]
lcp xs ys = map fst (takeWhile (uncurry (==)) (zip xs ys))
| snowmantw/Frag | src/Parser.hs | gpl-2.0 | 14,087 | 36 | 24 | 5,188 | 2,962 | 1,675 | 1,287 | 225 | 8 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UndecidableInstances #-}
{-|
Internal types and accessors. There are no guarantees that heist will
preserve backwards compatibility for symbols in this module. If you use them,
no complaining when your code breaks.
-}
module Heist.Internal.Types
( module Heist.Internal.Types.HeistState
, module Heist.Internal.Types
) where
------------------------------------------------------------------------------
import Data.HashMap.Strict (HashMap)
import Data.Text (Text)
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
import Data.Monoid
#endif
------------------------------------------------------------------------------
import qualified Heist.Compiled.Internal as C
import qualified Heist.Interpreted.Internal as I
import Heist.Internal.Types.HeistState
------------------------------------------------------------------------------
------------------------------------------------------------------------------
-- | A collection of parsed templates, keyed by template path.
type TemplateRepo = HashMap TPath DocumentFile
------------------------------------------------------------------------------
-- | An IO action for getting a template repo from this location. By not just
-- using a directory path here, we support templates loaded from a database,
-- retrieved from the network, or anything else you can think of.
type TemplateLocation = IO (Either [String] TemplateRepo)
------------------------------------------------------------------------------
-- | Minimal van Laarhoven lens constructor, defined locally to avoid a
-- dependency on the lens package.  Given a getter and a setter it
-- builds a lens usable with any 'Functor'.
lens :: Functor f => (t1 -> t) -> (t1 -> a -> b) -> (t -> f a) -> t1 -> f b
lens get set f s = fmap (set s) (f (get s))
------------------------------------------------------------------------------
-- | The splices and templates Heist will use. To bind a splice simply
-- include it in the appropriate place here.
-- NOTE: combined field-wise by the 'Monoid' instance for 'SpliceConfig'.
data SpliceConfig m = SpliceConfig
    { _scInterpretedSplices :: Splices (I.Splice m)
        -- ^ Interpreted splices are the splices that Heist has always had.
        -- They return a list of nodes and are processed at runtime.
    , _scLoadTimeSplices :: Splices (I.Splice IO)
        -- ^ Load time splices are like interpreted splices because they
        -- return a list of nodes.  But they are like compiled splices because
        -- they are processed once at load time.  All of Heist's built-in
        -- splices should be used as load time splices.
    , _scCompiledSplices :: Splices (C.Splice m)
        -- ^ Compiled splices return a DList of Chunks and are processed at
        -- load time to generate a runtime monad action that will be used to
        -- render the template.
    , _scAttributeSplices :: Splices (AttrSplice m)
        -- ^ Attribute splices are bound to attribute names and return a list
        -- of attributes.
    , _scTemplateLocations :: [TemplateLocation]
        -- ^ A list of all the locations that Heist should get its templates
        -- from.
    }
------------------------------------------------------------------------------
-- | Lens for interpreted splices
-- :: Simple Lens (SpliceConfig m) (Splices (I.Splice m))
scInterpretedSplices
    :: Functor f
    => (Splices (I.Splice m) -> f (Splices (I.Splice m)))
    -> SpliceConfig m -> f (SpliceConfig m)
scInterpretedSplices =
    lens _scInterpretedSplices (\sc v -> sc { _scInterpretedSplices = v })


------------------------------------------------------------------------------
-- | Lens for load time splices
-- :: Simple Lens (SpliceConfig m) (Splices (I.Splice IO))
scLoadTimeSplices
    :: Functor f
    => (Splices (I.Splice IO) -> f (Splices (I.Splice IO)))
    -> SpliceConfig m -> f (SpliceConfig m)
scLoadTimeSplices =
    lens _scLoadTimeSplices (\sc v -> sc { _scLoadTimeSplices = v })


------------------------------------------------------------------------------
-- | Lens for compiled splices
-- :: Simple Lens (SpliceConfig m) (Splices (C.Splice m))
scCompiledSplices
    :: Functor f
    => (Splices (C.Splice m) -> f (Splices (C.Splice m)))
    -> SpliceConfig m -> f (SpliceConfig m)
scCompiledSplices =
    lens _scCompiledSplices (\sc v -> sc { _scCompiledSplices = v })


------------------------------------------------------------------------------
-- | Lens for attribute splices
-- :: Simple Lens (SpliceConfig m) (Splices (AttrSplice m))
scAttributeSplices
    :: Functor f
    => (Splices (AttrSplice m) -> f (Splices (AttrSplice m)))
    -> SpliceConfig m -> f (SpliceConfig m)
scAttributeSplices =
    lens _scAttributeSplices (\sc v -> sc { _scAttributeSplices = v })


------------------------------------------------------------------------------
-- | Lens for template locations
-- :: Simple Lens (SpliceConfig m) [TemplateLocation]
scTemplateLocations
    :: Functor f
    => ([TemplateLocation] -> f [TemplateLocation])
    -> SpliceConfig m -> f (SpliceConfig m)
scTemplateLocations =
    lens _scTemplateLocations (\sc v -> sc { _scTemplateLocations = v })
-- NOTE(review): 'mappend' is defined directly on the 'Monoid' class
-- here; on GHC >= 8.4, where 'Semigroup' is a superclass of 'Monoid',
-- a separate 'Semigroup' instance would also be required -- confirm the
-- supported compiler range.
instance Monoid (SpliceConfig m) where
    -- The empty configuration: no splices, no template locations.
    mempty = SpliceConfig mempty mempty mempty mempty mempty
    -- Field-wise combination of two configurations.
    mappend (SpliceConfig a1 b1 c1 d1 e1) (SpliceConfig a2 b2 c2 d2 e2) =
      SpliceConfig (mappend a1 a2) (mappend b1 b2) (mappend c1 c2)
                   (mappend d1 d2) (mappend e1 e2)
-- | Top-level Heist configuration: the splice/template configuration
-- plus namespace handling options.
data HeistConfig m = HeistConfig
    { _hcSpliceConfig  :: SpliceConfig m
        -- ^ Splices and templates
    , _hcNamespace     :: Text
        -- ^ A namespace to use for all tags that are bound to splices.  Use
        -- empty string for no namespace.
    , _hcErrorNotBound :: Bool
        -- ^ Whether to throw an error when a tag with the heist namespace does
        -- not correspond to a bound splice.  When not using a namespace, this
        -- flag is ignored.
    }
------------------------------------------------------------------------------
-- | Lens for the SpliceConfig
-- :: Simple Lens (HeistConfig m) (SpliceConfig m)
hcSpliceConfig
    :: Functor f
    => ((SpliceConfig m) -> f (SpliceConfig m))
    -> HeistConfig m -> f (HeistConfig m)
hcSpliceConfig = lens _hcSpliceConfig (\hc v -> hc { _hcSpliceConfig = v })


------------------------------------------------------------------------------
-- | Lens for the namespace
-- :: Simple Lens (HeistConfig m) Text
hcNamespace
    :: Functor f
    => (Text -> f Text)
    -> HeistConfig m -> f (HeistConfig m)
hcNamespace = lens _hcNamespace (\hc v -> hc { _hcNamespace = v })


------------------------------------------------------------------------------
-- | Lens for the namespace error flag
-- :: Simple Lens (HeistConfig m) Bool
hcErrorNotBound
    :: Functor f
    => (Bool -> f Bool)
    -> HeistConfig m -> f (HeistConfig m)
hcErrorNotBound = lens _hcErrorNotBound (\hc v -> hc { _hcErrorNotBound = v })
------------------------------------------------------------------------------
-- | Lens for interpreted splices; composes 'hcSpliceConfig' with the
-- corresponding 'SpliceConfig' lens.
-- :: Simple Lens (HeistConfig m) (Splices (I.Splice m))
hcInterpretedSplices
    :: Functor f
    => (Splices (I.Splice m) -> f (Splices (I.Splice m)))
    -> HeistConfig m -> f (HeistConfig m)
hcInterpretedSplices = hcSpliceConfig . scInterpretedSplices


------------------------------------------------------------------------------
-- | Lens for load time splices
-- :: Simple Lens (HeistConfig m) (Splices (I.Splice IO))
hcLoadTimeSplices
    :: Functor f
    => (Splices (I.Splice IO) -> f (Splices (I.Splice IO)))
    -> HeistConfig m -> f (HeistConfig m)
hcLoadTimeSplices = hcSpliceConfig . scLoadTimeSplices


------------------------------------------------------------------------------
-- | Lens for compiled splices
-- :: Simple Lens (HeistConfig m) (Splices (C.Splice m))
hcCompiledSplices
    :: Functor f
    => (Splices (C.Splice m) -> f (Splices (C.Splice m)))
    -> HeistConfig m -> f (HeistConfig m)
hcCompiledSplices = hcSpliceConfig . scCompiledSplices


------------------------------------------------------------------------------
-- | Lens for attribute splices
-- :: Simple Lens (HeistConfig m) (Splices (AttrSplice m))
hcAttributeSplices
    :: Functor f
    => (Splices (AttrSplice m) -> f (Splices (AttrSplice m)))
    -> HeistConfig m -> f (HeistConfig m)
hcAttributeSplices = hcSpliceConfig . scAttributeSplices


------------------------------------------------------------------------------
-- | Lens for template locations
-- :: Simple Lens (HeistConfig m) [TemplateLocation]
hcTemplateLocations
    :: Functor f
    => ([TemplateLocation] -> f [TemplateLocation])
    -> HeistConfig m -> f (HeistConfig m)
hcTemplateLocations = hcSpliceConfig . scTemplateLocations
| sopvop/heist | src/Heist/Internal/Types.hs | bsd-3-clause | 9,076 | 0 | 14 | 1,752 | 1,564 | 842 | 722 | 110 | 1 |
{-|
Module : System.GPIO.Linux.Sysfs.Types
Description : Types for Linux @sysfs@ GPIO
Copyright : (c) 2019, Drew Hess
License : BSD3
Maintainer : Drew Hess <[email protected]>
Stability : experimental
Portability : non-portable
Types used by the various Linux @sysfs@ GPIO implementations.
-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE Safe #-}
module System.GPIO.Linux.Sysfs.Types
( -- * @sysfs@-specific types
SysfsEdge(..)
, toPinInterruptMode
, toSysfsEdge
-- * Exceptions
, SysfsException(..)
) where
import Protolude
import Data.Data (Data)
import Test.QuickCheck (Arbitrary(..), arbitraryBoundedEnum, genericShrink)
import System.GPIO.Types
(Pin, PinInputMode, PinOutputMode, PinInterruptMode(..),
gpioExceptionToException, gpioExceptionFromException)
-- | Linux GPIO pins that can be configured to generate inputs have an
-- @edge@ attribute in the @sysfs@ GPIO filesystem. This type
-- represents the values that the @edge@ attribute can take.
--
-- Note that in Linux @sysfs@ GPIO, the signal edge referred to by the
-- @edge@ attribute refers to the signal's /logical/ value; i.e., it
-- takes into account the value of the pin's @active_low@ attribute.
--
-- This type is isomorphic to the 'PinInterruptMode' type. See
-- 'toPinInterruptMode' and 'toSysfsEdge'.
data SysfsEdge
  = None
    -- ^ Interrupts disabled
  | Rising
    -- ^ Interrupt on the (logical) signal's rising edge
  | Falling
    -- ^ Interrupt on the (logical) signal's falling edge
  | Both
    -- ^ Interrupt on any change to the signal level
  deriving (Bounded,Enum,Eq,Data,Ord,Read,Show,Generic,Typeable)
-- | Uniform generator over the four edge settings (via the
-- 'Bounded'/'Enum' instances); shrinking uses 'genericShrink'.
instance Arbitrary SysfsEdge where
  arbitrary = arbitraryBoundedEnum
  shrink = genericShrink
-- | Convert a 'SysfsEdge' value to its equivalent 'PinInterruptMode'
-- value.
--
-- >>> toPinInterruptMode None
-- Disabled
-- >>> toPinInterruptMode Rising
-- RisingEdge
-- >>> toPinInterruptMode Falling
-- FallingEdge
-- >>> toPinInterruptMode Both
-- Level
toPinInterruptMode :: SysfsEdge -> PinInterruptMode
toPinInterruptMode e =
  case e of
    None -> Disabled
    Rising -> RisingEdge
    Falling -> FallingEdge
    Both -> Level
-- | Convert a 'PinInterruptMode' value to its equivalent 'SysfsEdge'
-- value.
--
-- >>> toSysfsEdge Disabled
-- None
-- >>> toSysfsEdge RisingEdge
-- Rising
-- >>> toSysfsEdge FallingEdge
-- Falling
-- >>> toSysfsEdge Level
-- Both
toSysfsEdge :: PinInterruptMode -> SysfsEdge
toSysfsEdge mode =
  case mode of
    Disabled -> None
    RisingEdge -> Rising
    FallingEdge -> Falling
    Level -> Both
-- | Exceptions that can be thrown by @sysfs@ computations (in
-- addition to standard 'System.IO.Error.IOError' exceptions, of
-- course).
--
-- The @UnexpectedX@ values are truly exceptional and mean that, while
-- the @sysfs@ attribute for the given pin exists, the contents of the
-- attribute do not match any expected value for that attribute, which
-- probably means that the package is incompatible with the @sysfs@
-- filesystem due to a kernel-level change.
data SysfsException
  = SysfsNotPresent
    -- ^ The @sysfs@ filesystem does not exist
  | SysfsError
    -- ^ Something in the @sysfs@ filesystem does not behave as
    -- expected (could indicate a change in @sysfs@ behavior that the
    -- package does not expect)
  | SysfsPermissionDenied
    -- ^ The @sysfs@ operation is not permitted due to insufficient
    -- permissions
  | PermissionDenied Pin
    -- ^ The operation on the specified pin is not permitted, either
    -- due to insufficient permissions, or because the pin's attribute
    -- cannot be modified (e.g., trying to write to a pin that's
    -- configured for input)
  | InvalidOperation Pin
    -- ^ The operation is invalid for the specified pin, or in the
    -- specified pin's current configuration
  | AlreadyExported Pin
    -- ^ The pin has already been exported
  | InvalidPin Pin
    -- ^ The specified pin does not exist
  | NotExported Pin
    -- ^ The pin has been un-exported or does not exist
  | UnsupportedInputMode PinInputMode Pin
    -- ^ The pin does not support the specified input mode
  | UnsupportedOutputMode PinOutputMode Pin
    -- ^ The pin does not support the specified output mode
  | NoDirectionAttribute Pin
    -- ^ The pin does not have a @direction@ attribute
  | NoEdgeAttribute Pin
    -- ^ The pin does not have an @edge@ attribute
  | UnexpectedDirection Pin Text
    -- ^ An unexpected value was read from the pin's @direction@
    -- attribute (the offending value is included)
  | UnexpectedValue Pin Text
    -- ^ An unexpected value was read from the pin's @value@
    -- attribute (the offending value is included)
  | UnexpectedEdge Pin Text
    -- ^ An unexpected value was read from the pin's @edge@
    -- attribute (the offending value is included)
  | UnexpectedActiveLow Pin Text
    -- ^ An unexpected value was read from the pin's @active_low@
    -- attribute (the offending value is included)
  | UnexpectedContents FilePath Text
    -- ^ An unexpected value was read from the specified file
  | InternalError Text
    -- ^ An internal error has occurred in the interpreter, something
    -- which should "never happen" and should be reported to the
    -- package maintainer
  deriving (Eq,Show,Typeable)
-- | 'SysfsException' participates in the package-wide GPIO exception
-- hierarchy via the conversion helpers from "System.GPIO.Types".
instance Exception SysfsException where
  toException = gpioExceptionToException
  fromException = gpioExceptionFromException
| dhess/gpio | src/System/GPIO/Linux/Sysfs/Types.hs | bsd-3-clause | 5,362 | 0 | 6 | 1,035 | 466 | 305 | 161 | 57 | 1 |
-- | benchmarking Base.Animation.pickAnimationFrame
import Utils.Tests
import Base.Animation
import Test.QuickCheck
import Criterion.Main
-- | First check (via QuickCheck) that all implementations agree, then
-- benchmark the current and previous implementations with criterion.
main :: IO ()
main = do
    quickCheck (equalImplementation pickAnimationFrameNaive pickAnimationFrameOld)
    quickCheck (equalImplementation pickAnimationFrameNaive
        (\ a b c -> pickAnimationFrame (mkAnimation a b) c))
    defaultMain
        [ bench "pickAnimationFrame" new
        , bench "pickAnimationFrameOld" old
        -- , bench "pickAnimationFrameNaive" naive
        ]
type Seconds = Double
-- | Common type of the implementations under test:
-- frames -> frame durations -> current time -> selected frame.
type ImplType = [Char] -> [Seconds] -> Seconds -> Char
-- | QuickCheck property: the two implementations agree for every
-- non-empty frame list, non-empty list of positive frame times, and
-- positive current time.  (@?=@ is the comparison from Utils.Tests --
-- presumably an equality property; confirm there.)
equalImplementation :: ImplType -> ImplType -> Property
equalImplementation a b =
    property $
    \ list -> not (null list) ==>
    forAllShrink (listOf positive) shrink $ \ frameTimes ->
    not (null frameTimes) ==>
    forAllShrink positive shrink $ \ now ->
    (a list frameTimes now ?= b list frameTimes now)
  where
    positive :: Gen Double
    positive = suchThat arbitrary (> 0)
-- Benchmark: current implementation, 8 frames with a repeating
-- duration pattern, sampled at times [0, 0.1 .. 300].
new = nf
    (map (pickAnimationFrame (mkAnimation (take 8 ['a' .. 'z']) [0.3, 0.3, 0.2, 1])))
    [0, 0.1 .. 300]
-- Benchmark: previous implementation on the same inputs.
old = nf
    (map (pickAnimationFrameOld (take 8 ['a' .. 'z']) [0.3, 0.3, 0.2, 1]))
    [0, 0.1 .. 300]
-- Benchmark: naive reference implementation on the same inputs.
naive = nf
    (map (pickAnimationFrameNaive (take 8 ['a' .. 'z']) [0.3, 0.3, 0.2, 1]))
    [0, 0.1 .. 300]
| geocurnoff/nikki | src/benchmarks/pickAnimationFrame.hs | lgpl-3.0 | 1,332 | 2 | 13 | 304 | 466 | 245 | 221 | 33 | 1 |
{-
values:
eq :: (Bool, Bool) -> Bool
le :: (Bool, Bool) -> Bool
ne :: (Bool, Bool) -> Bool
neg :: Bool -> Bool
vee :: (Bool, Bool) -> Bool
wedge :: (Bool, Bool) -> Bool
scope:
Prelude.eq |-> Prelude.eq, Value
Prelude.le |-> Prelude.le, Value
Prelude.ne |-> Prelude.ne, Value
Prelude.neg |-> Prelude.neg, Value
Prelude.vee |-> Prelude.vee, Value
Prelude.wedge |-> Prelude.wedge, Value
eq |-> Prelude.eq, Value
le |-> Prelude.le, Value
ne |-> Prelude.ne, Value
neg |-> Prelude.neg, Value
vee |-> Prelude.vee, Value
wedge |-> Prelude.wedge, Value
-}
module Dummy where
-- | Boolean equivalence (if and only if).
eq :: (Bool, Bool) -> Bool
-- | Boolean implication: @le (x, y)@ is @x@ implies @y@.
le :: (Bool, Bool) -> Bool
-- | Boolean inequality (exclusive or).
ne :: (Bool, Bool) -> Bool
-- | Boolean negation.
neg :: Bool -> Bool
-- | Boolean disjunction.
vee :: (Bool, Bool) -> Bool
-- | Boolean conjunction.
wedge :: (Bool, Bool) -> Bool
neg False = True
neg True = False
wedge (True, True) = True
wedge _ = False
-- Disjunction via De Morgan's law.
vee (a, b) = neg (wedge (neg a, neg b))
le (a, b) = vee (neg a, b)
eq (a, b) = wedge (le (a, b), le (b, a))
ne (a, b) = wedge (vee (a, b), neg (wedge (a, b)))
| keithodulaigh/Hets | ToHaskell/test/BoolEx.hascasl.hs | gpl-2.0 | 1,138 | 0 | 10 | 288 | 334 | 189 | 145 | 21 | 4 |
-- | A test for ensuring that GHC's supporting language extensions remains in
-- sync with Cabal's own extension list.
--
-- If you have ended up here due to a test failure, please see
-- Note [Adding a language extension] in compiler/main/DynFlags.hs.
module Main (main) where
import Control.Monad
import Data.List
import DynFlags
import Language.Haskell.Extension
-- | Compare GHC's supported extensions against Cabal's list and report
-- any differences that are not explicitly whitelisted below.
main :: IO ()
main = do
    let ghcExtensions = map flagSpecName xFlags
        cabalExtensions = map show [ toEnum 0 :: KnownExtension .. ]
    check "GHC-only flags" expectedGhcOnlyExtensions
          (ghcExtensions \\ cabalExtensions)
    check "Cabal-only flags" expectedCabalOnlyExtensions
          (cabalExtensions \\ ghcExtensions)
-- | Report (on stdout) any flags that were seen but not expected, and
-- any that were expected but not seen.  Prints nothing when both sets
-- match.
check :: String -> [String] -> [String] -> IO ()
check title expected got = do
    report "Unexpected flags" (got \\ expected)
    report "Missing flags" (expected \\ got)
  where
    report problemType problems =
        unless (null problems) $
            do putStrLn (title ++ ": " ++ problemType)
               putStrLn "-----"
               mapM_ putStrLn problems
               putStrLn "-----"
               putStrLn ""
-- See Note [Adding a language extension] in compiler/main/DynFlags.hs.
-- | Extensions GHC knows about but Cabal intentionally does not.
expectedGhcOnlyExtensions :: [String]
expectedGhcOnlyExtensions =
    [ "RelaxedLayout"
    , "AlternativeLayoutRule"
    , "AlternativeLayoutRuleTransitional"
    , "TypeFamilyDependencies"
    ]
-- | Extensions Cabal knows about but GHC intentionally does not.
expectedCabalOnlyExtensions :: [String]
expectedCabalOnlyExtensions =
    [ "Generics"
    , "ExtensibleRecords"
    , "RestrictedTypeSynonyms"
    , "HereDocuments"
    , "NewQualifiedOperators"
    , "XmlSyntax"
    , "RegularPatterns"
    , "SafeImports"
    , "Safe"
    , "Unsafe"
    , "Trustworthy"
    ]
| tjakway/ghcjvm | testsuite/tests/driver/T4437.hs | bsd-3-clause | 2,262 | 0 | 16 | 824 | 336 | 179 | 157 | 43 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Actions.Commands
-- Copyright : (c) David Glasser 2007
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : stable
-- Portability : portable
--
-- Allows you to run internal xmonad commands (X () actions) using
-- a dmenu menu in addition to key bindings. Requires dmenu and
-- the Dmenu XMonad.Actions module.
--
-----------------------------------------------------------------------------
module XMonad.Actions.Commands (
-- * Usage
-- $usage
commandMap,
runCommand,
runCommand',
workspaceCommands,
screenCommands,
defaultCommands
) where
import XMonad
import XMonad.StackSet hiding (workspaces)
import XMonad.Util.Dmenu (dmenu)
import qualified Data.Map as M
import System.Exit
import Data.Maybe
-- $usage
--
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Actions.Commands
--
-- Then add a keybinding to the runCommand action:
--
-- > , ((modm .|. controlMask, xK_y), commands >>= runCommand)
--
-- and define the list of commands you want to use:
--
-- > commands :: X [(String, X ())]
-- > commands = defaultCommands
--
-- Whatever key you bound to will now cause a popup menu of internal
-- xmonad commands to appear. You can change the commands by changing
-- the contents of the list returned by 'commands'. (If you like it
-- enough, you may even want to get rid of many of your other key
-- bindings!)
--
-- For detailed instructions on editing your key bindings, see
-- "XMonad.Doc.Extending#Editing_key_bindings".
-- | Create a 'Data.Map.Map' from @String@s to xmonad actions from a
-- list of pairs.
-- | Build the lookup table from command names to xmonad actions.
commandMap :: [(String, X ())] -> M.Map String (X ())
commandMap = M.fromList
-- | Generate a list of commands to switch to\/send windows to workspaces.
-- For each configured workspace tag @i@, produce a @\"view\"i@ command
-- (switch to it) and a @\"shift\"i@ command (send the focused window).
workspaceCommands :: X [(String, X ())]
workspaceCommands = asks (workspaces . config) >>= \spaces -> return
                        [((m ++ show i), windows $ f i)
                            | i <- spaces
                            , (f, m) <- [(view, "view"), (shift, "shift")] ]
-- | Generate a list of commands dealing with multiple screens.
-- For screens 0 and 1, produce @\"screen\"n@ (view that screen's
-- workspace) and @\"screen-to-\"n@ (shift the focused window there).
screenCommands :: [(String, X ())]
screenCommands = [((m ++ show sc), screenWorkspace (fromIntegral sc) >>= flip whenJust (windows . f))
                      | sc <- [0, 1]::[Int] -- TODO: adapt to screen changes
                      , (f, m) <- [(view, "screen"), (shift, "screen-to-")]
                 ]
-- | A nice pre-defined list of commands.
defaultCommands :: X [(String, X ())]
defaultCommands = do
    wscmds <- workspaceCommands
    return $ wscmds ++ screenCommands ++ otherCommands
 where
    -- Miscellaneous layout, window-management and session commands.
    otherCommands =
        [ ("shrink"              , sendMessage Shrink                               )
        , ("expand"              , sendMessage Expand                               )
        , ("next-layout"         , sendMessage NextLayout                           )
        , ("default-layout"      , asks (layoutHook . config) >>= setLayout         )
        , ("restart-wm"          , restart "xmonad" True                            )
        , ("restart-wm-no-resume", restart "xmonad" False                           )
        , ("xterm"               , spawn =<< asks (terminal . config)               )
        , ("run"                 , spawn "exe=`dmenu_path | dmenu -b` && exec $exe" )
        , ("kill"                , kill                                             )
        , ("refresh"             , refresh                                          )
        , ("focus-up"            , windows focusUp                                  )
        , ("focus-down"          , windows focusDown                                )
        , ("swap-up"             , windows swapUp                                   )
        , ("swap-down"           , windows swapDown                                 )
        , ("swap-master"         , windows swapMaster                               )
        , ("sink"                , withFocused $ windows . sink                     )
        , ("quit-wm"             , io $ exitWith ExitSuccess                        )
        ]
-- | Given a list of command\/action pairs, prompt the user to choose a
-- command and return the corresponding action.
-- | Pop up a dmenu listing the given command names and run the action
-- the user picks; do nothing if the choice is not in the list.
runCommand :: [(String, X ())] -> X ()
runCommand cl = dmenu (M.keys m) >>= run
  where
    m = commandMap cl
    run choice = fromMaybe (return ()) (M.lookup choice m)
-- | Given the name of a command from 'defaultCommands', return the
-- corresponding action (or the null action if the command is not
-- found).
-- | Run the named command from 'defaultCommands', or do nothing if the
-- name is not found.
runCommand' :: String -> X ()
runCommand' c =
    defaultCommands >>= \cmds ->
        fromMaybe (return ()) (M.lookup c (commandMap cmds))
| pjones/xmonad-test | vendor/xmonad-contrib/XMonad/Actions/Commands.hs | bsd-2-clause | 5,060 | 0 | 12 | 1,879 | 848 | 493 | 355 | 55 | 1 |
module Main (main) where
import Data.List
-- Exercise 'genericLength' at two result types over a 10^7-element
-- list; the 'Int' case checks the length is computed without overflow
-- or excessive space use.
main :: IO ()
main = do print (genericLength [1..10000000] :: Int)
          print (genericLength [1..10000000] :: Integer)
| urbanslug/ghc | libraries/base/tests/length001.hs | bsd-3-clause | 169 | 0 | 10 | 36 | 69 | 37 | 32 | 5 | 1 |
module Main () where
import JSONClass
import PrettyJSON
import Prettify
-- Render a small sample JSON object and pretty-print it at width 10.
main :: IO ()
main = putStrLn rendered
  where
    rendered = pretty 10 doc
    doc      = renderJValue sample
    sample   = toJValue (JObj [("f", JNumber 1), ("q", JNumber 10)])
| pauldoo/scratch | RealWorldHaskell/ch06/Main.hs | isc | 201 | 0 | 14 | 40 | 78 | 44 | 34 | 6 | 1 |
{-# LANGUAGE TypeOperators
, EmptyDataDecls
, MultiParamTypeClasses
, FunctionalDependencies
, FlexibleInstances
, UndecidableInstances
, ScopedTypeVariables
#-}
module Measures.Unit.Internal (
) where
import Measures.Unit
import Measures.IntType
import Control.Monad (mzero)
import Utils (scalaGroupBy)
-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
-- Tag type: marks units whose decomposition is a single atom.
data UAtom
-- Tag type: marks units built from combinators (:*, :/, :^) or derived units.
data UComposite
-- | Associates a unit with its dispatch tag ('UAtom' or 'UComposite').
-- The functional dependency lets the tag be inferred from the unit alone.
class UnitDecompositionEvidence u underlying | u -> underlying
-- | Tag-indexed worker class; the extra 'underlying' argument is only a
-- type proxy used to select the matching instance.
class Unit' underlying u where
    unitDecomposition' :: underlying -> u -> UnitDecomposition
-- | Any unit with an evidence tag gets its 'Unit' instance by dispatching
-- through 'Unit''.  The 'undefined' proxy is never evaluated (used only
-- at the type level, via ScopedTypeVariables).
instance (UnitDecompositionEvidence u underlying, Unit' underlying u) => Unit u where
    unitDecomposition = unitDecomposition' (undefined :: underlying)
-- | An atomic unit decomposes to itself with power 1.
instance AtomicUnit u => Unit' UAtom u where
    unitDecomposition' _ atom = [(UnitAtom atom, 1)]
-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
-- | Merge two decompositions: entries with the same atom are grouped and
-- their powers summed; atoms whose powers cancel to zero are dropped.
joinUnitDecompositions :: UnitDecomposition -> UnitDecomposition -> UnitDecomposition
joinUnitDecompositions a b =
    [ (atom, total)
    | grp <- scalaGroupBy fst (a ++ b)          -- groups are non-empty by construction
    , let atom  = fst (head grp)
    , let total = sum (map snd grp)
    , total /= 0
    ]
-- | A product unit merges the decompositions of both factors.
instance (Unit' ua a, UnitDecompositionEvidence a ua, Unit' ub b, UnitDecompositionEvidence b ub) => CompositeUnit (a :* b) where
    unitDecompositionC (x :* y) = joinUnitDecompositions (unitDecomposition x) (unitDecomposition y)
-- | A quotient unit merges the numerator with the denominator's
-- decomposition after negating every power.
instance (Unit' ua a, UnitDecompositionEvidence a ua, Unit' ub b, UnitDecompositionEvidence b ub) => CompositeUnit (a :/ b) where
    unitDecompositionC (x :/ y) = joinUnitDecompositions (unitDecomposition x) (map (\(u, p) -> (u, -p) ) $ unitDecomposition y)
-- | Raising to a type-level integer power multiplies every exponent.
instance (Unit' ua a, UnitDecompositionEvidence a ua, IntType pow) => CompositeUnit (a :^ pow) where
    unitDecompositionC (x :^ pow) = map (\(u, p) -> (u, p * intValue pow)) $ unitDecomposition x
-- | Composite units delegate decomposition to 'CompositeUnit'.
instance CompositeUnit u => Unit' UComposite u where
    unitDecomposition' _ = unitDecompositionC
-- Any unit formed with the combinators is tagged composite.
instance (Unit a, Unit b) => UnitDecompositionEvidence (a :* b) UComposite
instance (Unit a, Unit b) => UnitDecompositionEvidence (a :/ b) UComposite
instance (Unit a, IntType pow) => UnitDecompositionEvidence (a :^ pow) UComposite
-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
-- Base physical quantities: each decomposes to a single atom of itself.
instance UnitDecompositionEvidence Time UAtom
instance UnitDecompositionEvidence Distance UAtom
instance UnitDecompositionEvidence Mass UAtom
instance UnitDecompositionEvidence Temperature UAtom
instance UnitDecompositionEvidence Angle UAtom
-- Derived quantities, decomposed via their defining combination of atoms.
instance UnitDecompositionEvidence Speed UComposite
instance CompositeUnit Speed where
    -- Speed as the derivative of distance (presumably distance/time;
    -- see 'd'' in Measures.Unit — TODO confirm).
    unitDecompositionC _ = unitDecomposition $ d' Distance
instance UnitDecompositionEvidence Acceleration UComposite
instance CompositeUnit Acceleration where
    -- Acceleration as the second derivative of distance.
    unitDecompositionC _ = unitDecomposition $ d'' Distance
instance UnitDecompositionEvidence Force UComposite
instance CompositeUnit Force where
    -- Newton's second law: F = m * a.
    unitDecompositionC _ = unitDecomposition $ Mass :* Acceleration
instance UnitDecompositionEvidence Impulse UComposite
instance CompositeUnit Impulse where
    -- Impulse (force * time) has the dimensions of momentum, i.e.
    -- mass * velocity.  The previous definition used mass * acceleration,
    -- which is Force (an apparent copy-paste of the instance above) and
    -- carries an extra 1/time factor.
    unitDecompositionC _ = unitDecomposition $ Mass :* Speed
instance UnitDecompositionEvidence Energy UComposite
instance CompositeUnit Energy where
    -- Work: energy = force applied over a distance.
    unitDecompositionC _ = unitDecomposition $ Distance :* Force
| fehu/hgt | core-universe/src/Measures/Unit/Internal.hs | mit | 3,951 | 0 | 13 | 985 | 893 | 460 | 433 | -1 | -1 |
type Rotor = [(Int,Int)]
type RotorArray = [Rotor]
type Key = [Int]

-- | The alphabet handled by the machine: a-z, A-Z, 0-9 and space (63 symbols).
charRotor :: [Char]
charRotor = ['a'..'z'] ++ ['A'..'Z'] ++ ['0'..'9'] ++ [' ']

-- | Highest valid index into 'charRotor'.
charLimit :: Int
charLimit = length charRotor - 1

-- | Reference wiring: position @x@ maps to @charLimit - x@.
initRotor :: Rotor
initRotor = [(x, charLimit - x) | x <- [0..charLimit]]

{- Functions To Simulate the Operation of Rotors -}

-- | Build one rotor per key element, each pre-rotated by its key value.
setupRotors :: Key -> RotorArray
setupRotors = map (rotateRotor initRotor)

-- | Step the rotor bank one position.  The top rotor always advances; a
-- deeper rotor advances only when the rotor above it exposes the position
-- recorded in the key (the mechanical "carry" of an Enigma-style machine).
-- If the key list runs out before the rotors do, the remaining rotors are
-- left untouched instead of crashing on a failed pattern match (the
-- previous version was non-exhaustive for short keys and the empty list).
rotateRotors :: RotorArray -> Key -> RotorArray
rotateRotors [] _ = []
rotateRotors [top] _ = [rotateRotor top 1]
rotateRotors (top:bottom) (_:nextKey:furtherKeys)
    | carry     = rotateRotor top 1 : rest
    | otherwise = top : rest
  where
    rest = rotateRotors bottom (nextKey:furtherKeys)
    -- Position the next rotor shows on its second contact; assumes every
    -- rotor has at least two entries (true for rotors built from 'initRotor').
    carry = fst (head (tail (head bottom))) == nextKey
rotateRotors rotors _ = rotors

-- | Rotate a rotor left by any amount.  The shift is taken modulo the
-- rotor length, so oversized and negative amounts wrap around instead of
-- recursing forever (the previous version never terminated on negative
-- input).  An empty rotor is returned unchanged.
rotateRotor :: Rotor -> Int -> Rotor
rotateRotor [] _ = []
rotateRotor rotor amount = back ++ front
  where
    -- `mod` keeps the split point in [0, length rotor).
    (front, back) = splitAt (amount `mod` length rotor) rotor

{- Functions To Search the Rotors -}

-- | Index of the first occurrence of a value in a list; fails with a
-- descriptive error when the value is absent (the previous version died
-- with an unhelpful "Prelude.head: empty list").
findPosition :: (Eq a) => [a] -> a -> Int
findPosition rotor character =
    case lookup character (zip rotor [0..]) of
        Just i  -> i
        Nothing -> error "findPosition: value not found"

-- | Index of the wiring entry whose output equals the given value.
getPosition :: Rotor -> Int -> Int
getPosition rotor value =
    case lookup value (zip (map snd rotor) [0..]) of
        Just i  -> i
        Nothing -> error "getPosition: output value not found"

-- | Thread a position forwards through every rotor in turn.  An empty
-- bank is the identity (previously a non-exhaustive pattern crash).
resolveRotors :: RotorArray -> Int -> Int
resolveRotors [] position = position
resolveRotors (top:bottom) position = resolveRotors bottom (snd (top !! position))

-- | Invert 'resolveRotors': thread an output value backwards through the bank.
resolveRotorsBackwards :: RotorArray -> Int -> Int
resolveRotorsBackwards [] position = position
resolveRotorsBackwards (top:bottom) position =
    getPosition top (resolveRotorsBackwards bottom position)
{- Code To Encode Text -}

-- | Encode a string, stepping the rotor bank once between characters:
-- character i is encoded with the bank rotated i times.
encodeString :: String -> RotorArray -> Key -> String
encodeString text rotors key = zipWith encodeChar rotorStates text
  where
    rotorStates = iterate (\rs -> rotateRotors rs key) rotors

-- | Encode one character through the bank, front to back.
encodeChar :: RotorArray -> Char -> Char
encodeChar rotors plain =
    charRotor !! resolveRotors rotors (findPosition charRotor plain)

-- | Encrypt plain text with rotors initialised from the key.
encrypt :: String -> Key -> String
encrypt plainText key = encodeString plainText (setupRotors key) key

{- Code To Decode Text -}

-- | Inverse of 'encodeString': same rotor stepping, inverse lookup.
decodeString :: String -> RotorArray -> Key -> String
decodeString text rotors key = zipWith decodeChar rotorStates text
  where
    rotorStates = iterate (\rs -> rotateRotors rs key) rotors

-- | Decode one character by threading it backwards through the bank.
decodeChar :: RotorArray -> Char -> Char
decodeChar rotors cipher =
    charRotor !! resolveRotorsBackwards rotors (findPosition charRotor cipher)

-- | Decrypt cipher text with rotors initialised from the key.
decrypt :: String -> Key -> String
decrypt cipherText key = decodeString cipherText (setupRotors key) key
| KeithKirk/haskell-ciphers | rotor.hs | mit | 2,738 | 0 | 13 | 533 | 990 | 521 | 469 | 49 | 2 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-missing-fields #-}
------------------------------------------------------------------------------
-- |
-- Module : Main (main.hs)
-- Copyright : (C) 2014 Samuli Thomasson
-- License : MIT (see the file LICENSE)
-- Maintainer : Samuli Thomasson <[email protected]>
-- Stability : experimental
-- Portability : non-portable
------------------------------------------------------------------------------
module Main (main) where
import ZabbixDB hiding (Future, History)
import Query
import Future
import Prelude
import Data.Monoid
import Control.Applicative
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Logger
import Data.Aeson hiding (Result)
import qualified Data.Map.Strict as M
import qualified Data.Aeson.Encode.Pretty as A
import qualified Data.ByteString.Lazy.Char8 as BLC
import Data.Maybe
import Data.Text (Text, pack, unpack)
import Data.Text.Encoding (encodeUtf8)
import Data.Time
import qualified Data.Vector as DV
import qualified Data.Vector.Storable as V
import qualified Data.Yaml as Yaml
import Data.Int
import qualified Database.Esqueleto as E
import Database.Esqueleto (toSqlKey, Entity(..))
import qualified Database.Persist as P
import System.Console.CmdArgs
import System.Directory (doesFileExist)
import Text.Printf
-- | Inserted by MigrateDb
--
-- Each metric pairs a short habbix name with the Zabbix item key it
-- tracks and a scale divisor applied when presenting values.
defaultMetricNames :: [Metric]
defaultMetricNames =
    [ Metric "mem" "vm.memory.size[available]" (1024 ^ 3) -- a gB
    , Metric "cpu" "system.cpu.load[percpu,avg5]" 1 -- %
    , Metric "swap" "system.swap.size[,pfree]" (1024 ^ 3)
    , Metric "fsroot" "vfs.fs.size[/,pfree]" (1024 ^ 3)
    ]
-- | Maps a host name to the item keys shown on its dashboard; the
-- "default" entry is used for hosts without an explicit entry
-- (see 'getDashboard').
type DashboardConfig = M.Map Text [Text]
-- | Application configuration, decoded from the YAML file given on the
-- command line (see the 'FromJSON' instance below).
data Config = Config
          { localDatabase :: ConnectionString   -- ^ habbix's own database
          , zabbixDatabase :: ConnectionString  -- ^ the remote Zabbix database
          , modeldir :: FilePath                -- ^ directory with forecast model executables
          , dashboardConfig :: DashboardConfig  -- ^ host -> dashboard item keys
          }
-- | YAML keys: @localDatabase@, @zabbixDatabase@, @dashboard@ are
-- required; @modeldir@ defaults to "forecast_models" when absent.
instance FromJSON Config where
    parseJSON (Object o) = Config <$> (encodeUtf8 <$> o .: "localDatabase")
                                  <*> (encodeUtf8 <$> o .: "zabbixDatabase")
                                  <*> (fromMaybe "forecast_models" <$> o .:? "modeldir")
                                  <*> (o .: "dashboard")
    parseJSON _ = mzero
-- | Command-line modes, one constructor per sub-command (driven by
-- cmdargs via 'prgConf').  Every mode carries @config@ (path to the YAML
-- configuration) and @outType@ (output format).
data Program = Hosts { config :: String, outType :: DataOutType }
             | Apps { config :: String, outType :: DataOutType, argid :: Int64 }
             | Items { config :: String, outType :: DataOutType, argid :: Int64 }
             | History { config :: String, outType :: DataOutType, argid :: Int64, samples :: Int }
             | Trends { config :: String, outType :: DataOutType, argid :: Int64, asItem :: Int64, asHost :: Int64 }
             | Future { config :: String, outType :: DataOutType }
             | Models { config :: String, outType :: DataOutType }
             | MigrateDb { config :: String, outType :: DataOutType }
             | Sync { config :: String, outType :: DataOutType, syncAll :: Bool, itemsToSync :: [Int64] }
             | Configure { config :: String, outType :: DataOutType, item :: Int64, model :: Int64, executable :: String }
             | Execute { config :: String, outType :: DataOutType, argid :: Int64, params :: String, outCombine :: Bool }
             | Compare { config :: String, outType :: DataOutType, argid :: Int64, fromInterval :: (Epoch, Epoch), toInterval :: (Epoch, Epoch) }
             | Dashboard { config :: String, outType :: DataOutType, cached :: Bool }
             | Th { config :: String, outType :: DataOutType, update :: Bool, metric :: Maybe String, critical :: Maybe Double, warning :: Maybe Double, high :: Maybe Double, lower :: Maybe Bool }
    deriving (Show, Data, Typeable)
-- | Output format selected by --human / --json / --sql (see 'main').
data DataOutType = OutHuman | OutJSON | OutSQL
    deriving (Eq, Show, Data, Typeable)
-- | cmdargs mode tree: default field values and --help texts for every
-- 'Program' constructor.
prgConf :: Program
prgConf = modes
    -- Query only
    [ Hosts { outType = enum [ OutHuman &= name "human" &= help "Human-readable JSON output"
                             , OutJSON &= name "json" &= help "Bare JSON output"
                             , OutSQL &= name "sql" &= help "SQL dump output"
                             ]
            , config = "config.yaml" &= typFile &= help "yaml config file (default: ./config.yaml)"
            } &= help "List all hosts and groups except templates"
    , Apps { argid = (-1) &= argPos 0 &= typ "ID"
           } &= help "List available \"metric groups\" for the Host ID"
    , Items { } &= help "List available \"metrics\" in the metric group App ID>"
    , History { samples = 80 &= help "Sample resolution (default 80)"
              } &= help "Print history data for <itemid>"
    , Trends { asItem = (-1) &= help "Output the trend data with this item id"
             , asHost = (-1) &= help "Output trend data with this host id"
             } &= help "Print trend data for the REMOTE item"
    , Future {
             } &= help "List all item futures"
    , Models {
             } &= help "List available future models"
    -- Database modifying actions
    , MigrateDb {
                } &= help "Create or update the local DB schema"
    , Sync { syncAll = False &= help "Sync every table, not just history"
           , itemsToSync = [] &= help "Optional item_future.id's to sync"
           } &= help "Synchronize remote db with local and run futures"
    , Configure { executable = "" &= typFile &= help "Register a new forecast model"
                , item = (-1) &= typ "ITEMID" &= help "Create a new item_future"
                , model = (-1) &= typ "MODELID" &= help "Model to register on the new item_future"
                } &= help "Configure the predictions in database"
    -- Info
    , Execute { -- argid
                params = "" &= typ "JSON"
              , outCombine = False &= help "Combine clock/value in the output"
              } &= help "Execute item_future.ID but only output the results, instead of modifying database"
    , Compare { -- argid
                fromInterval = def &= help "Interval to use with predictions"
              , toInterval = def &= help "Interval to compare the predicted model to"
              } &= help "Compare predictions from knowing A to an actual history B"
    , Dashboard { cached = True &= help "Use cached version"
                } &= help "Print dashboard-y information"
    , Th { update = False &= help "Make changes in the database"
         , metric = Nothing &= help "Which metric to operate on"
         , lower = Nothing &= help "Are the threshold lower bounds (default: upper)"
         , critical = Nothing &= help "critical threshold"
         , warning = Nothing &= help "warning threshold"
         , high = Nothing &= help "high threshold"
         } &= help "Print or set threshold values"
    ] &= program "habbix" &= verbosity
-- | Parse the command line, load the YAML configuration and dispatch the
-- selected sub-command inside the 'Habbix' monad.
main :: IO ()
main = do
    prg <- cmdArgs prgConf
    -- NOTE(review): partial pattern — an unreadable or invalid YAML file
    -- aborts with an opaque pattern-match failure rather than a message.
    Just Config{..} <- Yaml.decodeFile (config prg)
    debugInfo <- isLoud
    bequiet <- not <$> isNormal
    -- Lazily-evaluated argument conversions: the 'error' is raised only if
    -- the chosen branch actually forces the value.
    let argid' | argid prg < 0 = error "ItemID must be >= 0"
               | otherwise = toSqlKey $ argid prg
        asItem' | asItem prg < 0 = error "asItem must be >= 0"
                | otherwise = toSqlKey $ asItem prg :: ItemId
        asHost' | asHost prg < 0 = error "asHost must be >= 0"
                | otherwise = toSqlKey $ asHost prg :: HostId
        -- Render a result in the format selected by --human / --json / --sql.
        out x = liftIO . BLC.putStrLn $ case outType prg of
            OutJSON -> encode $ outJSON x
            OutHuman -> outHuman x
            OutSQL -> outSQL x
    runHabbix bequiet debugInfo modeldir localDatabase zabbixDatabase $ case prg of
        Hosts{..} -> out =<< runLocalDB selectHosts
        Apps{..} -> out =<< runLocalDB (selectHostApplications argid')
        Items{..} -> out =<< runLocalDB (selectAppItems argid')
        Trends{..} -> out . (,,) asItem' asHost' =<< runRemoteDB (selectZabTrendItem argid')
        History{..} -> out . sampled samples =<< runLocalDB (historyVectors $ selectHistory argid')
        Future{..} -> out =<< runLocalDB (P.selectList [] [P.Asc ItemFutureId])
        Models{..} -> out =<< runLocalDB (P.selectList [] [P.Asc FutureModelId])
        MigrateDb{..} -> runLocalDB $ E.runMigration migrateAll >> mapM_ P.insertUnique defaultMetricNames
        Dashboard{..} -> liftIO . BLC.putStrLn . encode =<< if cached then getDashboardCached else getDashboard dashboardConfig
        -- NOTE(review): 'fromJust metric' crashes when --update is given
        -- without --metric; consider validating the flag combination first.
        Th{..} -> do
            when update $ runLocalDB $ updateThresholds (pack $ fromJust metric) lower warning high critical
            out =<< runLocalDB (selectThresholds $ fmap pack metric)
        Sync{..} -> do
            when syncAll (populateZabbixParts >> populateDefaultFutures)
            case itemsToSync of
                [] -> populateAll >> executeFutures' Nothing
                is -> executeFutures' (Just $ map toSqlKey is)
            logInfoN "Now rebuilding the dashboard"
            void $ getDashboard dashboardConfig
        -- Either register a new model executable, or attach a model to an item.
        Configure{..}
            | not (null executable) -> do
                dir <- asks modelsDir
                let ex = dir ++ "/" ++ executable
                e <- liftIO $ doesFileExist ex
                unless e (error $ "Executable " ++ ex ++ " not found")
                runLocalDB (P.insert_ $ FutureModel (pack executable))
            | item < 0 || model < 0 -> error "Provide either --executable or (ID and --model)"
            | otherwise -> do
                fid <- newItemFuture (toSqlKey item) (toSqlKey model) False
                liftIO . putStrLn $ "Added future model id " ++ show (E.fromSqlKey fid)
#ifdef STATISTICS
        Compare{..} -> putStrLn =<< futureCompare argid' fromInterval toInterval
#else
        Compare{} -> error "habbix was not compiled with -fstatistics"
#endif
        -- Run a single future and print the result without touching the DB.
        Execute{..} -> do
            [(a, p, t, f, m)] <- getItemFutures $ Just [toSqlKey argid]
            let p' = if not (null params) then E.Value (encodeUtf8 $ pack params) else p
            r <- executeModel (a, p', t, f, m)
            case r of
                Right (_, r') -> out r'
                Left er -> error er
-- | Build the dashboard JSON for every host and cache the result to
-- @dashboard.cached.json@ (read back by 'getDashboardCached').
getDashboard :: DashboardConfig -> Habbix Value
getDashboard config = runLocalDB $ do
    hosts <- selectHosts
    hostsJson <- fmap toJSON . forM hosts $ \(Entity hostId host) -> do
        items <- getItems hostId (hostName host)
        return $ object [ "hostid" .= hostId
                        , "hostname" .= hostName host
                        , "items" .= items ]
    time <- liftIO getCurrentTime
    let res = object [ "timestamp" .= show time, "hosts" .= hostsJson ]
    liftIO $ BLC.writeFile "dashboard.cached.json" (encode res)
    return res
  where
    -- Item keys for a host: its own config entry, or the "default" entry.
    -- NOTE(review): 'M.!' crashes when the config has no "default" key —
    -- confirm the config schema guarantees one.
    getItems i k = let keys = fromMaybe (config M.! "default") $ M.lookup k config
                   in forM keys (selectItemDashboardData i)
-- | Read back the dashboard JSON that 'getDashboard' cached on disk.
-- A corrupt or truncated cache file now fails with the parser's
-- diagnostic instead of 'fromJust'-ing the decode result (which died
-- with an uninformative "Maybe.fromJust: Nothing").
getDashboardCached :: Habbix Value
getDashboardCached = do
    res <- liftIO $ BLC.readFile "dashboard.cached.json"
    either (error . ("dashboard.cached.json: " ++)) return (eitherDecode res)
-- * Print

-- | How to output with
--
-- One instance per queryable result type; 'main' selects the renderer
-- from the --human / --json / --sql flag.
class Out t where
    -- | Machine-readable JSON rendering.
    outJSON :: t -> Value
    -- | Human-readable rendering; defaults to pretty-printed JSON.
    outHuman :: t -> BLC.ByteString
    outHuman = A.encodePretty . outJSON
    -- | Output SQL INSERT'S
    outSQL :: t -> BLC.ByteString
    outSQL _ = "SQL output not implemented for this case"
-- | Threshold listing: one object per (metric, threshold) pair.
instance Out [(Entity Metric, Entity Threshold)] where
    outJSON = toJSON . map p where
        p (Entity _ metric, Entity thresholdId threshold) = object
            [ "threshold" .= threshold, "thresholdId" .= thresholdId, "metric" .= metric ]
-- | Host listing.
instance Out [Entity Host] where
    outJSON = toJSON . map p where
        p (Entity hid host) = object
            [ "hostid" .= hid
            , "host" .= hostName host ]
-- | Item listing.
instance Out [Entity Item] where
    outJSON = toJSON . map p where
        p (Entity iid item) = object
            [ "itemid" .= iid
            , "hostid" .= itemHost item
            , "key_" .= itemKey_ item
            , "name" .= itemName item
            , "description" .= itemDescription item
            ]
-- | Application ("metric group") listing.
instance Out [Entity Application] where
    outJSON = toJSON . map p where
        p (Entity aid app) = object
            [ "hostid" .= applicationHost app
            , "appid" .= aid
            , "name" .= applicationName app
            ]
-- | A sampled data series: list of (clock, value) pairs.
instance Out DP where
    outJSON (ts, vs) = toJSON $ zipWith (\t v -> (t, toJSON v)) (V.toList ts) (DV.toList vs)
-- | Print model info
instance Out [Entity FutureModel] where
    outJSON = toJSON . map p where
        p (Entity key model) = object [ "modelid" .= key
                                      , "name" .= futureModelName model
                                      ]
-- | Configured item futures; @params@ falls back to an error string when
-- the stored JSON blob fails to parse.
instance Out [Entity ItemFuture] where
    outJSON = toJSON . map p where
        p (Entity futId fut) = object
            [ "futureid" .= futId
            , "itemid" .= itemFutureItem fut
            , "modelid" .= itemFutureModel fut
            , "params" .= fromMaybe (String "ERROR: params could not be parsed") (decodeStrict (itemFutureParams fut))
            ]
-- | Forecast model output: parallel clock/value vectors zipped into objects.
instance Out (Result Object) where
    outJSON Result{..} = toJSON $ zipWith (\c v -> object [ "time" .= c, "val" .= v]) (V.toList reClocks) (V.toList reValues)
-- | Remote trend dump, re-keyed with the given local item and host ids
-- (see the Trends branch of 'main').
instance Out (ItemId, HostId, (Entity Host, Entity Item, [Trend])) where
    -- NOTE(review): values are interpolated into the SQL text without
    -- escaping; acceptable for trusted Zabbix data but not injection-safe.
    outSQL (iid, hid, (Entity _ Host{..}, Entity _ Item{..}, trends)) = "BEGIN;"
        <> BLC.unlines (map BLC.pack
            [ printf "INSERT INTO hosts VALUES (%d, '%s', %d, %d, '%s');"
                (E.fromSqlKey hid) (unpack hostHost) hostStatus hostAvailable (unpack hostName)
            , printf "INSERT INTO items VALUES (%d, %d, %d, '%s', '%s', '%s', '%d');"
                (E.fromSqlKey iid) itemType (E.fromSqlKey hid) (unpack itemName)
                (unpack itemKey_) (unpack itemDescription) itemValueType
            , "INSERT INTO trend VALUES " ])
        <> BLC.intercalate ",\n" (map printfTrend trends) <> "; COMMIT;"
      where
        printfTrend Trend{..} = BLC.pack $
            printf " (%d, %d, %.4f, %.4f, %.4f)"
                (E.fromSqlKey iid) trendClock (tof trendValueMin) (tof trendValueAvg) (tof trendValueMax)
        -- printf cannot format Rational directly; go through Double.
        tof :: Rational -> Double
        tof = fromRational
    outJSON (iid, hid, (Entity _ Host{..}, Entity _ Item{..}, trends)) = object
        [ "itemid" .= iid
        , "hostid" .= hid
        , "trends" .= map (\Trend{..} -> object ["clock" .= trendClock, "value_min" .= trendValueMin, "value_avg" .= trendValueAvg, "value_max" .= trendValueMax]) trends
        ]
-- * History stuff

-- | Sample of n evenly leaning towards end.
--
-- Indices are clamped to the data range, so requesting more samples than
-- there are data points no longer computes a negative index (previously
-- @getIndex 0@ could evaluate to -1 and @V.!@ crashed).  An empty series
-- yields an empty sample.  For n <= length the clamps are no-ops, so the
-- original behaviour is unchanged.
sampled :: Int -> DP -> DP
sampled n (ts, vs)
    | V.null ts = (V.empty, DV.empty)
    | otherwise =
        ( V.generate n $ \i -> ts V.! getIndex i
        , DV.generate n $ \i -> vs DV.! getIndex i
        )
  where
    len = V.length ts
    interval = fromIntegral len / fromIntegral n :: Double
    getIndex i = max 0 $ min (len - 1) $ floor ((fromIntegral i + 1) * interval - 1)
| Multi-Axis/habbix | app/main.hs | mit | 15,730 | 0 | 20 | 5,042 | 4,155 | 2,194 | 1,961 | 259 | 20 |
{-|
Module: Flaw.Graphics.GLSL
Description: GLSL generator for OpenGL/WebGL graphics.
License: MIT
-}
{-# LANGUAGE DeriveGeneric, GADTs, OverloadedStrings, TypeFamilies #-}
module Flaw.Graphics.GLSL
( GlslConfig(..)
, GlslAttribute(..)
, GlslUniformBlock(..)
, GlslUniform(..)
, GlslSampler(..)
, GlslFragmentTarget(..)
, GlslStage(..)
, GlslProgram(..)
, glslGenerateProgram
) where
import Data.Bits
import Data.List
import qualified Data.Serialize as S
import Data.Serialize.Text()
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import Data.Text.Lazy.Builder
import GHC.Generics(Generic)
import Flaw.Graphics.Program.Internal
import qualified Flaw.Graphics.Program.SL as SL
-- | GLSL config for various versions of GLSL.
data GlslConfig = GlslConfig
  {
  -- | GLSL version.  'Nothing' omits the @#version@ directive entirely.
    glslConfigVersion :: Maybe Int
  -- | Replace integer types with float types for attributes.
  , glslConfigForceFloatAttributes :: !Bool
  -- | Replace unsigned types with signed types.
  , glslConfigUnsignedUnsupported :: !Bool
  -- | Use uniform blocks (instead of separate uniforms).
  , glslConfigUniformBlocks :: !Bool
  -- | Use "in" and "out" keywords instead of "attribute" and "varying", declare fragment targets.
  , glslConfigInOutSyntax :: !Bool
  -- | Use dimension specifiers in texture sampling functions (like texture2D vs texture).
  , glslConfigTextureSampleDimensionSpecifier :: !Bool
  }
-- | Vertex attribute used by a shader; the name is the generated GLSL
-- identifier to bind against.
data GlslAttribute = GlslAttribute
  { glslAttributeName :: !T.Text
  , glslAttributeInfo :: Attribute
  } deriving (Show, Generic)

instance S.Serialize GlslAttribute

-- | Information for binding uniform block to slot.
-- It might be multiple uniform blocks bound to a single slot
-- (for example, if uniforms from this slot are used in multiple shader stages).
data GlslUniformBlock = GlslUniformBlock
  { glslUniformBlockName :: !T.Text
  , glslUniformBlockSlot :: !Int
  } deriving (Show, Generic)

instance S.Serialize GlslUniformBlock

-- | Uniform used by shader.
data GlslUniform = GlslUniform
  { glslUniformName :: !T.Text
  , glslUniformInfo :: Uniform
  } deriving (Show, Generic)

instance S.Serialize GlslUniform

-- | Sampler used by shader.
data GlslSampler = GlslSampler
  { glslSamplerName :: !T.Text
  , glslSamplerInfo :: Sampler
  } deriving (Show, Generic)

instance S.Serialize GlslSampler
-- | Fragment target outputted by shader.
-- The dual variant carries two output names for dual-source blending.
data GlslFragmentTarget
  = GlslFragmentTarget
  { glslFragmentTargetName :: !T.Text
  , glslFragmentTargetIndex :: !Int
  }
  | GlslDualFragmentTarget
  { glslFragmentTargetName0 :: !T.Text
  , glslFragmentTargetName1 :: !T.Text
  }
  deriving (Show, Generic)

instance S.Serialize GlslFragmentTarget

-- | Shader pipeline stage a generated source belongs to.
data GlslStage
  = GlslVertexStage
  | GlslFragmentStage
  deriving (Show, Generic)

instance S.Serialize GlslStage

-- | GLSL program.
--
-- Aggregates the generated per-stage sources together with every
-- resource (attributes, uniform blocks, uniforms, samplers, fragment
-- targets) the host code must bind.
data GlslProgram = GlslProgram
  { glslProgramAttributes :: [GlslAttribute]
  , glslProgramUniformBlocks :: [GlslUniformBlock]
  , glslProgramUniforms :: [GlslUniform]
  , glslProgramSamplers :: [GlslSampler]
  , glslProgramFragmentTargets :: [GlslFragmentTarget]
  , glslProgramShaders :: [(GlslStage, T.Text)]
  } deriving (Show, Generic)

instance S.Serialize GlslProgram
-- | Generate shader programs in GLSL.
glslGenerateProgram :: GlslConfig -> State -> GlslProgram
glslGenerateProgram GlslConfig
{ glslConfigVersion = configVersion
, glslConfigForceFloatAttributes = configForceFloatAttributes
, glslConfigUnsignedUnsupported = configUnsignedUnsupported
, glslConfigUniformBlocks = configUniformBlocks
, glslConfigInOutSyntax = configInOutSyntax
, glslConfigTextureSampleDimensionSpecifier = configTextureSampleDimensionSpecifier
} state = program where
-- generate program information
SL.ProgramInfo shaders = SL.programInfo state
-- generate shaders and unite them
program = foldr mergeProgram emptyProgram shaders
mergeProgram shader@(stage, _shaderInfo) GlslProgram
{ glslProgramAttributes = pas
, glslProgramUniformBlocks = pubs
, glslProgramUniforms = pus
, glslProgramSamplers = pss
, glslProgramFragmentTargets = pts
, glslProgramShaders = ps
} = GlslProgram
{ glslProgramAttributes = as ++ pas
, glslProgramUniformBlocks = ubs ++ pubs
, glslProgramUniforms = us ++ pus
, glslProgramSamplers = ss ++ pss
, glslProgramFragmentTargets = ts ++ pts
, glslProgramShaders = (glslStage, s) : ps
} where
(s, as, ubs, us, ss, ts) = generateShader shader
glslStage = case stage of
VertexStage -> GlslVertexStage
PixelStage -> GlslFragmentStage
_ -> error "wrong stage"
emptyProgram = GlslProgram
{ glslProgramAttributes = []
, glslProgramUniformBlocks = []
, glslProgramUniforms = []
, glslProgramSamplers = []
, glslProgramFragmentTargets = []
, glslProgramShaders = []
}
-- function to generate shader source and lists of resources
generateShader :: (Stage, SL.ShaderInfo) -> (T.Text, [GlslAttribute], [GlslUniformBlock], [GlslUniform], [GlslSampler], [GlslFragmentTarget])
generateShader (stage, SL.ShaderInfo
{ SL.shaderTemps = temps
, SL.shaderAttributes = attributes
, SL.shaderUniforms = uniforms
, SL.shaderSamplers = samplers
, SL.shaderTargets = targets
}) = (TL.toStrict $ toLazyText source, resAttributes, resUniformBlocks, resUniforms, resSamplers, resFragmentTargets) where
-- GLSL attributes
resAttributes = map resAttribute attributes
resAttribute attribute@Attribute
{ attributeSlot = slot
, attributeOffset = offset
} = GlslAttribute
{ glslAttributeName = TL.toStrict $ toLazyText $ attributeName slot offset
, glslAttributeInfo = attribute
}
-- GLSL uniform blocks
resUniformBlocks = if configUniformBlocks then map resUniformBlock uniformBlocks else []
resUniformBlock us = GlslUniformBlock
{ glslUniformBlockName = TL.toStrict $ toLazyText $ uniformBlockName slot
, glslUniformBlockSlot = slot
} where slot = uniformSlot $ head us
-- GLSL uniforms
resUniforms = if configUniformBlocks then [] else map resUniform uniforms
resUniform uniform@Uniform
{ uniformSlot = slot
, uniformOffset = offset
} = GlslUniform
{ glslUniformName = TL.toStrict $ toLazyText $ uniformName slot offset
, glslUniformInfo = uniform
}
-- GLSL samplers
resSamplers = map resSampler samplers
resSampler sampler@Sampler
{ samplerSlot = slot
} = GlslSampler
{ glslSamplerName = TL.toStrict $ toLazyText $ samplerName slot
, glslSamplerInfo = sampler
}
-- GLSL fragment targets
resFragmentTargets = if configInOutSyntax then concatMap resFragmentTarget targets else []
resFragmentTarget target = case target of
PositionTarget _ -> []
ColorTarget slot _ -> [GlslFragmentTarget
{ glslFragmentTargetName = TL.toStrict $ toLazyText $ targetColorName slot
, glslFragmentTargetIndex = slot
}]
DualColorTarget _ _ -> [GlslDualFragmentTarget
{ glslFragmentTargetName0 = TL.toStrict $ toLazyText $ targetColorName 0
, glslFragmentTargetName1 = TL.toStrict $ toLazyText $ targetColorName 1
}]
DepthTarget _ -> []
-- source
source = headerSource <> attributesSource <> inInterpolantsSource <> outInterpolantsSource
<> (if configUniformBlocks then uniformBlocksSource else uniformsSource)
<> samplersSource <> targetDeclsSource <> codeSource
-- header source
headerSource = versionSource <> "#ifdef GL_ES\nprecision highp float;\n#endif\n"
versionSource = case configVersion of
Just version -> "#version " <> fromString (show version) <> "\n"
Nothing -> mempty
-- attributes source
attributesSource = foldr (mappend . attributeDefinitionSource) mempty attributes
attributeDefinitionSource Attribute
{ attributeSlot = slot
, attributeOffset = offset
, attributeValueType = t
} = (if configInOutSyntax then "in " else "attribute ") <> attributeValueTypeSource t <> " " <> attributeName slot offset <> ";\n"
-- in-interpolants source
-- in-interpolants are temps used during this stage, but defined at another stage
inInterpolantsSource = foldr (mappend . inInterpolantSource) mempty $ filter (\temp -> tempStage temp /= stage) temps
inInterpolantSource Temp
{ tempIndex = i
, tempType = t
} = (if configInOutSyntax then "in " else "varying ") <> valueTypeSource t <> " " <> interpolantName i <> ";\n"
-- out-interpolants are temps defined at this stage, but used by some other stage
outInterpolants = filter (\temp -> tempStage temp == stage && tempUsedByOtherStage (tempIndex temp)) temps
-- out-interpolants source
outInterpolantsSource = foldr (mappend . outInterpolantSource) mempty outInterpolants
outInterpolantSource Temp
{ tempIndex = i
, tempType = t
} = (if configInOutSyntax then "out " else "varying ") <> valueTypeSource t <> " " <> interpolantName i <> ";\n"
otherShaderInfos = concatMap (\(otherStage, otherShaderInfo) -> if otherStage == stage then [] else [otherShaderInfo]) shaders
tempUsedByOtherStage i = any (elem i . map tempIndex . SL.shaderTemps) otherShaderInfos
-- uniform blocks source
uniformBlocks = groupBy eqUniformBySlot $ sortBy compareUniformBySlot uniforms
uniformBlocksSource = foldr (mappend . uniformBlockSource) mempty uniformBlocks
uniformBlockSource blockUniforms = blockHeader <> blockSource <> blockFooter where
blockHeader = "layout(std140) uniform " <> uniformBlockName (uniformSlot $ head blockUniforms) <> "\n{\n"
blockFooter = "};\n"
blockSource = foldr (mappend . uniformInBlockSource) mempty $ addBlockGaps blockUniforms 0
uniformInBlockSource u = "\t" <> uniformDefinitionSource u
addBlockGaps ua@(u:us) offset
| advance == 0 = u : addBlockGaps us (offset + valueTypeScalarsCount (uniformType u) * count * 4)
| advance `mod` 4 == 0 && advance >= 4 = Uniform
{ uniformSlot = uniformSlot u
, uniformOffset = offset
, uniformSize = 0
, uniformType = case cappedAdvance `quot` 4 of
1 -> ScalarValueType ScalarFloat
2 -> VectorValueType Dimension2 ScalarFloat
3 -> VectorValueType Dimension3 ScalarFloat
4 -> VectorValueType Dimension4 ScalarFloat
_ -> undefined
} : addBlockGaps ua (offset + cappedAdvance)
| otherwise = error "wrong uniform offset"
where
advance = uniformOffset u - offset
cappedAdvance = min advance $ ((offset + 16) .&. complement 15) - offset
size = uniformSize u
count = if size > 0 then size else 1
addBlockGaps [] _ = []
eqUniformBySlot a b = uniformSlot a == uniformSlot b
compareUniformBySlot a b = compare (uniformSlot a) (uniformSlot b)
-- uniforms source
uniformsSource = foldr (mappend . uniformSource) mempty uniforms
uniformSource uniform = "uniform " <> uniformDefinitionSource uniform
-- helper function for uniform
uniformDefinitionSource Uniform
{ uniformSlot = slot
, uniformOffset = offset
, uniformSize = size
, uniformType = t
} = valueTypeSource t <> " " <> uniformName slot offset <> (if size > 0 then "[" <> fromString (show size) <> "]" else mempty) <> ";\n"
-- samplers source
samplersSource = foldr (mappend . samplerSource) mempty samplers
samplerSource Sampler
{ samplerSlot = slot
, samplerDimension = dimension
, samplerSampleType = sampleType
} = "uniform " <> typeSource <> samplerDimensionSource <> " " <> samplerName slot <> ";\n" where
typeSource = case sampleType of
ScalarValueType st -> scalarShortTypeSource st
VectorValueType _dim st -> scalarShortTypeSource st
MatrixValueType _dim1 _dim2 st -> scalarShortTypeSource st
samplerDimensionSource = case dimension of
Sampler1D -> "sampler1D"
Sampler2D -> "sampler2D"
Sampler3D -> "sampler3D"
SamplerCube -> "samplerCube"
-- target decls source
targetDeclsSource = if configInOutSyntax then foldr (mappend . targetDeclSource) mempty targets else mempty
targetDeclSource target = case target of
PositionTarget _ -> mempty
ColorTarget slot _ -> "out " <> valueTypeSource (VectorValueType Dimension4 ScalarFloat) <> " " <> targetColorName slot <> ";\n"
DualColorTarget a b -> targetDeclSource (ColorTarget 0 a) <> targetDeclSource (ColorTarget 1 b)
DepthTarget _ -> mempty
-- code source
codeSource = "void main()\n{\n" <> tempsSource <> outInterpolantsAssignmentsSource <> targetsSource <> "}\n"
-- definitions of temp variables
tempsSource = foldr (mappend . tempSource) mempty temps
tempSource Temp
{ tempIndex = i
, tempNode = node
, tempStage = ts
, tempType = t
} = "\t" <> valueTypeSource t <> " " <> tempName i <> " = " <> (if ts == stage then tempNodeSource else interpolantName i) <> ";\n" where
tempNodeSource = nodeSource $ case node of
-- cast attribute node back to non-float type if needed
AttributeNode _ -> if configForceFloatAttributes && t /= forceFloatType t then CastNode (forceFloatType t) t node else node
_ -> node
-- assignments to out-interpolants
outInterpolantsAssignmentsSource = foldr (mappend . outInterpolantAssignmentSource) mempty outInterpolants
outInterpolantAssignmentSource Temp
{ tempIndex = i
} = "\t" <> interpolantName i <> " = " <> tempName i <> ";\n"
-- outputting targets
targetsSource = foldr (mappend . targetSource) mempty targets
targetSource target = case target of
PositionTarget node -> "\t" <> targetPositionName <> " = " <> nodeSource node <> ";\n"
ColorTarget slot node -> "\t" <> targetColorName slot <> " = " <> nodeSource node <> ";\n"
DualColorTarget nodeA nodeB -> targetSource (ColorTarget 0 nodeA) <> targetSource (ColorTarget 1 nodeB)
DepthTarget node -> "\t" <> targetDepthName <> " = " <> nodeSource node <> ";\n"
-- helper functions
attributeName :: Int -> Int -> Builder
attributeName slot offset = "a" <> fromString (show slot) <> "_" <> fromString (show offset)
interpolantName :: Int -> Builder
interpolantName i = "i" <> fromString (show i)
uniformBlockName :: Int -> Builder
uniformBlockName slot = uniformBlockPrefix <> fromString (show slot)
uniformName :: Int -> Int -> Builder
uniformName slot offset = uniformPrefix <> fromString (show slot) <> "_" <> fromString (show offset)
samplerName :: Int -> Builder
samplerName slot = samplerPrefix <> fromString (show slot)
tempName :: Int -> Builder
tempName i = "_" <> fromString (show i)
targetPositionName :: Builder
targetPositionName = "gl_Position"
targetColorName :: Int -> Builder
targetColorName i
| configInOutSyntax = "r" <> fromString (show i)
| i == 0 = "gl_FragColor"
| otherwise = "gl_FragData[" <> fromString (show i) <> "]"
targetDepthName :: Builder
targetDepthName = "gl_FragDepth"
valueTypeSource :: ValueType -> Builder
valueTypeSource vt = case vt of
ScalarValueType st -> scalarTypeSource st
VectorValueType d st -> scalarShortTypeSource st <> "vec" <> dimensionSource d
MatrixValueType d1 d2 st ->
if d1 == d2 then
scalarShortTypeSource st <> "mat" <> dimensionSource d1
else
scalarShortTypeSource st <> "mat" <> dimensionSource d1 <> "x" <> dimensionSource d2
-- | Special version of valueTypeSource for attributes. Forces use of float type if needed.
attributeValueTypeSource :: ValueType -> Builder
attributeValueTypeSource = if configForceFloatAttributes then valueTypeSource . forceFloatType else valueTypeSource
forceFloatType :: ValueType -> ValueType
forceFloatType vt = case vt of
ScalarValueType st -> ScalarValueType $ forceFloatScalarType st
VectorValueType d st -> VectorValueType d $ forceFloatScalarType st
MatrixValueType d1 d2 st -> MatrixValueType d1 d2 $ forceFloatScalarType st
forceFloatScalarType :: ScalarType -> ScalarType
forceFloatScalarType st = case st of
ScalarFloat -> ScalarFloat
ScalarDouble -> ScalarDouble
ScalarInt -> ScalarFloat
ScalarUint -> ScalarFloat
ScalarBool -> ScalarFloat
scalarTypeSource :: ScalarType -> Builder
scalarTypeSource st = case st of
ScalarFloat -> "float"
ScalarDouble -> "double"
ScalarInt -> "int"
ScalarUint -> if configUnsignedUnsupported then "int" else "uint"
ScalarBool -> "bool"
scalarShortTypeSource :: ScalarType -> Builder
scalarShortTypeSource st = case st of
ScalarFloat -> ""
ScalarDouble -> "d"
ScalarInt -> "i"
ScalarUint -> if configUnsignedUnsupported then "i" else "u"
ScalarBool -> "b"
dimensionSource :: Dimension -> Builder
dimensionSource d = case d of
Dimension1 -> "1"
Dimension2 -> "2"
Dimension3 -> "3"
Dimension4 -> "4"
uniformBlockPrefix :: Builder
uniformBlockPrefix = case stage of
VertexStage -> "UBv"
PixelStage -> "UBp"
_ -> error "wrong stage"
uniformPrefix :: Builder
uniformPrefix = case stage of
VertexStage -> "uv"
PixelStage -> "up"
_ -> error "wrong stage"
samplerPrefix :: Builder
samplerPrefix = case stage of
VertexStage -> "sv"
PixelStage -> "sp"
_ -> error "wrong stage"
nodeSource :: Node a -> Builder
nodeSource node = case node of
AttributeNode Attribute
{ attributeSlot = slot
, attributeOffset = offset
} -> attributeName slot offset
UniformNode Uniform
{ uniformSlot = slot
, uniformOffset = offset
} -> uniformName slot offset
TempNode i -> tempName i
ConstNode t v -> let
s = valueToShowList v
content = case t of
ScalarValueType _ -> head s
VectorValueType _ _ -> intercalate ", " s
MatrixValueType {} -> intercalate ", " s
in valueTypeSource t <> "(" <> fromString content <> ")"
IndexNode _ _ a b -> "(" <> nodeSource a <> ")[" <> nodeSource b <> "]"
AddNode _ a b -> binaryOpSource "+" a b
SubtractNode _ a b -> binaryOpSource "-" a b
MultiplyNode _ a b -> binaryOpSource "*" a b
DivideNode _ a b -> binaryOpSource "/" a b
RecipNode _ a -> func1Source "rcp" a
NegateNode _ a -> "-(" <> nodeSource a <> ")"
AbsNode _ a -> func1Source "abs" a
SignumNode _ a -> func1Source "sign" a
MinNode _ a b -> func2Source "min" a b
MaxNode _ a b -> func2Source "max" a b
ClampNode _ a b c -> func3Source "clamp" a b c
LerpNode _ a b c -> func3Source "mix" a b c
EqualNode _ a b -> binaryOpSource "==" a b
LessNode t a b -> case t of
ScalarValueType _ -> binaryOpSource "<" a b
_ -> func2Source "lessThan" a b
LessEqualNode t a b -> case t of
ScalarValueType _ -> binaryOpSource "<=" a b
_ -> func2Source "lessThanEqual" a b
IfNode _ c a b -> "(" <> nodeSource c <> ") ? (" <> nodeSource a <> ") : (" <> nodeSource b <> ")"
PiNode t -> let
typedPi :: Floating a => Node a -> a
typedPi _ = pi
in nodeSource $ ConstNode t $ typedPi node
ExpNode _ a -> func1Source "exp" a
SqrtNode _ a -> func1Source "sqrt" a
InvSqrtNode _ a -> func1Source "inversesqrt" a
LogNode _ a -> func1Source "log" a
PowNode _ a b -> func2Source "pow" a b
LogBaseNode t a b -> nodeSource $ DivideNode t (LogNode t a) (LogNode t b)
SinNode _ a -> func1Source "sin" a
TanNode _ a -> func1Source "tan" a
CosNode _ a -> func1Source "tan" a
AsinNode _ a -> func1Source "asin" a
AtanNode _ a -> func1Source "atan" a
AcosNode _ a -> func1Source "acos" a
SinhNode _ a -> func1Source "sinh" a
TanhNode _ a -> func1Source "tanh" a
CoshNode _ a -> func1Source "cosh" a
AsinhNode _ a -> func1Source "asinh" a
AtanhNode _ a -> func1Source "atanh" a
AcoshNode _ a -> func1Source "acosh" a
MulNode _ _ _ a b -> binaryOpSource "*" a b
DotNode _ _ a b -> func2Source "dot" a b
CrossNode _ a b -> func2Source "cross" a b
NormNode _ _ a -> func1Source "length" a
Norm2Node _ _ a -> func1Source "length2" a
NormalizeNode _ a -> func1Source "normalize" a
DdxNode _ a -> func1Source "dFdx" a
DdyNode _ a -> func1Source "dFdy" a
FloorNode _ a -> func1Source "floor" a
InstanceIdNode -> "uint(gl_InstanceID)"
ComponentNode _ _ c a -> "(" <> nodeSource a <> ")." <> singleton c
SwizzleNode _ _ s a -> "(" <> nodeSource a <> ")." <> fromString s
SampleNode
{ sampleNodeSamplerNode = SamplerNode Sampler
{ samplerSlot = slot
, samplerSampleType = sampleType
, samplerCoordsType = coordsType
}
, sampleNodeCoordsNode = c
, sampleNodeOffsetNode = mo
, sampleNodeLod = ll
} -> let
sc = samplerName slot <> ", " <> nodeSource c
coordsDim = if configTextureSampleDimensionSpecifier then case coordsType of
ScalarValueType _ -> "1D"
VectorValueType dim _ -> case dim of
Dimension1 -> "1D"
Dimension2 -> "2D"
Dimension3 -> "3D"
Dimension4 -> "4D"
MatrixValueType {} -> error "invalid coords type"
else mempty
f = case mo of
Nothing -> case ll of
SampleNodeAutoLod -> "texture" <> coordsDim <> "(" <> sc <> ")"
SampleNodeLod l -> "texture" <> coordsDim <> "Lod(" <> sc <> ", " <> nodeSource l <> ")"
SampleNodeBiasLod b -> "texture" <> coordsDim <> "Bias(" <> sc <> ", " <> nodeSource b <> ")"
SampleNodeGradLod gx gy -> "texture" <> coordsDim <> "Grad(" <> sc <> ", " <> nodeSource gx <> ", " <> nodeSource gy <> ")"
Just o -> case ll of
SampleNodeAutoLod -> "texture" <> coordsDim <> "Offset(" <> sc <> ", " <> nodeSource o <> ")"
SampleNodeLod l -> "texture" <> coordsDim <> "LodOffset(" <> sc <> ", " <> nodeSource l <> ", " <> nodeSource o <> ")"
SampleNodeBiasLod b -> "texture" <> coordsDim <> "Bias(" <> sc <> ", " <> nodeSource b <> ", " <> nodeSource o <> ")"
SampleNodeGradLod gx gy -> "texture" <> coordsDim <> "Grad(" <> sc <> ", " <> nodeSource gx <> ", " <> nodeSource gy <> ", " <> nodeSource o <> ")"
in f <> case sampleType of
ScalarValueType _ -> ".x"
VectorValueType dim _ -> case dim of
Dimension1 -> ".x"
Dimension2 -> ".xy"
Dimension3 -> ".xyz"
Dimension4 -> mempty
MatrixValueType {} -> mempty
CastNode _ t a -> valueTypeSource t <> "(" <> nodeSource a <> ")"
Combine2VecNode _ _ t a b -> func2Source (valueTypeSource t) a b
Combine3VecNode _ _ _ t a b c -> func3Source (valueTypeSource t) a b c
Combine4VecNode _ _ _ _ t a b c d -> func4Source (valueTypeSource t) a b c d
ScreenToTextureNode _ a -> "(" <> nodeSource a <> ") * vec2(0.5, 0.5) + vec2(0.5, 0.5)"
NormalizeSampledDepthNode a -> "(" <> nodeSource a <> ") * 2 - 1"
FragCoordNode -> "gl_FragCoord"
binaryOpSource :: Builder -> Node a -> Node b -> Builder
binaryOpSource op a b = "(" <> nodeSource a <> ") " <> op <> " (" <> nodeSource b <> ")"
func1Source :: Builder -> Node a -> Builder
func1Source func a = func
<> "("
<> nodeSource a
<> ")"
func2Source :: Builder -> Node a -> Node b -> Builder
func2Source func a b = func
<> "("
<> nodeSource a
<> ", "
<> nodeSource b
<> ")"
func3Source :: Builder -> Node a -> Node b -> Node c -> Builder
func3Source func a b c = func
<> "("
<> nodeSource a
<> ", "
<> nodeSource b
<> ", "
<> nodeSource c
<> ")"
func4Source :: Builder -> Node a -> Node b -> Node c -> Node d -> Builder
func4Source func a b c d = func
<> "("
<> nodeSource a
<> ", "
<> nodeSource b
<> ", "
<> nodeSource c
<> ", "
<> nodeSource d
<> ")"
| quyse/flaw | flaw-gl/Flaw/Graphics/GLSL.hs | mit | 24,512 | 0 | 31 | 6,161 | 6,466 | 3,304 | 3,162 | 513 | 147 |
import Drawing
-- Render the lesson picture using the Drawing helper library.
main = drawPicture myPicture
-- Draw segment AB together with its image A'B' under the translation
-- by (3, -1): 3 units right, 1 unit down.
myPicture points =
  coordinates &
  drawPoints [a,b,a',b'] &
  drawLabels [a,b,a',b'] ["A","B","A'","B'"] &
  drawSegment (a,b) &
  message "Translation of AB 3 units right and 1 unit down"
  where
    a = (1,1)
    b = (2,2)
    a' = translate a (3,-1)
    b' = translate b (3,-1)
    -- shift a point by the given offset
    translate (px,py) (dx,dy) = (px+dx, py+dy)
{-# LANGUAGE OverloadedStrings #-}
module Network.API.Mandrill.Tags where
import Network.API.Mandrill.Response
import Network.API.Mandrill.Types
import Network.API.Mandrill.Utils
-- | Return all of the user-defined tag information.
list :: (MonadIO m) => MandrillT m (Either ApiError [Stat])
list = performRequest "/tags/list.json" []

-- | Return more detailed information about a single tag, including
-- aggregates of recent stats.
info :: (MonadIO m) => Tag -> MandrillT m (Either ApiError Stat)
info t = performRequest "/tags/info.json" ["tag" .= t]

-- | Delete a tag permanently. Deleting a tag removes the tag from any
-- messages that have been sent, and also deletes the tag's stats.
-- There is no way to undo this operation, so use it carefully.
delete :: (MonadIO m) => Tag -> MandrillT m (Either ApiError Stat)
delete t = performRequest "/tags/delete.json" ["tag" .= t]

-- | Return the recent history (hourly stats for the last 30 days)
-- for a single tag.
timeSeries :: (MonadIO m) => Tag -> MandrillT m (Either ApiError Stat)
timeSeries t = performRequest "/tags/time-series.json" ["tag" .= t]

-- | Return the recent history (hourly stats for the last 30 days)
-- for all tags.
allTimeSeries :: (MonadIO m) => MandrillT m (Either ApiError Stat)
allTimeSeries = performRequest "/tags/time-series.json" []
| krgn/hamdrill | src/Network/API/Mandrill/Tags.hs | mit | 1,394 | 0 | 9 | 280 | 271 | 149 | 122 | 21 | 1 |
module Soundwave.Persistence where
import Soundwave.Data
import qualified Soundwave.Logger as L
import SoundwaveProtos.Datum
import SoundwaveProtos.Value
import SoundwaveProtos.Request
import SoundwaveProtos.Response
import SoundwaveProtos.Snapshot
import Text.ProtocolBuffers.WireMessage (messageGet, messagePut)
import Text.ProtocolBuffers.Basic(utf8, uFromString, toUtf8)
import Control.Monad.State
import Data.Sequence (fromList)
import Data.Maybe
import qualified Data.Trie as T
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Char8 as BC (pack, unpack)
import qualified Data.Map.Strict as M
import Data.Foldable (toList)
import Control.Arrow ((&&&)) -- thanks, hlint!
import System.Directory (doesFileExist)
import Database.PureCDB
-- | Load the database from an existing CDB file, or create a fresh CDB
-- file containing a snapshot of the current in-memory db.  Either way,
-- the file path is recorded in the environment as the storage location.
initPersistence :: FilePath -> StateT Env IO ()
initPersistence file = do
  env <- get
  fileExists <- lift $ doesFileExist file
  if fileExists
    then do
      lift $ L.info (logger env) ("Loading existing db: " ++ file)
      r <- liftIO $ openCDB file
      bs <- liftIO $ getBS r (BC.pack "dbstate")
      snapshot <- liftIO $ parseSnapshotBytes (B.concat bs)
      let newDb = snapshotToDB snapshot
      lift $ L.info (logger env) ("Loaded " ++ show (length (toList (dat snapshot))) ++ " keys.")
      put env { storage = Just file, db = newDb }
    else do
      -- no file yet: write an initial snapshot of the in-memory db;
      -- reuses saveSnapshot instead of duplicating the CDB logic inline
      liftIO $ saveSnapshot (db env) file
      put env { storage = Just file }
-- | Decode a protobuf 'Snapshot' from strict bytes.  Calls 'error' on
-- a parse failure or when unparsed trailing bytes remain.
parseSnapshotBytes :: B.ByteString -> IO Snapshot
parseSnapshotBytes s = case messageGet (BL.fromStrict s) of
  -- the original pair of complementary guards was not provably
  -- exhaustive for GHC; 'otherwise' makes totality explicit
  Right (snapshot, rest)
    | BL.null rest -> return snapshot
    | otherwise -> error "Failed to parse snapshot: trailing bytes"
  Left errorMessage ->
    error $ "Failed to parse snapshot: " ++ errorMessage
-- | Build the in-memory trie DB from a decoded Snapshot: one trie entry
-- per datum, keyed by the UTF-8 name, holding a strict map of key/value
-- pairs from the datum's vector.
snapshotToDB :: Snapshot -> DB
snapshotToDB s = T.fromList
  [ (BL.toStrict (utf8 (name d)), M.fromList [ (key v, value v) | v <- toList (vector d) ])
  | d <- toList (dat s)
  ]
-- | Serialize the whole DB as a protobuf Snapshot and write it into
-- a pure-CDB file under the single key "dbstate".
saveSnapshot :: DB -> FilePath -> IO ()
saveSnapshot database path = makeCDB (addBS (BC.pack "dbstate") bytes) path
  where
    bytes = BL.toStrict $ messagePut Snapshot
      { dat = fromList (map (uncurry makeDatum) (T.toList database)) }
| mrb/soundwave | Soundwave/Persistence.hs | mit | 2,576 | 0 | 25 | 582 | 898 | 470 | 428 | 59 | 3 |
{-# LANGUAGE FlexibleContexts #-}
module Chimera.Scripts.Common where
import FreeGame
import Control.Lens
import Data.Default (def)
import qualified Data.Vector as V
import qualified Data.IntMap as IM
import Data.Reflection (Given, given)
import Chimera.State
import Chimera.Engine.Core
import Chimera.Engine.Scripts
-- | Common enemy movement patterns: fly straight in and out, leave along
-- a vector, approach on a curve, or enter and stay.
data MotionCommon = Straight | Affine Vec2 | Curve Vec2 | Stay
-- | Register an effect and remember its index on the piece, so it can be
-- cleaned up together with its owner.
enemyEffect :: (HasPiece c, HasChara c) => Effect -> Danmaku c ()
enemyEffect e = do
  n <- addEffect e
  zoom _1 $ effectIndexes %= (n:)
-- | Fade an effect in over @n@ frames, ramping alpha with a sine curve.
effFadeIn :: Int -> Effect -> Effect
effFadeIn n e = let y x = sin $ x*(pi/2) in
  effColored (Color 1 1 1 . y) (drawing .= (e^.drawing)) n e
-- | Fade an effect out over @n@ frames with a cosine ramp; the piece is
-- marked 'Dead' once its counter reaches @n@.
effFadeOut :: Int -> Effect -> Effect
effFadeOut n e = let y x = cos $ (x*pi/2) in
  effColored (Color 1 1 1 . y) (drawing .= (color (Color 1 1 1 0) $ e^.drawing)) n e
  & runAuto %~ (>> go)
  where
    go = do
      c <- (^.counter) `fmap` use self
      zoom _1 $ when (n == c) $ statePiece .= Dead
-- | Spawn animation played where an enemy appears; grows 1% per frame.
effEnemyStart :: (Given Resource) => Vec2 -> Effect
effEnemyStart = go . effCommonAnimated 2 where
  go e = e & size .~ V2 0.8 0.8 & slowRate .~ 6 & runAuto %~ (>> (zoom _1 $ size *= 1.01))
-- | Rotating attack aura around an enemy; variant @i@ (0..2) picks the
-- sprite and rotation speed.  Scales up during the first 50 frames.
effEnemyAttack :: (Given Resource) => Int -> Vec2 -> Effect
effEnemyAttack i p = def & pos .~ p & scaleRate .~ 0 & runAuto .~ run
  where
    resource = given :: Resource
    run :: Danmaku EffectPiece ()
    run = zoom _1 $ do
      use counter >>= \c -> when (c <= 50) $ scaleRate += 1/50
      ang += anglePlus i
      use ang >>= \a -> drawing .= do
        rotateR a $ bitmap $ (resource^.effectImg) V.! 3 V.! i
    -- per-variant angular speed; only variants 0..2 are defined
    anglePlus :: Int -> Double
    anglePlus 0 = 1/300
    anglePlus 1 = -2/300
    anglePlus 2 = 3/300
    anglePlus _ = error "otherwise case in anglePlus"
-- | HUD ring that follows the player: a circle with one bullet sprite
-- per remaining bomb, slowly rotating with the frame counter.
effPlayerBack :: (Given Resource) => Effect
effPlayerBack = def & runAuto .~ do
  p <- (^.player) `fmap` use env
  c <- (^.counter) `fmap` use self
  let n = p^.bombCount
  zoom _1 $ do
    let r = 65
    pos .= (p^.pos)
    scaleRate .= 0.6
    drawing .= do
      color white $ thickness 1.0 $ circleOutline r
      forM_ [1..n] $ \i ->
        translate (V2 r 0 `rotate2` (2*pi*fromIntegral i/fromIntegral n + fromIntegral c*5*pi/360))
          $ color white $ (makeBullet BallMedium Yellow def :: Bullet)^.drawing
-- | Show character portrait @n@ at position @p@ with a 30-frame fade-in;
-- returns the effect index so it can be removed with 'delCharacter'.
character :: (Given Resource) => Int -> Vec2 -> Stage Int
character n p = do
  k <- lift $ addEffect $ effFadeIn 30 $ eff
  lift $ zoom _2 $ sceneEffects %= (k:)
  return k
  where
    resource = given :: Resource
    eff = def
      & pos .~ p
      & drawing .~ (draw $ bitmap $ (resource^.portraits) V.! n)
      & zIndex .~ Foreground
-- | Fade out and remove the portrait effect with the given index.
delCharacter :: Int -> Stage ()
delCharacter c = lift $ zoom _2 $ effects %= IM.adjust (effFadeOut 30) c
-- | Drive a piece along one of the 'MotionCommon' patterns, switching its
-- state between Standby/Attack/Alive/Dead based on the frame counter.
motionCommon :: (HasPiece c, HasObject c) => Int -> MotionCommon -> State c ()
-- enter downward, attack for @time@ frames, leave upward, then die
motionCommon time (Straight) = do
  c <- use counter
  when (c == 0) $ spXY .= V2 0 1.5
  when (c == 120) $ do
    spXY .= 0
    statePiece .= Attack
  when (c == time + 120) $ do
    spXY .= V2 0 (-1.5)
    statePiece .= Alive
  when (c > time + 300) $ statePiece .= Dead
-- like Straight, but leaves along the given velocity vector
motionCommon time (Affine v) = do
  c <- use counter
  when (c == 0) $ spXY .= V2 0 1.5
  when (c == 120) $ do
    spXY .= 0
    statePiece .= Attack
  when (c == time + 120) $ do
    spXY .= v
    statePiece .= Alive
  when (c > time + 300) $ statePiece .= Dead
-- curved fly-through: constant acceleration added each frame
motionCommon _ (Curve acc) = do
  c <- use counter
  when (c == 0) $ spXY .= V2 0 3
  when (c == 20) $ statePiece .= Attack
  when (c > 300) $ statePiece .= Dead
  spXY %= (+ acc)
-- enter and remain on screen attacking indefinitely
motionCommon _ (Stay) = do
  c <- use counter
  when (c == 0) $ spXY .= V2 0 1.5
  when (c == 120) $ do
    spXY .= 0
    statePiece .= Attack
-- | Standard enemy at position @p@ with @h@ hit points, facing downward,
-- in 'Standby' state until its motion script activates it.
initEnemy :: (Given Resource) => Vec2 -> Int -> Enemy
initEnemy p h = def
  & pos .~ p & hp .~ h & size .~ V2 10 10 & ang .~ -pi/2
  & statePiece .~ Standby & drawing .~ (bitmap $ (resource^.charaImg) V.! 1)
  where
    resource = given :: Resource
-- | Generic small-fry behaviour: run the motion script, and while in
-- 'Attack' state fire one aimed bullet of the given kind/color every
-- @time@ frames.
zakoCommon :: (Given Resource, HasChara c, HasPiece c, HasObject c) =>
  Int -> State c () -> Int -> BKind -> BColor -> Danmaku c ()
zakoCommon _ mot time bk c = do
  e <- use self
  zoom _1 mot
  ang' <- anglePlayer
  when ((e^.counter) `mod` time == 0 && e^.statePiece == Attack) $
    shots $ return $ makeBullet bk c def
      & pos .~ (e^.pos) & speed .~ 2 & ang .~ ang'
-- | Debug danmaku: a slowly rotating 20-way ring fired every 4 frames
-- while the enemy is stationary.
debug :: (Given Resource) => Danmaku Chara ()
debug = do
  setName "デバッグ用弾幕"
  e <- use self
  zoom _1 $ motionCommon 100 Stay
  let cnt = e ^. counter
  let n = 20
  when (cnt `mod` 4 == 0 && e ^. spXY == 0) $
    shots $ flip map [1..n] $ \i ->
      makeBullet BallTiny Red def
      & pos .~ (e^.pos) & speed .~ 0.5
      & ang .~ i*2*pi/n + (fromIntegral cnt)/100
-- | Player bomb: clears enemy bullets inside its radius and recursively
-- spawns smaller bombs (radius / 1.4) at each cleared bullet's position.
chaosBomb :: (Given Resource) => Vec2 -> Bullet
chaosBomb p = makeBullet BallFrame Magenta def
  & pos .~ p & size .~ 100 & group .~ None & runAuto .~ run
  where
    -- child bomb spawned at a cleared bullet, scaled down from parent e
    bomb :: (Given Resource, HasPiece c, HasObject c) => c -> Bullet -> Bullet
    bomb e b = chaosBomb (b^.pos) & size .~ (e^.size) / 1.4
    -- live enemy bullet within the bomb's (squared) radius?
    inRange :: Piece -> Bullet -> Bool
    inRange e b = b^.statePiece /= Dead && b^.group == GEnemy &&
      (e^.size^._x)^(2 :: Int) > (quadrance $ b^.pos - e^.pos)
    run :: (Given Resource) => Danmaku Piece ()
    run = do
      e <- use self
      when (e^.counter == 1) $ effs $ [eff e]
      -- frame 5: kill bullets in range, then insert the child bombs
      when (e^.counter == 5) $ zoom _2 $ do
        bs <- liftM (IM.filter (inRange e)) (use bullets)
        bullets %= \y -> IM.foldrWithKey' (\k _ -> IM.adjust (statePiece .~ Dead) k) y bs
        bullets %= \y -> IM.foldr insertIM y $ fmap (bomb e) bs
      zoom _1 $ do
        c <- use counter
        when (c == 10) $ statePiece .= Dead
    -- blast animation scaled to the bomb's radius
    eff :: (Given Resource) => Piece -> Effect
    eff b = effCommonAnimated 4 (b^.pos)
      & scaleRate .~ (b^.size^._x / 120) & zIndex .~ Background
-- | Bomb attack: a single chaos bomb at the player's position.
silentBomb :: (Given Resource, HasChara c, HasObject c) => State c [Bullet]
silentBomb = use pos >>= return . return . chaosBomb
-- | Normal shot: four upward diamond bullets flanking the player.
fourDiamond :: (Given Resource, HasChara c, HasObject c) => State c [Bullet]
fourDiamond = use pos >>= \p -> return $
  [def' & pos .~ p + V2 5 0,
   def' & pos .~ p + V2 15 0,
   def' & pos .~ p - V2 5 0,
   def' & pos .~ p - V2 15 0]
  where
    def' = makeBullet Diamond Red def
      & speed .~ 15 & ang .~ pi/2 & group .~ GPlayer
-- | Smoke-test stage: one enemy per bullet kind, each firing a ring.
stageTest :: (Given Resource) => Stage ()
stageTest = do
  let e r = keeper $ initEnemy (V2 320 (-40)) 10 & runAuto .~ r
  e $ zakoCommon 0 (motionCommon 100 Stay) 50 BallLarge Red
  e $ zakoCommon 0 (motionCommon 100 Stay) 50 BallMedium Red
  e $ zakoCommon 0 (motionCommon 100 Stay) 50 BallSmall Red
  e $ zakoCommon 0 (motionCommon 100 Stay) 50 Oval Red
  e $ zakoCommon 0 (motionCommon 100 Stay) 50 Diamond Red
  e $ zakoCommon 0 (motionCommon 100 Stay) 50 BallFrame Red
  e $ zakoCommon 0 (motionCommon 100 Stay) 50 Needle Red
  e $ zakoCommon 0 (motionCommon 100 Stay) 50 BallTiny Red
-- | Angle from the current piece toward the player.
anglePlayer :: (HasObject c) => Danmaku c Double
anglePlayer = do
  e <- use self
  p <- getPlayer
  return $ (pi/2 +) $ (\(V2 x y) -> atan2 x y) $ (e^.pos - p^.pos)
| myuon/Chimera | Chimera/Scripts/Common.hs | mit | 7,018 | 0 | 29 | 1,848 | 3,389 | 1,688 | 1,701 | 176 | 4 |
-- This binary sanitizes html-fragments passed in to avoid
-- cross-site scripting (XSS) attacks.
--
-- The string to be sanitized is read from stdin and the result is written
-- to stdout.
module Main where
import Data.Text.IO as D (hGetContents, hPutStr)
import Text.HTML.SanitizeXSS (sanitizeBalance)
import System.IO as S (hClose, stdin, stdout)
-- | Read an HTML fragment from stdin, sanitize it against XSS, write the
-- result to stdout, then close both handles.
main :: IO ()
main = do
  -- sanitize
  input <- D.hGetContents S.stdin
  D.hPutStr S.stdout $ sanitizeBalance input
  -- clean up
  S.hClose S.stdin
  S.hClose S.stdout
| universal/rails_pandoc | haskell/sanitize.hs | mit | 535 | 0 | 10 | 113 | 116 | 67 | 49 | 9 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
-- |
-- Module : Data.Binary.Tagged.Internal
-- Copyright : (c) Justin Le 2015
-- License : MIT
--
-- Maintainer : [email protected]
-- Stability : unstable
-- Portability : portable
--
-- Internals for the library, exported in case you should need it.
-- Usually, the parts you would need should be re-exported in
-- "Data.Binary.Tagged".
--
module Data.Binary.Tagged.Internal (
-- * Data types
Tagged -- abstract, instances: Show, Eq, Binary, Typeable, Generic
, TagFingerprint -- abstract, instances: Show, Eq, Ord, Binary, Typeable, Generic, Default
-- * Tagging and extracting data
, tag -- :: Typeable a => a -> Tagged a
, getTagged -- :: Typeable a => Tagged a -> Maybe a
, tagMatched -- :: Typeable a => Tagged a -> Bool
-- * 'TagFingerprint' utilities
, typeFingerprint -- :: Typeable a => a -> TagFingerprint
, tagFingerprint -- :: Tagged a -> TagFingerprint
, bsFingerprint -- :: ByteString -> Maybe TagFingerprint
, emptyTagFP -- :: TagFingerprint
) where
import Control.Monad hiding (mapM_)
import Data.Binary
import Data.Binary.Get
import Data.ByteString.Lazy.Char8 as LC
import Data.Digest.Pure.MD5
import Data.Maybe
import Data.Typeable
import GHC.Generics
import Prelude.Compat
-- | A data type tupling together data with a 'TagFingerprint',
-- representing data tagged with its type.
--
-- It's best to interface directly with data using 'encodeTagged',
-- 'decodeTagged', etc, using 'tag' to tag data and 'extractTagged' to
-- extract data from valid tagged data. This type is exported mostly when
-- you want to specifically decode a 'ByteString' into tagged data, and
-- manually extract it yourself. If you are writing a framework, it is
-- preferred to handle this for the end user.
-- Binary layout: the TagLead marker bytes, then the fingerprint,
-- then the payload itself.
data Tagged a = Tagged !TagFingerprint !a
  deriving (Show, Eq, Generic, Typeable)

instance Binary a => Binary (Tagged a) where
  put (Tagged fp x) = put TagLead >> put fp >> put x
  get = do
    _ <- get :: Get TagLead
    Tagged <$> get <*> get
-- | A data type representing a fingerprint for a 'Typeable' type.
-- Ideally, this would be 'Data.Typeable.Internal''s own 'Fingerprint'
-- types; however, for some reason, the fingerprints for the same data type
-- from the same modules differ between different GHC backends. So for
-- now, it is just a 'ByteString' representation of the name of the type.
-- This is literally a bad idea, and so two types with the same name but
-- from different modules will share a non-unique 'TagFingerprint'.
-- Hopefully in the future when I find out a way to fix this or the GHC
-- backend maintainers find a way to provide consistent type fingerprints,
-- this will be fixed.
--
-- This type is mostly used for the ability to categorized Tagged items
-- by their type.
--
-- 'emptyTagFP' gives a 'TagFingerprint' that will most likely never be
-- matched by any actual tag from a real type, so can be used as a test if
-- needed. This replaces functionality that used to come from the
-- 'Default' instance.
-- | MD5-based fingerprint wrapping the digest of a type's name.
newtype TagFingerprint = TagFP MD5Digest
  deriving (Show, Typeable, Generic, Eq, Ord)

instance Binary TagFingerprint

-- | 'TagFingerprint' that is meant to never be matched by any actual
-- normal type's 'TagFingerprint'.
emptyTagFP :: TagFingerprint
emptyTagFP = TagFP (md5 "")
-- | Marker written at the start of every 'Tagged' to signify that it is
-- a 'Tagged'.  Its 'get' succeeds only when the exact lead bytes are
-- present in the input.
data TagLead = TagLead

instance Binary TagLead where
  put _ = mapM_ put leadingBytes
  get = mapM_ expectByte leadingBytes >> return TagLead
    where expectByte b = guard . (== b) =<< (get :: Get Word8)

-- | Bytes expected at the very start of serialized 'Tagged' data.
leadingBytes :: [Word8]
leadingBytes = [0xfe,0xfe]
-- | Wrap a value together with the fingerprint of its own type.
tag :: Typeable a => a -> Tagged a
tag x = Tagged fp x
  where
    fp = typeFingerprint x
-- | Compute the 'TagFingerprint' representing a type.  Lazy in its
-- parameter, so passing in undefined works if you want to just get the
-- fingerprint of a specific type without having data of that type:
--
-- > typeFingerprint (undefined :: Int)
--
typeFingerprint :: Typeable a => a -> TagFingerprint
typeFingerprint x = TagFP (md5 (LC.pack (show (typeOf x))))
-- | Extract the value from a 'Tagged', but only if the stored
-- fingerprint matches the fingerprint of the expected (inferred) type;
-- otherwise 'Nothing'.
getTagged :: Typeable a => Tagged a -> Maybe a
getTagged (Tagged tfp x) =
  if tfp == typeFingerprint x
    then Just x
    else Nothing
-- | Check whether the value inside the 'Tagged' matches the fingerprint.
tagMatched :: Typeable a => Tagged a -> Bool
tagMatched t = isJust (getTagged t)
-- | Extract the 'Fingerprint' out of a 'Tagged'.  Mostly used so that you
-- can categorize and associate Tagged items; to check if a 'Tagged' is
-- of a desired typed, 'getTagged' and 'tagMatched' might be more useful.
-- Does not force the payload.
tagFingerprint :: Tagged a -> TagFingerprint
tagFingerprint (Tagged fp _) = fp
-- | Given a 'ByteString' expected to hold tagged data, return the
-- fingerprint it is tagged with, or 'Nothing' when the lead marker or
-- fingerprint cannot be decoded.  Might accidentally "decode" untagged
-- data that happens to start with the marker bytes.
bsFingerprint :: ByteString -> Maybe TagFingerprint
bsFingerprint bs =
  case runGetOrFail getFP bs of
    Left _ -> Nothing
    Right (_, _, fp) -> Just fp
  where
    getFP = do
      _ <- get :: Get TagLead
      get
| mstksg/tagged-binary | Data/Binary/Tagged/Internal.hs | mit | 5,732 | 0 | 12 | 1,281 | 722 | 412 | 310 | 70 | 2 |
import Data.List
import Data.Char
import qualified Data.Map as Map
-- | Count the distinct elements of a list.
numUnique :: (Eq a) => [a] -> Int
numUnique xs = length (nub xs)
{- words "hey these are the words in this sentence" -}
{- group [1,1,1,1,2,2,2,2,3,3,2,2,2,5,6,7] -}
{- group ["boom", "bip", "bip", "boom", "boom"] -}
{- sort [5,4,3,7,2,1] -}
{- group $ sort ["boom", "bip", "bip", "boom", "boom"] -}
-- | Count how many times each word occurs; results sorted by word.
wordNums :: String -> [(String, Int)]
wordNums txt = [ (w, length ws) | ws@(w:_) <- group (sort (words txt)) ]
{- wordNums "hello world" -}
{- tails "party" -}
{- tails [1,2,3] -}
{- "hawaii" `isPrefixOf` "hawaii joe" -}
{- "haha" `isPrefixOf` "ha" -}
{- "ha" `isPrefixOf` "ha" -}
{- any (> 4) [1,2,3] -}
{- any (== 'F') "Frank Sobotka" -}
{- any (\x -> x > 5 && x < 10) [1,4,11] -}
-- | Does the needle occur as a contiguous sublist of the haystack?
-- (Same contract as 'Data.List.isInfixOf'.)
isIn :: (Eq a) => [a] -> [a] -> Bool
isIn needle haystack = or [ needle `isPrefixOf` rest | rest <- tails haystack ]
{- "art" `isIn` "party" -}
{- [1,2] `isIn` [1,3,5] -}
{- "art" `isInfixOf` "party" -}
{- [1,2] `isInfixOf` [1,3,5] -}
{- map ord "abcdefgh" -}
-- | Caesar-shift every character of the message by the given offset.
encode :: Int -> String -> String
encode offset = map shiftChar
  where shiftChar c = chr (ord c + offset)
{- encode 3 "hey mark" -}
-- | Undo a Caesar shift of the given size (inverse of 'encode').
decode :: Int -> String -> String
decode shift msg = [ chr (ord c - shift) | c <- msg ]
{- decode 3 "kh|#pdun" -}
{- foldl (+) 0 (replicate 100 1) -}
{- foldl (+) 0 (replicate 1000000 1) -}
{- foldl' (+) 0 (replicate 1000000 1) -}
{- digitToInt '2' -}
-- | Sum of the decimal digits of a number.
-- NOTE(review): for negative input, 'show' yields a leading '-' which
-- 'digitToInt' rejects — same partiality as the original.
digitSum :: Int -> Int
digitSum n = sum [ digitToInt d | d <- show n ]
{- take 1 [x | x <- [1..], digitSum x == 40] -}
{- find (> 4) [3..7] -}
{- find odd [2,4,6,8,9] -}
{- find (== 'z') "mjolnir" -}
-- | First positive number whose decimal digits sum to 40.
firstTo40 :: Maybe Int
firstTo40 = find (\n -> sum (map digitToInt (show n)) == 40) [1..]
-- | First positive number whose decimal digits sum to the target.
-- Diverges when no such number exists (e.g. target <= 0).
firstTo :: Int -> Maybe Int
firstTo target = find (\n -> sum (map digitToInt (show n)) == target) [1..]
{- phoneBook = -}
{- [("betty", "555-2938"), -}
{- ("bonnie", "452-2928"), -}
{- ("patsy", "493-2928"), -}
{- ("lucille","205-2928"), -}
{- ("wendy", "939-8282"), -}
{- ("penny", "853-2492")] -}
{- findKey :: (Eq k) => k -> [(k, v)] -> v -}
{- findKey key xs = snd . filter (\(k, v) -> key == k) $ xs -}
-- | Look up the first value associated with a key in an association
-- list.  The original hand-rolled recursion duplicated 'Prelude.lookup'
-- (and bound an unused @key@ in its base case); same semantics.
findKey :: (Eq k) => k -> [(k, v)] -> Maybe v
findKey = lookup
{- findKey "penny" phoneBook -}
{- findKey "me" phoneBook -}
{- Map.fromList [(3, "shoes"), (4, "trees"), (5, "bees")] -}
{- Map.fromList [("kima", "greggs"), ("jimmy", "mcnulty"), ("jay", "landsman")] -}
{- Map.fromList [("MS", 1), ("MS", 2), ("MS", 3)] -}
{- phoneBook :: Map.Map String String -}
{- phoneBook = Map.fromList $ -}
{- [("betty", "555-2938"), -}
{- ("bonnie", "452-2928"), -}
{- ("patsy", "493-2928"), -}
{- ("lucille","205-2928"), -}
{- ("wendy", "939-8282"), -}
{- ("penny", "853-2492")] -}
{- Map.lookup "betty" phoneBook -}
{- Map.lookup "wendy" phoneBook -}
{- Map.lookup "me" phoneBook -}
{- let newBook = Map.insert "grace" "341-9021" phoneBook -}
{- Map.lookup "grace" phoneBook -}
{- Map.size phoneBook -}
{- Map.size newBook -}
-- | Keep only the digit characters of a string, converted to Ints.
string2digits :: String -> [Int]
string2digits s = [ digitToInt c | c <- s, isDigit c ]
{- string2digits "948-9282" -}
{- let intBook = Map.map string2digits phoneBook -}
{- Map.lookup "betty" intBook -}
-- | Example phone book with duplicate keys, used by the
-- phoneBookToMap examples.  (Adds the missing top-level signature.)
phoneBook :: [(String, String)]
phoneBook =
  [ ("betty", "555-2938")
  , ("betty", "342-2492")
  , ("bonnie", "452-2928")
  , ("patsy", "493-2929")
  , ("patsy", "493-2928")
  , ("lucille", "205-2928")
  , ("wendy", "939-8282")
  , ("penny", "853-2492")
  , ("penny", "555-2111")
  ]
{- phoneBookToMap :: (Ord k) => [(k, String)] -> Map.Map k String -}
{- phoneBookToMap xs = Map.fromListWith add xs -}
{- where add number1 number2 = number1 ++ ", " ++ number2 -}
{- Map.lookup "patsy" $ phoneBookToMap phoneBook -}
{- Map.lookup "wendy" $ phoneBookToMap phoneBook -}
{- Map.lookup "betty" $ phoneBookToMap phoneBook -}
-- | Collect an association list with duplicate keys into a map from
-- key to the list of all its values.
phoneBookToMap :: (Ord k) => [(k, a)] -> Map.Map k [a]
phoneBookToMap entries = Map.fromListWith (++) [ (k, [v]) | (k, v) <- entries ]
{- Map.lookup "patsy" $ phoneBookToMap phoneBook -}
{- Map.fromListWith max [(2,3), (2,5), (2,100), (3,29), (3,22), (3,11), (4,22), (4,15)] -}
{- Map.fromListWith (+) [(2,3), (2,5), (2,100), (3,29), (3,22), (3,11), (4,22), (4,15)] -}
| yhoshino11/learning_haskell | ch6/ch6.hs | mit | 4,115 | 4 | 10 | 799 | 776 | 439 | 337 | 38 | 1 |
module Salad ()
where
import Salad.Image
| ericvoorhis/glitch-replace | src/Salad.hs | mit | 43 | 0 | 4 | 8 | 12 | 8 | 4 | 2 | 0 |
module YML.LinearGradient
(
Parameters (..)
, LinearFunction
, nullF
, cost
, gradientDescent
)
where
import Data.List (intercalate)
import Data.Vector.Unboxed ((!))
import qualified Data.Vector.Unboxed as V
import YML.Dataset
-- swap comments to show debug traces
-- import Debug.Trace (trace)
-- | No-op stand-in for Debug.Trace.trace: discards the message.
trace :: a -> b -> b
trace _ x = x
--
-- | Gradient-descent hyperparameters: 'alpha' is the learning rate,
-- 'threshold' is the squared-distance convergence bound between steps.
data Parameters = Parameters { alpha :: R , threshold :: R} deriving Show
-- | Linear Function type
-- (hypothesis h(x) = theta . x, stored as the coefficient vector)
data LinearFunction = LinearFunction {thetas :: V.Vector R}
instance Show LinearFunction where
  -- show each coefficient truncated to 5 characters, comma-separated
  show l = intercalate ", " $ map (take 5 . show) $ V.toList (thetas l)
-- | The null function (use the dataset to determine the number of features)
nullF :: Dataset -> LinearFunction
nullF dataset = LinearFunction (V.fromList $ replicate k 0) -- h(x) = 0x + 0
  where
    k = nbFeatures dataset
-- | The hypothesis function (depends on theta)
-- dot product of the coefficient vector with the value's feature vector
h :: LinearFunction -> Value -> Double
h f v = V.foldl (\acc (x,t) -> acc + x*t) 0 (V.zip (xs v) (thetas f))
-- | The function giving the cost of some linear function relatively
-- to some dataset.
cost :: Dataset -> LinearFunction -> Double
cost (Dataset values) f = (sum (map ((**2).dist) values))/(2*m)
where
m = fromIntegral $ length values
dist v = h f v - (y v)
type Variable = Int
-- | Partial derivative of 'cost' with respect to theta_i: each error
-- term is weighted by the i-th feature of its sample.
cost' :: Variable -> Dataset -> LinearFunction -> Double
cost' i (Dataset values) f = total / (2 * m)
  where
    m = fromIntegral (length values)
    total = sum [ (h f v - y v) * (xs v ! i) | v <- values ]
-- | One synchronous gradient-descent update of every theta.  Bails out
-- with 'error' when the parameter vector has diverged (NaN/Infinity),
-- which usually means the learning rate alpha is too large.
oneStepGradient :: Parameters -> Dataset -> LinearFunction -> LinearFunction
oneStepGradient opts dataset f
    | diverged  = error "BAD f: Certainly alpha is too wide"
    | otherwise = trace ((show f) ++ ": " ++ show (cost dataset f))
                        (LinearFunction updated)
  where
    diverged = V.any (\t -> isNaN t || isInfinite t) (thetas f)
    -- theta_j' = theta_j - alpha * d(cost)/d(theta_j)
    updated  = V.imap step (thetas f)
    step j t = t - (alpha opts) * cost' j dataset f
-- | Run gradient descent from the zero function until two successive
-- iterates are closer (squared euclidean distance over thetas) than
-- the configured threshold; the later iterate is returned.
gradientDescent :: Parameters -> Dataset -> LinearFunction
gradientDescent opts ds = pick (zip iterates (tail iterates))
  where
    iterates = iterate (oneStepGradient opts ds) (nullF ds)
    pick     = snd . head . filter converged
    converged :: (LinearFunction, LinearFunction) -> Bool
    converged (LinearFunction us, LinearFunction vs) =
        sqDist us vs < threshold opts
    sqDist us vs = V.foldl (\acc (u, v) -> acc + (u - v) ** 2) 0 (V.zip us vs)
| yogsototh/YML | src/YML/LinearGradient.hs | mit | 2,657 | 0 | 15 | 743 | 899 | 477 | 422 | 45 | 2 |
module Math.NumberTheory.PowersBench
( benchSuite
) where
import Criterion.Main
import System.Random
import Math.NumberTheory.Logarithms (integerLog2)
import Math.NumberTheory.Powers.Squares.Internal
-- | Deterministically generate an integer of at least @bits@ bits,
-- seeded from @salt + bits@ so repeated runs benchmark the same value.
genInteger :: Int -> Int -> Integer
genInteger salt bits = head (dropWhile tooSmall candidates)
  where
    tooSmall n = integerLog2 n < bits
    candidates = scanl grow 1 (randoms (mkStdGen (salt + bits)))
    grow acc r = acc * 2 ^ 31 + abs r
-- | Benchmark group comparing the new Karatsuba square root against
-- the old implementation on one @bits@-bit input.
compareRoots :: Int -> Benchmark
compareRoots bits =
    bgroup ("sqrt" ++ show bits)
        [ bench "new" (nf (fst . karatsubaSqrt) n)
        , bench "old" (nf isqrtA n)
        ]
  where
    n = genInteger 0 bits
benchSuite = bgroup "Powers" $ map compareRoots [2300, 2400 .. 2600]
| cfredric/arithmoi | benchmark/Math/NumberTheory/PowersBench.hs | mit | 670 | 0 | 15 | 145 | 239 | 128 | 111 | 20 | 1 |
{- draw sprite with image clipping -}
{-# LANGUAGE OverloadedStrings #-}
module Lesson13 where
--
import qualified SDL
--
import Data.Monoid
import Data.Word (Word8(..))
import Linear.Affine (Point(..))
import Linear.V2 (V2(..))
import Linear.V3 (V3(..))
import Linear.V4 (V4(..))
import Foreign.C.Types (CInt)
--
import Control.Monad (unless)
--
import qualified Config
--
-- definition of LTexture
-- | A texture paired with its pixel dimensions (width, height).
data LTexture = LTexture {getTx :: SDL.Texture, getWH :: V2 CInt}
-- | Things that can be drawn at an (x, y) position and freed.
class Renderable a where
  -- | Destination rectangle when drawing at (x, y), sized to the item.
  renderQuad :: a -> CInt -> CInt -> Maybe (SDL.Rectangle CInt)
  -- | Draw the item with the given renderer at (x, y).
  render :: SDL.Renderer -> a -> CInt -> CInt -> IO ()
  -- | Release any resources held by the item.
  free :: a -> IO ()
-- An LTexture copies its full texture to a quad at the requested spot.
instance Renderable LTexture where
  renderQuad ltx x y =
    Just $ SDL.Rectangle (P $ V2 x y) $ getWH ltx
  render rdr ltx x y =
    SDL.copy rdr (getTx ltx) Nothing (renderQuad ltx x y)
  free ltx = SDL.destroyTexture (getTx ltx)
-- | Load a BMP file into an 'LTexture', recording its dimensions.
-- All-max RGBA (white, fully opaque) is set as the transparent colour
-- key before the surface is converted to a texture and released.
loadFromFile :: SDL.Renderer -> FilePath -> IO LTexture
loadFromFile rdr path = do
  surface <- SDL.loadBMP path
  dims <- SDL.surfaceDimensions surface
  SDL.surfaceColorKey surface SDL.$= Just (V4 maxBound maxBound maxBound maxBound)
  texture <- SDL.createTextureFromSurface rdr surface
  SDL.freeSurface surface
  return (LTexture texture dims)
--
-- | Saturating arithmetic on bounded numeric types: instead of
-- wrapping around, results clamp at 'maxBound' / 'minBound'.
class (Bounded a, Num a, Ord a) => BoundedNum a where
  -- | Saturating addition.
  (>+<) :: a -> a -> a
  a >+< diff
      | diff <= maxBound - a = a + diff
      | otherwise            = maxBound
  -- | Saturating subtraction.
  (>-<) :: a -> a -> a
  a >-< diff
      | diff <= a - minBound = a - diff
      | otherwise            = minBound
instance BoundedNum Word8
lesson13 :: IO ()
lesson13 = do
-- initialize SDL
SDL.initialize [SDL.InitVideo]
-- create window
window <- SDL.createWindow "Lesson13" Config.winConfig
renderer <- SDL.createRenderer window (-1) Config.rdrConfig
SDL.HintRenderScaleQuality SDL.$= SDL.ScaleLinear
SDL.rendererDrawColor renderer SDL.$=
V4 maxBound maxBound minBound maxBound
gLeftTexture <- loadFromFile renderer "./img/13/left.bmp"
gRightTexture <- loadFromFile renderer "./img/13/right.bmp"
let
loop alpha = do
events <- SDL.pollEvents
let quit = any (== SDL.QuitEvent) $ map SDL.eventPayload events
-- *** beginning of drawing region ***
let checkKeySym (SDL.KeyboardEvent keysym:xs) = keysym : checkKeySym xs
checkKeySym (sym:xs) = checkKeySym xs
checkKeySym [] = []
let pressedKey = map (SDL.keysymKeycode . SDL.keyboardEventKeysym) $ checkKeySym $ map SDL.eventPayload events
let adjustAlpha SDL.KeycodeW = (>+< 9)
adjustAlpha SDL.KeycodeS = (>-< 9)
adjustAlpha _ = (>+< 0)
let alpha' = foldr adjustAlpha alpha pressedKey
SDL.rendererDrawColor renderer SDL.$= V4 maxBound maxBound maxBound maxBound
SDL.clear renderer
-- render with our own function
render renderer gRightTexture 0 0
SDL.textureAlphaMod (getTx gLeftTexture) SDL.$= alpha'
render renderer gLeftTexture 0 0
SDL.present renderer
-- *** end of drawing region ***
unless quit (loop alpha')
loop 255
free gRightTexture
free gLeftTexture
SDL.destroyRenderer renderer
SDL.destroyWindow window
SDL.quit
| rueshyna/sdl2-examples | src/Lesson13.hs | mit | 3,368 | 0 | 20 | 836 | 1,095 | 546 | 549 | 78 | 5 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Response where
import Data.Aeson.TH
import Basal (Url, DateTime)
import Helper (snakise)
data Pronunciation = Pron {
_id :: Int,
_word :: String,
_pathogg :: Url,
_pathmp3 :: Url,
_country :: String,
_langname :: String,
_code :: String,
_username :: String,
_sex :: String,
_rate :: Int,
_numPositiveVotes :: Int,
_numVotes :: Int,
_hits :: Int,
_addtime :: DateTime
} deriving (Show)
$(deriveJSON defaultOptions {fieldLabelModifier = snakise . drop 1} ''Pronunciation)
data Attributes = Attributes { _total :: Int } deriving (Show)
$(deriveJSON defaultOptions {fieldLabelModifier = snakise . drop 1} ''Attributes)
data WordPronunciations = WordPronunciations {
_attributes :: Attributes,
_items :: [Pronunciation]
} deriving (Show)
$(deriveJSON defaultOptions {fieldLabelModifier = snakise . drop 1} ''WordPronunciations)
| lesguillemets/forvo-pronounce.hs | Response.hs | mit | 969 | 0 | 11 | 192 | 271 | 160 | 111 | 30 | 0 |
-- Copyright 2012 Mitchell Kember. Subject to the MIT License.
-- | The main module.
module Main (main) where
import System.Environment (getArgs)
import System.FilePath (replaceExtension)
import System.IO (hPutStrLn, stderr)
import Text.JSON (Result(..), decode)
import qualified Data.ByteString.Lazy as L
import Luminosity.Export (export, extension)
import Luminosity.Render (render)
import Luminosity.Parse ()
import Luminosity.Trace (Scene, mResolutionX, mResolutionY, mSettings)
-- | The current version string.
version :: String
version = "1.0"
-- | The help message: general information such as the version and copyright,
-- and instructions on how to use the program (the usage message).
help :: String
help = "Luminosity version " ++ version
++ " - Copyright 2012 Mitchell Kember\n"
++ "Usage: luminosity input_file[.json] [output_file[.tga]]"
-- | @withFiles input output@ reads @input@, decodes it as a JSON
-- 'Scene', renders and exports it, and writes the image to @output@.
-- A decode failure is reported on stdout instead of writing anything.
-- NOTE(review): uses lazy 'readFile', so read errors surface late —
-- kept as in the original.
withFiles :: FilePath -> FilePath -> IO ()
withFiles input output = do
    contents <- readFile input
    case decode contents of
        Ok scene ->
            let settings = mSettings scene
                width    = mResolutionX settings
                height   = mResolutionY settings
            in  L.writeFile output (export width height (render scene))
        Error s -> putStrLn s
-- | Entry point: dispatch on the command-line arguments.  One file
-- argument derives the output name by swapping the extension; bad
-- usage prints the help text to stderr.
main :: IO ()
main = do
    args <- getArgs
    case args of
        ["-h"]     -> putStrLn help
        ["--help"] -> putStrLn help
        [x, y]     -> withFiles x y
        [x]        -> withFiles x (replaceExtension x extension)
        _          -> hPutStrLn stderr help
| mk12/luminosity | src/Main.hs | mit | 1,617 | 0 | 15 | 329 | 378 | 207 | 171 | 30 | 5 |
module Feature.PgVersion96Spec where
import Network.Wai (Application)
import Test.Hspec
import Test.Hspec.Wai
import Test.Hspec.Wai.JSON
import Protolude hiding (get)
import SpecHelper
spec :: SpecWith Application
spec =
describe "features supported on PostgreSQL 9.6" $ do
context "GUC headers" $ do
it "succeeds setting the headers" $ do
get "/rpc/get_projects_and_guc_headers?id=eq.2&select=id"
`shouldRespondWith` [json|[{"id": 2}]|]
{matchHeaders = [
matchContentTypeJson,
"X-Test" <:> "key1=val1; someValue; key2=val2",
"X-Test-2" <:> "key1=val1"]}
get "/rpc/get_int_and_guc_headers?num=1"
`shouldRespondWith` [json|1|]
{matchHeaders = [
matchContentTypeJson,
"X-Test" <:> "key1=val1; someValue; key2=val2",
"X-Test-2" <:> "key1=val1"]}
post "/rpc/get_int_and_guc_headers" [json|{"num": 1}|]
`shouldRespondWith` [json|1|]
{matchHeaders = [
matchContentTypeJson,
"X-Test" <:> "key1=val1; someValue; key2=val2",
"X-Test-2" <:> "key1=val1"]}
it "fails when setting headers with wrong json structure" $ do
get "/rpc/bad_guc_headers_1"
`shouldRespondWith`
[json|{"message":"response.headers guc must be a JSON array composed of objects with a single key and a string value"}|]
{ matchStatus = 500
, matchHeaders = [ matchContentTypeJson ]
}
get "/rpc/bad_guc_headers_2"
`shouldRespondWith`
[json|{"message":"response.headers guc must be a JSON array composed of objects with a single key and a string value"}|]
{ matchStatus = 500
, matchHeaders = [ matchContentTypeJson ]
}
get "/rpc/bad_guc_headers_3"
`shouldRespondWith`
[json|{"message":"response.headers guc must be a JSON array composed of objects with a single key and a string value"}|]
{ matchStatus = 500
, matchHeaders = [ matchContentTypeJson ]
}
post "/rpc/bad_guc_headers_1" [json|{}|]
`shouldRespondWith`
[json|{"message":"response.headers guc must be a JSON array composed of objects with a single key and a string value"}|]
{ matchStatus = 500
, matchHeaders = [ matchContentTypeJson ]
}
it "can set the same http header twice" $
get "/rpc/set_cookie_twice"
`shouldRespondWith` "null"
{matchHeaders = [
matchContentTypeJson,
"Set-Cookie" <:> "sessionid=38afes7a8; HttpOnly; Path=/",
"Set-Cookie" <:> "id=a3fWa; Expires=Wed, 21 Oct 2015 07:28:00 GMT; Secure; HttpOnly"]}
context "Use of the phraseto_tsquery function" $ do
it "finds matches" $
get "/tsearch?text_search_vector=phfts.The%20Fat%20Cats" `shouldRespondWith`
[json| [{"text_search_vector": "'ate':3 'cat':2 'fat':1 'rat':4" }] |]
{ matchHeaders = [matchContentTypeJson] }
it "finds matches with different dictionaries" $
get "/tsearch?text_search_vector=phfts(german).Art%20Spass" `shouldRespondWith`
[json| [{"text_search_vector": "'art':4 'spass':5 'unmog':7" }] |]
{ matchHeaders = [matchContentTypeJson] }
it "can be negated with not operator" $
get "/tsearch?text_search_vector=not.phfts(english).The%20Fat%20Cats" `shouldRespondWith`
[json| [
{"text_search_vector": "'fun':5 'imposs':9 'kind':3"},
{"text_search_vector": "'also':2 'fun':3 'possibl':8"},
{"text_search_vector": "'amus':5 'fair':7 'impossibl':9 'peu':4"},
{"text_search_vector": "'art':4 'spass':5 'unmog':7"}]|]
{ matchHeaders = [matchContentTypeJson] }
it "can be used with or query param" $
get "/tsearch?or=(text_search_vector.phfts(german).Art%20Spass, text_search_vector.phfts(french).amusant, text_search_vector.fts(english).impossible)" `shouldRespondWith`
[json|[
{"text_search_vector": "'fun':5 'imposs':9 'kind':3" },
{"text_search_vector": "'amus':5 'fair':7 'impossibl':9 'peu':4" },
{"text_search_vector": "'art':4 'spass':5 'unmog':7"}
]|] { matchHeaders = [matchContentTypeJson] }
it "should work when used with GET RPC" $
get "/rpc/get_tsearch?text_search_vector=phfts(english).impossible" `shouldRespondWith`
[json|[{"text_search_vector":"'fun':5 'imposs':9 'kind':3"}]|]
{ matchHeaders = [matchContentTypeJson] }
| diogob/postgrest | test/Feature/PgVersion96Spec.hs | mit | 4,648 | 0 | 20 | 1,225 | 588 | 349 | 239 | -1 | -1 |
-- | Core module
{-# LANGUAGE OverlappingInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UnicodeSyntax #-}
module T2A.Core
( AgdaSignature
( Signature
, ScopedSignature
)
, buildSignature
) where
import Data.List (isPrefixOf)
import Data.Map as M (lookup)
import Data.Proof (ProofMap, getParents)
import Data.TSTP
( F (..)
, Formula (..)
, Role (..)
, Source (..)
)
import Utils.Functions (βshow, (▪))
-- Single function signature
-- | An Agda type signature @α : τ@
data AgdaSignature = Signature String [Formula]
-- ^ Regular top level signature
| ScopedSignature String [Formula]
-- ^ Fully scoped signature with no newly
-- introduced type variables
deriving (Eq)
-- Pretty prints an `AgdaSignature`
instance Show AgdaSignature where
show (Signature α ρ) = α ▪ ":" ▪ ρ
show (ScopedSignature α (x:xs)) = α ▪ ":" ▪ ρ
where
-- p ∷ String -- FIX
ρ = foldl ((▪) . (▪ '→')) (βshow x) xs
show (ScopedSignature _ []) = error "ScopedSignature empty" --FIX
-- Signatures are ordered solely by their name ('fname'); the formula
-- lists are ignored.  The other Ord methods use the defaults derived
-- from (<=).
instance Ord AgdaSignature where
  a <= b = fname a <= fname b
-- | Retrieve the name component of a signature, for either
-- constructor.
fname ∷ AgdaSignature → String
fname sig = case sig of
  Signature n _       → n
  ScopedSignature n _ → n
-- | Given a proof map ω and some formula name φ, construct
-- the appropriated 'AgdaSignature' based on the parents of φ
buildSignature ∷ ProofMap → String → Maybe AgdaSignature
buildSignature ω φ
| "subgoal" `isPrefixOf` φ = Nothing
| "negate" `isPrefixOf` φ = Nothing
| otherwise = do
φ₁ ∷ F ← M.lookup φ ω
let γ ∷ Role
γ = role φ₁
let ζ ∷ Formula
ζ = formula φ₁
let β ∷ Source
β = source φ₁
let ρ ∷ [Formula]
ρ = case β of
Inference _ _ ρ₁ → map formula $ getParents ω ρ₁
_ → []
if γ `elem` [Axiom, Conjecture]
then Nothing
else return $ Signature ("fun-" ++ φ) (ρ ++ [ζ])
| agomezl/tstp2agda | src/T2A/Core.hs | mit | 2,232 | 12 | 16 | 725 | 584 | 321 | 263 | 52 | 3 |
import Data.Numbers
import Data.Numbers.Primes
import Data.List
import qualified Data.Set as S
-- | Decimal digits of a non-negative integer, most significant first.
-- (As in the original, a negative input is unsupported: 'show' would
-- yield a leading @\'-\'@ that is not a digit.)
toIntegerList :: Integer -> [Integer]
toIntegerList n = map toInt (show n)
  where
    -- Convert one digit character directly instead of round-tripping
    -- through 'read', which is slow and partial.
    toInt c = fromIntegral (fromEnum c - fromEnum '0')
digitalSum n = foldr1 (+) (toIntegerList n)
-- | Naive quicksort.  Bug fix: the original kept only @z < x@ and
-- @z > x@, silently DROPPING every element equal to the pivot other
-- than the pivot itself; 'partition' keeps them (and walks the list
-- once instead of twice).
quicksort :: (Ord a) => [a] -> [a]
quicksort [] = []
quicksort (x:xs) = quicksort smaller ++ [x] ++ quicksort rest
  where
    -- 'rest' holds everything >= x, so duplicates of the pivot survive.
    (smaller, rest) = partition (< x) xs
ans = [digitalSum (a^b) | a <- [1..100], b <- [1..100]] | stefan-j/ProjectEuler | q56.hs | mit | 438 | 1 | 10 | 97 | 230 | 121 | 109 | 11 | 1 |
--
-- Skeleton for Salsa parser
-- To be used at the exam for Advanced Programming, B1-2013
--
module SalsaParser
(
Program
, Error
, parseString
, parseFile
, reserved
) where
import SalsaAst
import SimpleParse
import Data.Char (isLetter, isDigit, isUpper, isLower)
-- A string is used to signal an error
type Error = String
-- Reserved words
reserved :: [String]
reserved = ["viewdef", "rectangle", "circle", "view", "group",
"blue", "plum", "red", "green", "orange"]
-- | True when the word is one of Salsa's reserved words.
isReserved :: String -> Bool
isReserved = (`elem` reserved)
-- | Top-level entry point: parse a whole Salsa program from a string.
-- Trailing whitespace is allowed; the first complete parse is taken,
-- and 'Left' reports the raw input when no full parse exists.
parseString :: String -> Either Error Program
parseString input =
    case parse fullProgram input of
      []          -> Left ("unable to parse input: " ++ input)
      (r, _) : _  -> Right r
  where
    fullProgram = do
        r <- program
        spaces -- allows trailing whitespace in a program
        eof
        return r
-- | Top-level entry point reading the program text from a file and
-- delegating to 'parseString'.
parseFile :: FilePath -> IO (Either Error Program)
parseFile path = fmap parseString (readFile path)
-- Program parser
program :: Parser Program
program = defComs
-- DefComs parser
defComs :: Parser [DefCom]
defComs = do d <- defCom
ds <- defComs_
return (d:ds)
-- DefComs* parser
defComs_ :: Parser [DefCom]
defComs_ = (do d <- defCom
ds <- defComs_
return (d:ds))
<|> return []
-- DefCom parser
defCom :: Parser DefCom
defCom = (do d <- def
return (Def d))
<|> (do c <- com
return (Com c))
-- Definition parser
def :: Parser Definition
def = (do symbol "viewdef"
v <- vIdent
e0 <- expr
e1 <- expr
return (Viewdef v e0 e1))
<|> (do symbol "rectangle"
s <- sIdent
e0 <- expr
e1 <- expr
e2 <- expr
e3 <- expr
c <- col
return (Rectangle s e0 e1 e2 e3 c))
<|> (do symbol "circle"
s <- sIdent
e0 <- expr
e1 <- expr
e2 <- expr
c <- col
return (Circle s e0 e1 e2 c))
<|> (do symbol "view"
v <- vIdent
return (View v))
<|> (do symbol "group"
v <- vIdent
schar '['
vs <- vIdents
schar ']'
return (Group v vs))
-- Command parser
com :: Parser Command
com = com1 >>= com_
-- Command* parser
com_ :: Command -> Parser Command
com_ c0 = (do symbol "||"
c1 <- com1
com_ (Par c0 c1))
<|> return c0
-- Command1 parser
com1 :: Parser Command
com1 = com2 >>= com1_
-- Command1* parser
com1_ :: Command -> Parser Command
com1_ c = (do schar '@'
v <- vIdent
com1_ (At c v))
<|> return c
-- Command2 parser
com2 :: Parser Command
com2 = (do ss <- sIdents
symbol "->"
p <- pos
return (Move ss p))
<|> (do schar '{'
c <- com
schar '}'
return c)
-- VIdents parser
vIdents :: Parser [Ident]
vIdents = do v <- vIdent
vs <- vIdents_
return (v:vs)
-- VIdents* parser
vIdents_ :: Parser [Ident]
vIdents_ = (do many1 space -- identifiers must be separated by whitespace
v <- vIdent
vs <- vIdents_
return (v:vs))
<|> return []
-- SIdents parser
sIdents :: Parser [Ident]
sIdents = do s <- sIdent
ss <- sIdents_
return (s:ss)
-- SIdents* parser
sIdents_ :: Parser [Ident]
sIdents_ = (do many1 space -- identifiers must be separated by whitespace
s <- sIdent
ss <- sIdents_
return (s:ss))
<|> return []
-- Pos parser
pos :: Parser Pos
pos = (do schar '('
e0 <- expr
schar ','
e1 <- expr
schar ')'
return (Abs e0 e1))
<|> (do schar '+'
schar '('
e0 <- expr
schar ','
e1 <- expr
schar ')'
return (Rel e0 e1))
-- Expr parser
expr :: Parser Expr
expr = prim >>= expr_
-- Expr* parser
expr_ :: Expr -> Parser Expr
expr_ e0 = (do schar '+'
e1 <- prim
expr_ (Plus e0 e1))
<|> (do schar '-'
e1 <- prim
expr_ (Minus e0 e1))
<|> return e0
-- Prim parser
prim :: Parser Expr
prim = (do i <- integer
return (Const i))
<|> (do s <- sIdent
schar '.'
proj s)
<|> (do schar '('
e <- expr
schar ')'
return e)
-- This parser function handles the coordinate selection in Prim expressions
proj :: Ident -> Parser Expr
proj s = (do schar 'x'
return (Xproj s))
<|> (do schar 'y'
return (Yproj s))
-- Colour parser
col :: Parser Colour
col = (symbol "blue" >> return Blue)
<|> (symbol "plum" >> return Plum)
<|> (symbol "red" >> return Red)
<|> (symbol "green" >> return Green)
<|> (symbol "orange" >> return Orange)
-- integers
integer :: Parser Integer
integer = token (do intstr <- many1 $ satisfy isDigit
return (read intstr))
-- identifiers
vIdent :: Parser Ident
vIdent = ident isUpper
sIdent :: Parser Ident
sIdent = ident isLower
ident :: (Char -> Bool) -> Parser Ident
ident leading = token (do c <- satisfy leading
cs <- letdigs
if (c:cs) `elem` reserved
then reject
else return (c:cs))
where letter = satisfy isLetter
digit = satisfy isDigit
letdigs = many (letter <|> digit <|> char '_')
| borgsmidt/adv-prog-2013 | src/salsa/SalsaParser.hs | mit | 6,092 | 0 | 14 | 2,445 | 1,882 | 912 | 970 | 177 | 2 |
module CIS194.Week03.GolfSpec where
import CIS194.Week03.Golf
import Test.Hspec
spec :: Spec
spec = do
describe "skips" $ do
it "convert a list into a list of lists" $ do
skips "ABCD" `shouldBe` ["ABCD", "BD", "C", "D"]
skips "hello!" `shouldBe` ["hello!", "el!", "l!", "l", "o", "!"]
skips [1] `shouldBe` [[1]]
skips [True, False] `shouldBe` [[True, False], [False]]
describe "localMaxima" $
it "finds a local maxima of a given list" $ do
localMaxima [2,9,5,6,1] `shouldBe` [9,6]
localMaxima [2,3,4,1,5] `shouldBe` [4]
localMaxima [1,2,3,4,5] `shouldBe` []
describe "histogram" $
it "renders a histogram for a given list" $ do
histogram [1,1,1,5] `shouldBe` unlines [ " * "
, " * "
, " * * "
, "=========="
, "0123456789"
]
histogram [1,4,5,4,6,6,3,4,2,4,9] `shouldBe` unlines [ " * "
, " * "
, " * * "
, " ****** *"
, "=========="
, "0123456789"
]
| acamino/cis-194-2013 | test/CIS194/Week03/GolfSpec.hs | mit | 1,649 | 0 | 15 | 941 | 390 | 225 | 165 | 29 | 1 |
-- ========================================================================== --
-- Main program
--------------------------------------------------------------------------------
module RS_Main where
import RS_MergeSort
-- import HeapSort (heapSort, bottomUpHeapSort)
-- import BraunHeap (braunSort, bottomUpBraunSort)
-- import RedBlackTree (redBlackSort, redBlackSort')
-- import Trees12 (sort12)
-- import SplaySort (splaySort)
-- import PairingHeap (pairingSort, mpPairingSort, optimalMergeSort)
import RS_QuickSort (introSort)
-- import AdaptiveHeapSort (adaptiveHeapSort)
-- import DigitalSort (intRadixSort)
-- import FingerSearchtree (fingerTreeSort) -- requires polymorphic recursion
-- import MargeSort (margeSort, naturalMargeSort) -- requires polymorphic recursion
-- import QuickSortInPlace (qsort)
import RS_TestData
import System
import RS_Benchmark
import RS_Killer
import RS_RandomMonad
import RS_ListLib
--------------------------------------------------------------------------------
ralf_main
= do args <- getArgs
putStrLn "Sort!\n\n"
case args of
[] -> run sorter 10000
["-d"] -> run checkedSorter 10000
"-d" : n : _ -> run checkedSorter (read n)
n : _ -> run sorter (read n)
--------------------------------------------------------------------------------
run sorter n
= sequence [ benchmark sorter (inputs gs) | gs <- generators n ]
--------------------------------------------------------------------------------
-- | Every sorter from 'sorter', wrapped with the 'check' validator so
-- each run verifies its own output against the reference sort.
checkedSorter :: [(String, [Int] -> Bool)]
checkedSorter = map (\(name, f) -> (name, check f)) sorter
--------------------------------------------------------------------------------
-- Validate a sorter against the reference 'sort' on one input:
-- returns True on agreement, otherwise aborts showing both sequences.
check f a
    | f a == expected = True
    | otherwise = error ("<error: sequence is not sorted>\n"
                           ++ show expected ++ "\n"
                           ++ show (f a))
  where
    expected = sort a
--------------------------------------------------------------------------------
-- |sorter :: (Ord a) => [(String, [a] -> [a])]|
sorter :: [(String, [Int] -> [Int])]
sorter
= [
("m", mergeSort)
-- , ("opt", optimalMergeSort)
-- , ("bum", bottomUpMergeSort)
-- , ("sm", straightMergeSort)
-- , ("oem", oddEvenMergeSort)
-- , ("lpm", lpMergeSort)
-- , ("am", adaptiveMergeSort)
-- , ("nm", naturalMergeSort)
-- , ("snm", symmetricNaturalMergeSort)
-- , ("om", onlineMergeSort)
-- , ("om'", onlineMergeSort')
-- , ("fm", flipSort)
-- , ("om3", onlineMergeSort3)
-- , ("h", heapSort)
-- , ("buh", bottomUpHeapSort)
-- , ("b", braunSort)
-- , ("bub", bottomUpBraunSort)
-- , ("rb", redBlackSort)
-- , ("rb'", redBlackSort')
-- , ("12", sort12)
-- , ("sp", splaySort)
-- , ("ph", pairingSort)
-- , ("mph", mpPairingSort)
, ("iq", introSort)
-- , ("rs", intRadixSort) -- works only on |Int|
-- , ("ft", fingerTreeSort)
-- , ("ma", margeSort)
-- , ("nma", naturalMargeSort)
-- , ("qs", qsort) -- an array-based qsort (median of three)
-- , ("ah", adaptiveHeapSort)
]
-- NB the space leaks if |qsort| or |fingerTreeSort| is included!
--------------------------------------------------------------------------------
generators :: Int -> [[(String, RandomMonad [Int])]]
generators n
= [ [ ("<", return [1 .. n])
-- , ("<=", return (increasing n))
, (">", return [n, n - 1 .. 1])
-- , (">=", return (decreasing n))
-- , ("==", return (replicate n 0))
-- , ("!ms", return (bad4merge n))
-- , ("!mph", return (bad4mpp n))
, ("random", randomInts n) ]
-- , [ ("//" ++ show k, return (repIncreasing k n)) | k <- powers2 n ]
-- , [ ("\\\\" ++ show k, return (repDecreasing k n)) | k <- powers2 n ]
-- , [ ("/\\" ++ show k, return (oscillating k n)) | k <- powers2 n ]
-- , [ ("runs " ++ show k, runs k n) | k <- powers2 n ]
-- , [ ("invs " ++ show k, invs k n) | k <- powers2 n ]
-- , [ ("dis " ++ show k, psorted k n) | k <- powers2 n ]
-- , [ ("rem " ++ show k, rems k n) | k <- powers2 n ]
]
--------------------------------------------------------------------------------
-- Run every named generator, keeping its label with the produced data.
inputs gs = generate (mapM labelled gs)
  where labelled (s, g) = fmap (\x -> (s, x)) g
--------------------------------------------------------------------------------
powers2 n = takeWhile (< n) $ iterate (* 2) 1
-- ========================================================================== --
| gennady-em/haskel | src/RS_Main.hs | gpl-2.0 | 4,611 | 14 | 13 | 1,044 | 597 | 352 | 245 | 39 | 4 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Core
-- License : GPL-2
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- The core actions of Yi. This module is the link between the editor
-- and the UI. Key bindings, and libraries should manipulate Yi
-- through the interface defined here.
module Yi.Core
(
-- * Construction and destruction
startEditor
, quitEditor -- :: YiM ()
-- * User interaction
, refreshEditor -- :: YiM ()
, suspendEditor -- :: YiM ()
, userForceRefresh
-- * Global editor actions
, errorEditor -- :: String -> YiM ()
, closeWindow -- :: YiM ()
, closeWindowEmacs
-- * Interacting with external commands
, runProcessWithInput -- :: String -> String -> YiM String
, startSubprocess -- :: FilePath -> [String] -> YiM ()
, sendToProcess
-- * Misc
, runAction
, withSyntax
, focusAllSyntax
, onYiVar
) where
import Prelude hiding (elem, mapM_, or)
import Control.Concurrent (forkOS, modifyMVar, modifyMVar_
,newMVar, readMVar, threadDelay)
import Control.Exc (ignoringException)
import Control.Exception (SomeException, handle)
import Lens.Micro.Platform (mapped, use, view, (%=), (%~),
(&), (.=), (.~), (^.))
import Control.Monad (forever, void, when)
import Control.Monad.Base (MonadBase (liftBase))
import Control.Monad.Except ()
import Control.Monad.Reader (MonadReader (ask), ReaderT (runReaderT), asks)
import qualified Data.DelayList as DelayList (decrease, insert)
import Data.Foldable (elem, find, forM_, mapM_, or, toList)
import Data.List (partition)
import Data.List.NonEmpty (NonEmpty (..))
import qualified Data.List.PointedList.Circular as PL (PointedList (_focus), length)
import Data.List.Split (splitOn)
import qualified Data.Map as M (assocs, delete, empty, fromList, insert, member)
import Data.Maybe (fromMaybe, isNothing)
import Data.Monoid (First (First, getFirst), (<>), mempty)
import qualified Data.Text as T (Text, pack, unwords)
import Data.Time (getCurrentTime)
import Data.Time.Clock.POSIX (posixSecondsToUTCTime)
import Data.Traversable (forM)
import GHC.Conc (labelThread)
import System.Directory (doesFileExist)
import System.Exit (ExitCode)
import System.IO (Handle, hPutStr, hWaitForInput)
import System.PosixCompat.Files (getFileStatus, modificationTime)
import System.Process (ProcessHandle,
getProcessExitCode,
readProcessWithExitCode,
terminateProcess)
import Yi.Buffer
import Yi.Config
import Yi.Debug (logPutStrLn)
import Yi.Editor
import Yi.Keymap
import Yi.Keymap.Keys
import Yi.KillRing (krEndCmd)
import Yi.Monad (gets)
import Yi.PersistentState (loadPersistentState, savePersistentState)
import Yi.Process
import qualified Yi.Rope as R (YiString, fromString, readFile)
import Yi.String (chomp, showT)
import Yi.Style (errorStyle, strongHintStyle)
import qualified Yi.UI.Common as UI (UI (end, layout, main, refresh, suspend, userForceRefresh))
import Yi.Utils (io)
import Yi.Window (bufkey, dummyWindow, isMini, winRegion, wkey)
uses l f = f <$> use l
-- | Make an action suitable for an interactive run.
-- UI will be refreshed.
interactive :: IsRefreshNeeded -> [Action] -> YiM ()
interactive isRefreshNeeded action = do
  evs <- withEditor $ use pendingEventsA
  logPutStrLn $ ">>> interactively" <> showEvs evs
  -- Record an undo checkpoint in every buffer so this whole
  -- interactive step undoes as one unit.
  withEditor $ buffersA %= (fmap $ undosA %~ addChangeU InteractivePoint)
  mapM_ runAction action
  -- Tell the kill ring that the current command sequence has ended.
  withEditor $ killringA %= krEndCmd
  when (isRefreshNeeded == MustRefresh) refreshEditor
  logPutStrLn "<<<"
  return ()
-- ---------------------------------------------------------------------
-- | Start up the editor, setting any state with the user preferences
-- and file names passed in, and turning on the UI
--
startEditor :: Config -> Maybe Editor -> IO ()
startEditor cfg st = do
let uiStart = startFrontEnd cfg
logPutStrLn "Starting Core"
-- Use an empty state unless resuming from an earlier session and
-- one is already available
let editor = fromMaybe emptyEditor st
-- here to add load history etc?
-- Setting up the 1st window is a bit tricky because most
-- functions assume there exists a "current window"
newSt <- newMVar $ YiVar editor 1 M.empty
(ui, runYi) <- mdo
let handler (exception :: SomeException) =
runYi $ errorEditor (showT exception) >> refreshEditor
inF [] = return ()
inF (e:es) = handle handler $ runYi $ dispatch (e :| es)
outF refreshNeeded acts =
handle handler $ runYi $ interactive refreshNeeded acts
runYi f = runReaderT (runYiM f) yi
yi = Yi ui inF outF cfg newSt
ui <- uiStart cfg inF (outF MustRefresh) editor
return (ui, runYi)
runYi loadPersistentState
runYi $ do
if isNothing st
-- process options if booting for the first time
then postActions NoNeedToRefresh $ startActions cfg
-- otherwise: recover the mode of buffers
else withEditor $ buffersA.mapped %= recoverMode (modeTable cfg)
postActions NoNeedToRefresh $ initialActions cfg ++ [makeAction showErrors]
runYi refreshEditor
UI.main ui -- transfer control to UI
-- | Re-attach a buffer's mode after a session resume: look the mode
-- up by name in the supplied table, falling back to 'emptyMode' when
-- the name is unknown.
recoverMode :: [AnyMode] -> FBuffer -> FBuffer
recoverMode table buffer = apply chosen
  where
    apply (AnyMode m) = setMode0 m buffer
    chosen = fromMaybe (AnyMode emptyMode) (find sameName table)
    sameName (AnyMode m) = modeName m == wanted
    wanted = case buffer of
               FBuffer {bmode = m} -> modeName m
-- | Queue actions for execution by the editor's output handler,
-- optionally requesting a UI refresh afterwards.
postActions :: IsRefreshNeeded -> [Action] -> YiM ()
postActions refreshNeeded actions =
    ask >>= \yi -> liftBase (yiOutput yi refreshNeeded actions)
-- | Display the errors buffer if it is not already visible.
showErrors :: YiM ()
showErrors = withEditor $ do
  bs <- gets $ findBufferWithName "*errors*"
  -- Only split and switch when an "*errors*" buffer actually exists,
  -- so no empty errors window is ever created.
  case bs of
    [] -> return ()
    _ -> do splitE
            switchToBufferWithNameE "*errors*"
-- | Process events by advancing the current keymap automaton and
-- executing the generated actions.
dispatch :: NonEmpty Event -> YiM ()
dispatch (ev :| evs) = do
yi <- ask
(userActions, _p') <- withCurrentBuffer $ do
keymap <- gets (withMode0 modeKeymap)
p0 <- use keymapProcessA
let km = extractTopKeymap $ keymap $ defaultKm $ yiConfig yi
let freshP = Chain (configInputPreprocess $ yiConfig yi) (mkAutomaton km)
p = case computeState p0 of
Dead -> freshP
_ -> p0
(actions, p') = processOneEvent p ev
state = computeState p'
ambiguous = case state of
Ambiguous _ -> True
_ -> False
keymapProcessA .= (if ambiguous then freshP else p')
let actions0 = case state of
Dead -> [EditorA $ do
evs' <- use pendingEventsA
printMsg ("Unrecognized input: " <> showEvs (evs' ++ [ev]))]
_ -> actions
actions1 = [ EditorA (printMsg "Keymap was in an ambiguous state! Resetting it.")
| ambiguous]
return (actions0 ++ actions1, p')
let decay, pendingFeedback :: EditorM ()
decay = statusLinesA %= DelayList.decrease 1
pendingFeedback = do pendingEventsA %= (++ [ev])
if null userActions
then printMsg . showEvs =<< use pendingEventsA
else pendingEventsA .= []
allActions = [makeAction decay] ++ userActions ++ [makeAction pendingFeedback]
case evs of
[] -> postActions MustRefresh allActions
(e:es) -> postActions NoNeedToRefresh allActions >> dispatch (e :| es)
-- | Render a sequence of input events as space-separated text for the
-- status line.
showEvs :: [Event] -> T.Text
showEvs evs = T.unwords [ T.pack (prettyEvent e) | e <- evs ]
-- ---------------------------------------------------------------------
-- Meta operations
-- | Shut Yi down cleanly: save persistent state, terminate every
-- remaining subprocess, then end the UI for good.
quitEditor :: YiM ()
quitEditor = do
    savePersistentState
    onYiVar (terminateSubprocesses (const True))
    withUI (\ui -> UI.end ui True)
-- | Update (visible) buffers if they have changed on disk.
-- FIXME: since we do IO here we must catch exceptions!
checkFileChanges :: Editor -> IO Editor
checkFileChanges e0 = do
now <- getCurrentTime
-- Find out if any file was modified "behind our back" by
-- other processes.
newBuffers <- forM (buffers e0) $ \b ->
let nothing = return (b, Nothing)
in if bkey b `elem` visibleBuffers
then
case b ^. identA of
FileBuffer fname -> do
fe <- doesFileExist fname
if not fe then nothing else do
modTime <- fileModTime fname
if b ^. lastSyncTimeA < modTime
then if isUnchangedBuffer b
then R.readFile fname >>= return . \case
Left m ->
(runDummy b (readOnlyA .= True), Just $ msg3 m)
Right (newContents, c) ->
(runDummy b (revertB newContents (Just c) now), Just msg1)
else return (b, Just msg2)
else nothing
_ -> nothing
else nothing
-- show appropriate update message if applicable
return $ case getFirst (foldMap (First . snd) newBuffers) of
Just msg -> (statusLinesA %~ DelayList.insert msg) e0 {buffers = fmap fst newBuffers}
Nothing -> e0
where msg1 = (1, (["File was changed by a concurrent process, reloaded!"], strongHintStyle))
msg2 = (1, (["Disk version changed by a concurrent process"], strongHintStyle))
msg3 x = (1, (["File changed on disk to unknown encoding, not updating buffer: " <> x], strongHintStyle))
visibleBuffers = bufkey <$> windows e0
fileModTime f = posixSecondsToUTCTime . realToFrac . modificationTime <$> getFileStatus f
runDummy b act = snd $ runBuffer (dummyWindow $ bkey b) b act
-- | Hide selection, clear "syntax dirty" flag (as appropriate).
clearAllSyntaxAndHideSelection :: Editor -> Editor
clearAllSyntaxAndHideSelection = buffersA %~ fmap (clearSyntax . clearHighlight)
where
clearHighlight fb =
-- if there were updates, then hide the selection.
let h = view highlightSelectionA fb
us = view pendingUpdatesA fb
in highlightSelectionA .~ (h && null us) $ fb
-- Focus syntax tree on the current window, for all visible buffers.
focusAllSyntax :: Editor -> Editor
focusAllSyntax e6 = buffersA %~ fmap (\b -> focusSyntax (regions b) b) $ e6
where regions b = M.fromList [(wkey w, winRegion w) | w <- toList $ windows e6, bufkey w == bkey b]
-- Why bother filtering the region list? After all the trees
-- are lazily computed. Answer: focusing is an incremental
-- algorithm. Each "focused" path depends on the previous
-- one. If we left unforced focused paths, we'd create a
-- long list of thunks: a memory leak.
-- | Redraw
refreshEditor :: YiM ()
refreshEditor = onYiVar $ \yi var -> do
let cfg = yiConfig yi
runOnWins a = runEditor cfg
(do ws <- use windowsA
forM ws $ flip withWindowE a)
style = configScrollStyle $ configUI cfg
let scroll e3 = let (e4, relayout) = runOnWins (snapScreenB style) e3 in
-- Scroll windows to show current points as appropriate
-- Do another layout pass if there was any scrolling;
(if or relayout then UI.layout (yiUi yi) else return) e4
e7 <- (if configCheckExternalChangesObsessively cfg
then checkFileChanges
else return) (yiEditor var) >>=
return . clearAllSyntaxAndHideSelection >>=
-- Adjust window sizes according to UI info
UI.layout (yiUi yi) >>=
scroll >>=
-- Adjust point according to the current layout;
return . fst . runOnWins snapInsB >>=
return . focusAllSyntax >>=
-- Clear "pending updates" and "followUp" from buffers.
return . (buffersA %~ fmap (clearUpdates . clearFollow))
-- Display the new state of the editor
UI.refresh (yiUi yi) e7
-- Terminate stale processes.
terminateSubprocesses (staleProcess $ buffers e7) yi var {yiEditor = e7}
where
clearUpdates = pendingUpdatesA .~ mempty
clearFollow = pointFollowsWindowA .~ mempty
-- Is this process stale? (associated with a deleted buffer)
staleProcess bs p = not (bufRef p `M.member` bs)
-- | Suspend the program
suspendEditor :: YiM ()
suspendEditor = withUI UI.suspend
------------------------------------------------------------------------
------------------------------------------------------------------------
-- | Pipe a string through an external command, returning the stdout
-- chomp any trailing newline (is this desirable?)
--
-- Todo: varients with marks?
--
runProcessWithInput :: String -> String -> YiM String
runProcessWithInput cmd inp = do
let (f:args) = splitOn " " cmd
(_,out,_err) <- liftBase $ readProcessWithExitCode f args inp
return (chomp "\n" out)
------------------------------------------------------------------------
-- | Same as 'Yi.Editor.printMsg', but do nothing instead of printing @()@
msgEditor :: T.Text -> YiM ()
msgEditor "()" = return ()
msgEditor s = printMsg s
runAction :: Action -> YiM ()
runAction (YiA act) = act >>= msgEditor . showT
runAction (EditorA act) = withEditor act >>= msgEditor . showT
runAction (BufferA act) = withCurrentBuffer act >>= msgEditor . showT
-- | Show an error on the status line and log it.
errorEditor :: T.Text -> YiM ()
errorEditor s = do
printStatus (["error: " <> s], errorStyle)
logPutStrLn $ "errorEditor: " <> s
-- | Close the current window.
-- If this is the last window open, quit the program.
--
-- CONSIDER: call quitEditor when there are no other window in the
-- 'interactive' function. (Not possible since the windowset type
-- disallows it -- should it be relaxed?)
closeWindow :: YiM ()
closeWindow = do
winCount <- withEditor $ uses windowsA PL.length
tabCount <- withEditor $ uses tabsA PL.length
when (winCount == 1 && tabCount == 1) quitEditor
withEditor tryCloseE
-- | This is a like 'closeWindow' but with emacs behaviour of C-x 0:
-- if we're trying to close the minibuffer or last buffer in the
-- editor, then just print a message warning the user about it rather
-- closing mini or quitting editor.
closeWindowEmacs :: YiM ()
closeWindowEmacs = do
wins <- withEditor $ use windowsA
let winCount = PL.length wins
tabCount <- withEditor $ uses tabsA PL.length
case () of
_ | winCount == 1 && tabCount == 1 ->
printMsg "Attempt to delete sole ordinary window"
| isMini (PL._focus wins) ->
printMsg "Attempt to delete the minibuffer"
| otherwise -> withEditor tryCloseE
onYiVar :: (Yi -> YiVar -> IO (YiVar, a)) -> YiM a
onYiVar f = do
yi <- ask
io $ modifyMVar (yiVar yi) (f yi)
-- | Kill a given subprocess
terminateSubprocesses :: (SubprocessInfo -> Bool) -> Yi -> YiVar -> IO (YiVar, ())
terminateSubprocesses shouldTerminate _yi var = do
let (toKill, toKeep) =
partition (shouldTerminate . snd) $ M.assocs $ yiSubprocesses var
void $ forM toKill $ terminateProcess . procHandle . snd
return (var & yiSubprocessesA .~ M.fromList toKeep, ())
-- | Start a subprocess with the given command and arguments.
startSubprocess :: FilePath
-> [String]
-> (Either SomeException ExitCode -> YiM x)
-> YiM BufferRef
startSubprocess cmd args onExit = onYiVar $ \yi var -> do
let (e', bufref) = runEditor
(yiConfig yi)
(printMsg ("Launched process: " <> T.pack cmd)
>> newEmptyBufferE (MemBuffer bufferName))
(yiEditor var)
procid = yiSubprocessIdSupply var + 1
procinfo <- createSubprocess cmd args bufref
startSubprocessWatchers procid procinfo yi onExit
return (var & yiEditorA .~ e'
& yiSubprocessIdSupplyA .~ procid
& yiSubprocessesA %~ M.insert procid procinfo
, bufref)
where
bufferName = T.unwords [ "output from", T.pack cmd, showT args ]
startSubprocessWatchers :: SubprocessId
-> SubprocessInfo
-> Yi
-> (Either SomeException ExitCode -> YiM x)
-> IO ()
startSubprocessWatchers procid procinfo yi onExit =
mapM_ (\(labelSuffix, run) -> do
threadId <- forkOS run
labelThread threadId (procCmd procinfo ++ labelSuffix))
([("Err", pipeToBuffer (hErr procinfo) (send . append True)) | separateStdErr procinfo] ++
[("Out", pipeToBuffer (hOut procinfo) (send . append False)),
("Exit", waitForExit (procHandle procinfo) >>= reportExit)])
where
send :: YiM () -> IO ()
send a = yiOutput yi MustRefresh [makeAction a]
-- TODO: This 'String' here is due to 'pipeToBuffer' but I don't
-- know how viable it would be to read from a process as Text.
-- Probably not worse than String but needs benchmarking.
append :: Bool -> String -> YiM ()
append atMark =
withEditor . appendToBuffer atMark (bufRef procinfo) . R.fromString
reportExit :: Either SomeException ExitCode -> IO ()
reportExit ec = send $ do
append True $ "Process exited with " <> show ec
removeSubprocess procid
void $ onExit ec
removeSubprocess :: SubprocessId -> YiM ()
removeSubprocess procid = asks yiVar >>= liftBase . flip modifyMVar_ (pure . (yiSubprocessesA %~ M.delete procid))
-- | Appends a 'R.YiString' to the given buffer.
--
-- TODO: Figure out and document the Bool here. Probably to do with
-- 'startSubprocessWatchers'.
appendToBuffer :: Bool -- Something to do with stdout/stderr?
-> BufferRef -- ^ Buffer to append to
-> R.YiString -- ^ Text to append
-> EditorM ()
appendToBuffer atErr bufref s = withGivenBuffer bufref $ do
-- We make sure stdout is always after stderr. This ensures that
-- the output of the two pipe do not get interleaved. More
-- importantly, GHCi prompt should always come after the error
-- messages.
me <- getMarkB (Just "StdERR")
mo <- getMarkB (Just "StdOUT")
let mms = if atErr then [mo, me] else [mo]
forM_ mms (`modifyMarkB` (markGravityAA .~ Forward))
insertNAt s =<< use (markPointA (if atErr then me else mo))
forM_ mms (`modifyMarkB` (markGravityAA .~ Backward))
sendToProcess :: BufferRef -> String -> YiM ()
sendToProcess bufref s = do
yi <- ask
find ((== bufref) . bufRef) . yiSubprocesses <$> liftBase (readMVar (yiVar yi)) >>= \case
Just subProcessInfo -> io $ hPutStr (hIn subProcessInfo) s
Nothing -> printMsg "Could not get subProcessInfo in sendToProcess"
pipeToBuffer :: Handle -> (String -> IO ()) -> IO ()
pipeToBuffer h append = void . ignoringException . forever $ do
_ <- hWaitForInput h (-1)
r <- readAvailable h
append r
waitForExit :: ProcessHandle -> IO (Either SomeException ExitCode)
waitForExit ph =
handle (\e -> return (Left (e :: SomeException))) $ do
mec <- getProcessExitCode ph
case mec of
Nothing -> threadDelay (500*1000) >> waitForExit ph
Just ec -> return (Right ec)
withSyntax :: (Show x, YiAction a x) => (forall syntax. Mode syntax -> syntax -> a) -> YiM ()
withSyntax f = do
b <- gets currentBuffer
act <- withGivenBuffer b $ withSyntaxB f
runAction $ makeAction act
userForceRefresh :: YiM ()
userForceRefresh = withUI UI.userForceRefresh
| formrre/yi | yi-core/src/Yi/Core.hs | gpl-2.0 | 21,340 | 0 | 32 | 6,460 | 5,124 | 2,675 | 2,449 | -1 | -1 |
{-| Implementation of Ganeti Lock field queries
The actual computation of the field values is done by forwarding
the request; so only have a minimal field definition here.
-}
{-
Copyright (C) 2014 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.Query.Locks
( fieldsMap
, RuntimeData
) where
import qualified Text.JSON as J
import Control.Arrow (first)
import Data.Tuple (swap)
import Ganeti.Locking.Allocation (OwnerState(..))
import Ganeti.Locking.Locks (ClientId, ciIdentifier)
import Ganeti.Query.Common
import Ganeti.Query.Language
import Ganeti.Query.Types
-- | The runtime information for locks. As all information about locks
-- is handled by WConfD, the actual information is obtained as live data.
-- The type represents the information for a single lock, even though all
-- locks are queried simultaneously, ahead of time.
type RuntimeData = ( [(ClientId, OwnerState)] -- current state
, [(ClientId, OwnerState)] -- pending requests
)
-- | Obtain the owners of a lock from the runtime data.
getOwners :: RuntimeData -> a -> ResultEntry
getOwners (ownerinfo, _) _ =
rsNormal . map (J.encode . ciIdentifier . fst)
$ ownerinfo
-- | Obtain the mode of a lock from the runtime data.
getMode :: RuntimeData -> a -> ResultEntry
getMode (ownerinfo, _) _
| null ownerinfo = rsNormal J.JSNull
| any ((==) OwnExclusive . snd) ownerinfo = rsNormal "exclusive"
| otherwise = rsNormal "shared"
-- | Obtain the pending requests from the runtime data.
getPending :: RuntimeData -> a -> ResultEntry
getPending (_, pending) _ =
rsNormal . map (swap . first ((:[]) . J.encode . ciIdentifier)) $ pending
-- | List of all lock fields.
lockFields :: FieldList String RuntimeData
lockFields =
[ (FieldDefinition "name" "Name" QFTOther "Lock name",
FieldSimple rsNormal, QffNormal)
, (FieldDefinition "mode" "Mode" QFTOther "Mode in which the lock is\
\ currently acquired\
\ (exclusive or shared)",
FieldRuntime getMode, QffNormal)
, (FieldDefinition "owner" "Owner" QFTOther "Current lock owner(s)",
FieldRuntime getOwners, QffNormal)
, (FieldDefinition "pending" "Pending" QFTOther "Jobs waiting for the lock",
FieldRuntime getPending, QffNormal)
]
-- | The lock fields map.
fieldsMap :: FieldMap String RuntimeData
fieldsMap = fieldListToFieldMap lockFields
| ribag/ganeti-experiments | src/Ganeti/Query/Locks.hs | gpl-2.0 | 3,104 | 0 | 15 | 638 | 468 | 266 | 202 | 37 | 1 |
{-# LANGUAGE CPP #-}
-- Copyright (c) Tuomo Valkonen 2004.
-- Copyright (c) Don Stewart 2004-5.
-- Copyright (c) Jean-Philippe Bernardy 2006,2007.
-- | This is the main module of Yi, called with configuration from the user.
-- Here we mainly process command line arguments.
module Yi.Main (main) where
import Prelude ()
import Control.Monad.Error
import Control.Monad.Trans.Error (Error(..))
import Data.Char
import Data.List (intercalate)
import Distribution.Text (display)
import System.Console.GetOpt
import System.Environment (getArgs)
import System.Exit
#include "ghcconfig.h"
import Yi.Config
import Yi.Config.Default
import Yi.Core
import Yi.Dired
import Paths_yi
#ifdef TESTING
import qualified TestSuite
#endif
#ifdef FRONTEND_COCOA
import HOC (withAutoreleasePool)
#endif
frontendNames :: [String]
frontendNames = fmap fst' availableFrontends
where fst' :: (a,UIBoot) -> a
fst' (x,_) = x
data Err = Err String ExitCode
instance Error Err where
strMsg s = Err s (ExitFailure 1)
-- ---------------------------------------------------------------------
-- | Argument parsing. Pretty standard.
data Opts = Help
| Version
| LineNo String
| EditorNm String
| File String
| Frontend String
| ConfigFile String
| SelfCheck
| Debug
-- | List of editors for which we provide an emulation.
editors :: [(String,Config -> Config)]
editors = [("emacs", toEmacsStyleConfig),
("vim", toVimStyleConfig),
("cua", toCuaStyleConfig)]
options :: [OptDescr Opts]
options =
[ Option [] ["self-check"] (NoArg SelfCheck) "Run self-checks"
, Option ['f'] ["frontend"] (ReqArg Frontend "FRONTEND") frontendHelp
, Option ['y'] ["config-file"] (ReqArg ConfigFile "PATH") "Specify a configuration file"
, Option ['V'] ["version"] (NoArg Version) "Show version information"
, Option ['h'] ["help"] (NoArg Help) "Show this help"
, Option [] ["debug"] (NoArg Debug) "Write debug information in a log file"
, Option ['l'] ["line"] (ReqArg LineNo "NUM") "Start on line number"
, Option [] ["as"] (ReqArg EditorNm "EDITOR") editorHelp
] where frontendHelp = ("Select frontend, which can be one of:\n"
++ intercalate ", " frontendNames)
editorHelp = ("Start with editor keymap, where editor is one of:\n"
++ (intercalate ", " . fmap fst) editors)
-- | usage string.
usage, versinfo :: String
usage = usageInfo ("Usage: yi [option...] [file]") options
versinfo = "yi " ++ display version
-- | Transform the config with options
do_args :: Config -> [String] -> Either Err Config
do_args cfg args =
case (getOpt (ReturnInOrder File) options args) of
(o, [], []) -> foldM getConfig cfg o
(_, _, errs) -> fail (concat errs)
-- | Update the default configuration based on a command-line option.
getConfig :: Config -> Opts -> Either Err Config
getConfig cfg opt =
case opt of
Frontend f -> case lookup f availableFrontends of
Just frontEnd -> return cfg { startFrontEnd = frontEnd }
Nothing -> fail "Panic: frontend not found"
Help -> throwError $ Err usage ExitSuccess
Version -> throwError $ Err versinfo ExitSuccess
Debug -> return cfg { debugMode = True }
LineNo l -> case startActions cfg of
x : xs -> return cfg { startActions = x:makeAction (gotoLn (read l)):xs }
[] -> fail "The `-l' option must come after a file argument"
File filename -> prependAction (fnewE filename)
EditorNm emul -> case lookup (fmap toLower emul) editors of
Just modifyCfg -> return $ modifyCfg cfg
Nothing -> fail $ "Unknown emulation: " ++ show emul
_ -> return cfg
where
prependAction a = return $ cfg { startActions = makeAction a : startActions cfg}
-- ---------------------------------------------------------------------
-- | Static main. This is the front end to the statically linked
-- application, and the real front end, in a sense. 'dynamic_main' calls
-- this after setting preferences passed from the boot loader.
--
main :: Config -> Maybe Editor -> IO ()
main cfg state = do
#ifdef FRONTEND_COCOA
withAutoreleasePool $ do
#endif
args <- getArgs
#ifdef TESTING
when ("--self-check" `elem` args)
TestSuite.main
#endif
case do_args cfg args of
Left (Err err code) ->
do putStrLn err
exitWith code
Right finalCfg ->
do when (debugMode finalCfg) $ initDebug ".yi.dbg"
startEditor finalCfg state
| codemac/yi-editor | src/Yi/Main.hs | gpl-2.0 | 4,903 | 0 | 20 | 1,375 | 1,174 | 629 | 545 | 86 | 11 |
module Main(main) where
import System.IO.Error hiding (catch)
import Control.Monad
import System.Directory
import Data.Maybe
import Data.List
import System.IO (hPutStrLn, stderr)
import System.FilePath
import Control.Exception
import Prelude hiding (catch)
import Data.Ord
import Data.Function
import System.Random
import Control.Monad.State as State
import Control.Applicative
import Data.Binary
import qualified Data.ByteString.Char8 as Str
import Graphics.Rendering.OpenGL as OpenGL
import Graphics.UI.SDL as SDL hiding (SrcAlpha)
import Graphics.Rendering.FTGL as FTGL
import qualified Gen
import SDLUtils
import Tree
import Match.Match
import Drawing
import Listings
import Utils
import Paths_freekick2
data RenderContext = RenderContext {
renderfont :: Font
, smallerfont :: Font
, bgtexture :: TextureObject
}
data WorldContext = WorldContext {
rendercontext :: RenderContext
, worldteams :: TeamStructure
, hometeam :: Maybe (Gen.GenTeam, TeamOwner)
, awayteam :: Maybe (Gen.GenTeam, TeamOwner)
, tactics :: [((Int, Int, Int), Gen.GenFormation)]
}
type TeamStructure = Tree String (String, [Gen.GenTeam])
drawGenScene :: TextureObject -> [Button a] -> IO ()
drawGenScene tex btns = do
clear [ColorBuffer, DepthBuffer]
(w, h) <- getWindowSize
drawBox (Right tex) (color $ Color3 0.05 0.3 (0.1 :: GLfloat)) ((0, 0), (w, h)) (-1) Nothing
mapM_ drawButton btns
glSwapBuffers
type Material = Either SColor TextureObject
data Button a = Button { buttonMaterial :: Material
, buttonBox :: Camera
, buttonLabel :: String
, buttonFont :: Font
, buttonAction :: String -> a
}
drawButton :: Button a -> IO ()
drawButton b = drawBox (buttonMaterial b) (return ()) (buttonBox b) 0 (Just (buttonLabel b, buttonFont b))
modHometeam :: (Maybe (Gen.GenTeam, TeamOwner) -> Maybe (Gen.GenTeam, TeamOwner)) -> WorldContext -> WorldContext
modHometeam f c = c{hometeam = f (hometeam c)}
modAwayteam :: (Maybe (Gen.GenTeam, TeamOwner) -> Maybe (Gen.GenTeam, TeamOwner)) -> WorldContext -> WorldContext
modAwayteam f c = c{awayteam = f (awayteam c)}
type MenuBlock = StateT WorldContext IO
structureTeams :: [Gen.GenTeam] -> TeamStructure
structureTeams ts = f "World" ts (countryContinent . nationToString, continentToString) `g` (Gen.teamnation, nationToString) `g` (Gen.teamdivision, divisionToString)
where f :: (Ord a) => String -> [Gen.GenTeam] -> (Gen.GenTeam -> a, Gen.GenTeam -> String) -> TeamStructure
f n teams (func, nfunc) =
let ts' = splitBy func teams
in Node n (map (\tp -> Leaf (nfunc (head tp), tp)) ts')
g :: (Ord a) => TeamStructure -> (Gen.GenTeam -> a, Gen.GenTeam -> String) -> TeamStructure
g tr (func, nfunc) =
go tr
where go (Node i ts') = Node i (map go ts')
go (Leaf (i, ts')) = f i ts' (func, nfunc)
nationToString = showTeamNation . Gen.teamnation
divisionToString = showDivision . Gen.teamdivision
continentToString = show . countryContinent . nationToString
getFontAndTexture :: MenuBlock (Font, TextureObject)
getFontAndTexture = do
c <- rendercontext <$> State.get
return (renderfont c, bgtexture c)
getTwoFonts :: MenuBlock (Font, Font)
getTwoFonts = do
c <- rendercontext <$> State.get
return (renderfont c, smallerfont c)
getTSLabel :: TeamStructure -> String
getTSLabel (Node i _) = i
getTSLabel (Leaf (i, _)) = i
getTSChildrenTitles :: TeamStructure -> [String]
getTSChildrenTitles = (either (map getTSLabel) (map Gen.genteamname)) . getTSChildren
getTSTitles :: TeamStructure -> (String, [String])
getTSTitles t = (getTSLabel t, getTSChildrenTitles t)
getTSChildren :: TeamStructure -> Either [TeamStructure] [Gen.GenTeam]
getTSChildren (Node _ ts) = Left ts
getTSChildren (Leaf (_, ts)) = Right ts
getTSChildrenByTitle :: TeamStructure -> String -> Maybe (Either TeamStructure Gen.GenTeam)
getTSChildrenByTitle ts n =
case getTSChildren ts of
Left ts' -> liftM Left $ find (\t -> getTSLabel t == n) ts'
Right tms -> liftM Right $ find (\t -> Gen.genteamname t == n) tms
hasJust :: (Eq a) => a -> Maybe a -> Bool
hasJust _ Nothing = False
hasJust n (Just m) = n == m
rotateTeam :: Gen.GenTeam -> Maybe (Gen.GenTeam, TeamOwner) -> Maybe (Gen.GenTeam, TeamOwner)
rotateTeam t Nothing = Just (t, AIOwner)
rotateTeam _ (Just (t, AIOwner)) = Just (t, HumanOwner)
rotateTeam _ (Just (_, HumanOwner)) = Nothing
getOwner :: String -> WorldContext -> Maybe TeamOwner
getOwner t c =
let t1 = case hometeam c of
Nothing -> Nothing
Just (ht, ho) -> if Gen.genteamname ht == t then Just ho else Nothing
t2 = case awayteam c of
Nothing -> Nothing
Just (ht, ho) -> if Gen.genteamname ht == t then Just ho else Nothing
in t1 `mplus` t2
clickedOnTeam :: Gen.GenTeam -> MenuBlock ()
clickedOnTeam t = do
c <- State.get
if hasJust (Gen.genteamname t) (liftM (Gen.genteamname . fst) (hometeam c))
then modify $ modHometeam $ rotateTeam t
else if hasJust (Gen.genteamname t) (liftM (Gen.genteamname . fst) (awayteam c))
then modify $ modAwayteam $ rotateTeam t
else if isNothing (hometeam c)
then modify $ modHometeam $ rotateTeam t
else if isNothing (awayteam c)
then modify $ modAwayteam $ rotateTeam t
else return ()
browseTeams :: TeamStructure -> ButtonHandler
browseTeams toplevel _ = do
let (_, labels) = getTSTitles toplevel
if length labels == 1
then browserButtonHandler toplevel (head labels)
else do
mutLoop (browseTeams' toplevel)
return False
browserButtonHandler :: TeamStructure -> String -> MenuBlock Bool
browserButtonHandler toplevel lbl =
case getTSChildrenByTitle toplevel lbl of
Nothing -> return False
Just (Left t) -> browseTeams t (getTSLabel t)
Just (Right t) -> clickedOnTeam t >> return False
skinMagic, shirtMagic, shortsMagic, socksMagic, shoesMagic,
hair1Magic, hair2Magic, eyesMagic :: Gen.Color
skinMagic = (197, 169, 58)
shirtMagic = (255, 0, 0)
shortsMagic = (255, 240, 0)
socksMagic = (0, 0, 255)
shoesMagic = (3, 3, 3)
hair1Magic = (0, 0, 0)
hair2Magic = (16, 16, 16)
eyesMagic = (140, 85, 14)
colorKit :: Gen.Kit -> ChangeRGB
colorKit k c
| c == shirtMagic = Gen.kitfirstcolor k
| c == shortsMagic = Gen.kitshortcolor k
| c == socksMagic = Gen.kitsockscolor k
| otherwise = c
getRandomGen :: IO (Either Int StdGen)
getRandomGen = handle (\e -> hPutStrLn stderr ("Using random generator: " ++ show (e :: IOException)) >> newStdGen >>= return . Right) $ do
fp <- getAppUserDataDirectory appname
createDirectoryIfMissing True fp
ex <- doesFileExist (fp </> "seed")
if ex
then do
contents <- liftM Str.unpack $ Str.readFile (fp </> "seed")
case safeRead contents of
Nothing -> newStdGen >>= return . Right
Just i -> return (Left i)
else newStdGen >>= return . Right
startMatch
:: Font
-> Font
-> Gen.GenTeam
-> TeamOwner
-> Gen.GenTeam
-> TeamOwner
-> String
-> MenuBlock Bool
startMatch f1 f2 ht ho at ao _ = do
ptex <- liftIO $ loadDataTexture Nothing "share/grass1.png" Nothing Nothing
let hcf = colorKit (Gen.primarykit ht)
pltexhs <- liftIO $ loadDataTexture (Just hcf) "share/player1-s.png" (Just 0) (Just 32)
pltexhn <- liftIO $ loadDataTexture (Just hcf) "share/player1-n.png" (Just 0) (Just 32)
pltexhw <- liftIO $ loadDataTexture (Just hcf) "share/player1-w.png" (Just 0) (Just 32)
pltexhe <- liftIO $ loadDataTexture (Just hcf) "share/player1-e.png" (Just 0) (Just 32)
let acf = colorKit (Gen.primarykit at)
pltexas <- liftIO $ loadDataTexture (Just acf) "share/player1-s.png" (Just 0) (Just 32)
pltexan <- liftIO $ loadDataTexture (Just acf) "share/player1-n.png" (Just 0) (Just 32)
pltexaw <- liftIO $ loadDataTexture (Just acf) "share/player1-w.png" (Just 0) (Just 32)
pltexae <- liftIO $ loadDataTexture (Just acf) "share/player1-e.png" (Just 0) (Just 32)
balltex <- liftIO $ loadDataTexture Nothing "share/ball1.png" (Just 0) (Just 8)
playershadowtex <- liftIO $ loadDataTexture Nothing "share/player1shadow.png" (Just 0) (Just 32)
ballshadowtex <- liftIO $ loadDataTexture Nothing "share/ball1shadow.png" (Just 0) (Just 8)
gl1 <- liftIO $ loadDataTexture Nothing "share/goal1.png" Nothing Nothing
gl2 <- liftIO $ loadDataTexture Nothing "share/goal2.png" Nothing Nothing
gl1shadow <- liftIO $ loadDataTexture Nothing "share/goal1shadow.png" Nothing Nothing
gl2shadow <- liftIO $ loadDataTexture Nothing "share/goal2shadow.png" Nothing Nothing
allTactics <- tactics <$> State.get
let ballimg = ImageInfo balltex (0.4, 0.4)
let playershadowimg = ImageInfo playershadowtex (2, 2)
let ballshadowimg = ImageInfo ballshadowtex (0.4, 0.4)
htac = fromMaybe (snd $ head allTactics) $ lookup (Gen.teamtactics ht) allTactics
atac = fromMaybe (snd $ head allTactics) $ lookup (Gen.teamtactics at) allTactics
rgen <- liftIO $ getRandomGen
liftIO $ playMatch
rgen
(MatchTextureSet ptex
(PlayerTextureSet pltexhs pltexhn pltexhw pltexhe)
(PlayerTextureSet pltexas pltexan pltexaw pltexae)
playershadowimg
ballimg
ballshadowimg
(2, 2)
(ImageInfo gl1 (7.32, 2.44))
(ImageInfo gl2 (7.32, 3.44))
(ImageInfo gl1shadow (9.32, 4.44))
(ImageInfo gl2shadow (9.32, 4.44)))
f1 f2 (ht, htac, ho) (at, atac, ao)
return False
continueToMatch :: MenuBlock ()
continueToMatch = do
c <- State.get
(f1, f2) <- getTwoFonts
(w, h) <- liftIO $ getWindowSize
case hometeam c of
Nothing -> return ()
Just (ht, ho) -> do
case awayteam c of
Nothing -> return ()
Just (at, ao) -> do
let quitlabel = "Back"
title = "Match"
quitbutton = Button (Left SOrange) ((10, 10), (200, 30)) quitlabel f1 (\_ -> return True)
team1buttons = map
(\(n, t) ->
Button (Left SOrange)
((20, h - 100 - n * 25), (240, 20))
t f2 (\_ -> return False))
(zip [0..] t1labels)
t1labels = map Gen.plname (Gen.teamplayers ht)
team2buttons = map
(\(n, t) ->
Button (Left SOrange)
((520, h - 100 - n * 25), (240, 20))
t f2 (\_ -> return False))
(zip [0..] t2labels)
t2labels = map Gen.plname (Gen.teamplayers at)
titlebutton = Button (Left SOrange) ((w `div` 2 - 100, h - 50), (200, 30)) title f1 (\_ -> return False)
contlabel = "Play"
contbutton = Button (Left SOrange)
((w - 210, 10), (200, 30))
contlabel f1
(startMatch f1 f2 ht ho at ao)
allbuttons = contbutton : quitbutton : titlebutton : team1buttons ++ team2buttons
genLoop allbuttons
ownerToColor :: String -> WorldContext -> SColor
ownerToColor t c =
case getOwner t c of
Nothing -> SOrange
Just AIOwner -> SRed
Just HumanOwner -> SBlue
browseTeams' :: TeamStructure -> MenuBlock [Button (MenuBlock Bool)]
browseTeams' toplevel = do
let (title, labels) = getTSTitles toplevel
c <- State.get
(f1, f2) <- getTwoFonts
(w, h) <- liftIO $ getWindowSize
let quitlabel = "Back"
quitbutton = Button (Left SOrange) ((10, 10), (200, 30)) quitlabel f1 (\_ -> return True)
teambuttons = map
(\(n, t) ->
Button (Left (ownerToColor t c))
(if length labels > 10
then ((20 + 250 * (n `mod` 3), h - 100 - (n `div` 3) * 25), (240, 20))
else ((270, h - 150 - n * 35), (240, 30)))
t (if length labels > 10 then f2 else f1) (browserButtonHandler toplevel))
(zip [0..] labels)
titlebutton = Button (Left SOrange) ((w `div` 2 - 100, h - 50), (200, 30)) title f1 (\_ -> return False)
contlabel = "Play"
mcont = if isJust (hometeam c) && isJust (awayteam c)
then Just $ Button (Left SOrange) ((w - 210, 10), (200, 30)) contlabel f1 (\_ -> continueToMatch >> return False)
else Nothing
allbuttons = case mcont of
Nothing -> quitbutton : titlebutton : teambuttons
Just cn -> cn : quitbutton : titlebutton : teambuttons
return allbuttons
splitBy :: (Ord b) => (a -> b) -> [a] -> [[a]]
splitBy f = groupBy ((==) `on` f) . sortBy (comparing f)
checkGenButtonClicks :: (MonadIO m) => [Button (m Bool)] -> [SDL.Event] -> m Bool
checkGenButtonClicks btns evts = do
btnsclicked <- liftIO $ mouseClickInAnyM [ButtonLeft] (map buttonBox btns) evts
let mlbl = liftM buttonLabel $
btnsclicked >>= \b ->
find (\bt -> b == buttonBox bt) btns
case mlbl of
Nothing -> return False
Just lbl -> do
let mbt = find (\b -> buttonLabel b == lbl) btns
case mbt of
Nothing -> return False
Just bt -> (buttonAction bt) lbl
type ButtonHandler = String -> MenuBlock Bool
mutLoop :: MenuBlock [Button (MenuBlock Bool)] -> MenuBlock ()
mutLoop f = do
liftIO $ SDL.delay 40
btns <- f
(_, tex) <- getFontAndTexture
liftIO $ drawGenScene tex btns
evts <- liftIO $ pollAllSDLEvents
back <- checkGenButtonClicks btns evts
let escpressed = isJust $ specificKeyPressed [SDLK_ESCAPE] evts
if back || escpressed
then return ()
else mutLoop f
genLoop :: [Button (MenuBlock Bool)] -> MenuBlock ()
genLoop btns = mutLoop (return btns)
main :: IO ()
main = catch run (\e -> hPutStrLn stderr $ "Exception: " ++ show (e :: IOException))
loadDataFont :: Int -> Int -> FilePath -> IO Font
loadDataFont sz pt fp =
let act = (\fn -> createTextureFont fn >>= \f -> setFontFaceSize f sz pt >> return f)
in loadDataResource fp act
loadDataResource :: FilePath -> (FilePath -> IO a) -> IO a
loadDataResource fp act = do
fn <- getDataFileName fp
exists <- doesFileExist fn
when (not exists) $ do
throwIO $ mkIOError doesNotExistErrorType "loading data resource" Nothing (Just fn)
act fn
loadDataTexture :: Maybe ChangeRGB -> FilePath -> Maybe Int -> Maybe Int -> IO TextureObject
loadDataTexture cf fp mn mx = loadDataResource fp (loadTexture cf mn mx)
getUserDataTeams :: IO [Gen.GenTeam]
getUserDataTeams = handle (\e -> hPutStrLn stderr ("Exception when loading user data teams: " ++ show (e :: IOException)) >> return []) $ do
fp <- getAppUserDataDirectory appname
let dr = fp </> "teams"
createDirectoryIfMissing True dr
readDir dr
appname = "freekick2"
getUserDataTactics :: IO [Gen.SimpleFormation]
getUserDataTactics = handle (\e -> hPutStrLn stderr ("Exception when loading user data tactics: " ++ show (e :: IOException)) >> return []) $ do
fp <- getAppUserDataDirectory appname
let dr = fp </> "tactics"
createDirectoryIfMissing True dr
readDir dr
run :: IO ()
run = do
let width, height :: (Num a) => a
width = 800
height = 600
_ <- setVideoMode width height 0 [OpenGL]
depthFunc $= Nothing
blend $= Enabled
blendFunc $= (SrcAlpha, OneMinusSrcAlpha)
clearColor $= Color4 0 0 0 1
viewport $= (Position 0 0, Size width height)
matrixMode $= Projection
loadIdentity
setCamera ((0, 0), (width, height))
matrixMode $= Modelview 0
texture Texture2D $= Enabled
tex <- loadDataTexture Nothing "share/bg.png" Nothing Nothing
f <- loadDataFont 24 48 "share/DejaVuSans.ttf"
f2 <- loadDataFont 16 48 "share/DejaVuSans.ttf"
teamdir <- getDataFileName "share/teams"
shareteamlist <- readDir teamdir
plteamlist <- getUserDataTeams
let allteams = structureTeams (shareteamlist ++ plteamlist)
tacticdir <- getDataFileName "share/tactics"
sharets <- readDir tacticdir
plts <- getUserDataTactics
let simplets = sharets ++ plts
let ts = zip (map Gen.simpleorder simplets) (map Gen.simpleFormationToGenFormation simplets)
let button1 = Button (Left SOrange) ((300, 200), (200, 30)) quitLabel f (\_ -> return True)
button2 = Button (Left SBlue) ((300, 400), (200, 30)) browseLabel f (browseTeams allteams)
browseLabel = "Friendly"
quitLabel = "Quit"
buttons = [button1, button2]
rc = RenderContext f f2 tex
evalStateT (genLoop buttons) (WorldContext rc allteams Nothing Nothing ts)
readDir :: (Binary a) => FilePath -> IO [a]
readDir fp = do
ex <- doesDirectoryExist fp
if not ex
then return []
else do
fs <- getDirectoryContents fp
tss <- forM fs $ \f -> do
isfile <- doesFileExist (fp </> f)
if isfile
then decodeFile (fp </> f)
else return []
return $ concat tss
| anttisalonen/freekick2 | src/Main.hs | gpl-3.0 | 17,364 | 0 | 28 | 4,591 | 6,303 | 3,248 | 3,055 | 389 | 5 |
{-
This file is part of HNH.
HNH is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
HNH is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with HNH. If not, see <http://www.gnu.org/licenses/>.
Copyright 2010 Francisco Ferreira
-}
module Compile where --TODO add exported methods
import Parser
import Syntax
import CommonTransforms(commonTransforms)
import OneVarLambda(oneVarLambda)
import AddIdentifiers(addIdentifiers)
import ProgToLet(progToLet)
import InferTypes(performTypeInference)
import CPS(cpsTransform)
import RemoveVarK(removeVarK)
import Closure(closureConversion)
import CodeGen(codeGen)
import InitialProgram(buildInitialProgram)
import qualified TransformMonad as T
import ErrorMonad
import Control.Monad.State
import Text.PrettyPrint.Leijen{-(Doc, Pretty, pretty)-}
-- | Run the full compilation pipeline over a parsed program.
-- The transformation result is paired with a list of named pretty-printed
-- snapshots (one per pass); the original program is prepended to that list
-- so every intermediate step can be rendered for debugging.
compileTransform prog =
  let (res, docs) = T.runTransform (commonTransforms p
                                    >>= oneVarLambda
                                    >>= addIdentifiers
                                    >>= performTypeInference
                                    >>= progToLet
                                    >>= cpsTransform
                                    >>= removeVarK
                                    >>= closureConversion
                                    >>= codeGen) -- trailing '>>= return' removed: it is the identity
      p = buildInitialProgram prog
  in
   (res, ("original", pretty p):docs) -- adding the original to the list
-- | Compile a successfully parsed program, or abort with the
-- pretty-printed parse error.
runTransformations parseResult =
  case parseResult of
    Success p -> compileTransform p
    Error s   -> error $ show (pretty s)
-- | Extract the compiled program, aborting with the error message on failure.
checkTransformation result =
  case result of
    Success program -> program
    Error err       -> error err
-- | Read a source file together with the prelude, compile both, write the
-- generated C code to \"code.c\", and return either the rendered
-- transformation steps (when @showSteps@ is set) or the compilation result.
loadAndEval :: String -> Name -> Bool -> IO Doc
loadAndEval file mainName showSteps =
  do contents <- readFile file
     preludeContents <- readFile "prelude.hnh"
     -- pure computations are bound with 'let' instead of the
     -- 'x <- return $ e' anti-pattern
     let parsedPrelude = parseHNH "prelude.hnh" preludeContents
         parsed = parseHNH file contents
         (programRes, docs) = runTransformations (merge parsedPrelude parsed)
         program = checkTransformation programRes
     writeFile "code.c" program
     return $ if showSteps
                then T.renderSteps docs
                else compile program mainName
-- | Concatenate the declarations of two successfully parsed programs;
-- if either side failed, the first failure encountered is propagated.
merge :: ErrorM Program -> ErrorM Program -> ErrorM Program
merge (Success (Program ds1)) (Success (Program ds2)) =
  Success (Program (ds1 ++ ds2))
merge failure@(Error _) _ = failure
merge _ failure@(Error _) = failure
-- | Placeholder back end: the generated program and entry-point name are
-- currently ignored and a constant \"Ok\" document is returned.
compile p name = pretty "Ok" -- p
prettify (t1, t2, d) = pretty t1
<+> pretty "="
<+> pretty t2
<> line <> pretty d <> line | fferreira/hnh | Compile.hs | gpl-3.0 | 3,189 | 0 | 20 | 986 | 635 | 328 | 307 | 57 | 2 |
import VM
main = putStrLn "aoeu" | MichaelBurge/vm | Main.hs | gpl-3.0 | 33 | 0 | 5 | 6 | 12 | 6 | 6 | 2 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-|
Module      : WebParsing.PrerequisiteParsing
Description : converts a T.Text representation of prerequisites into prereq format
Stability : experimental
Currently parses prerequisites for Arts & Science only. Attempts to preserve only the most
basic internal logic of the prerequisite representation found on course calendar: each element
is either: 1) a 1-element list containing a course name
2) an n-element list containing a list of course names.
-}
module WebParsing.PrerequisiteParsing (parsePrerequisites) where
import Text.Regex.Posix
import qualified Data.Text as T
{- Signatures:
-}
-- | Attempts to match characters used to delimit prerequisite expressions.
-- Returns (before, delim, after), or (input, \"\", \"\") if no match occurs.
matchDelim :: String -> (String, String, String)
matchDelim prereqs = prereqs =~ delimPattern
  where delimPattern = "[;,]" :: String
-- | Returns True if the string begins inside a parenthesized expression,
-- i.e. a closing parenthesis appears before the string ends.
-- e.g. isntDelim "CSC458)" == True
--      isntDelim "(STA247, STA248)" == False
isntDelim :: String -> Bool
isntDelim rest = rest =~ closesParen
  where closesParen = "^.*\\)" :: String
-- |Splits a PrereqString by delimiters ';' ','.
-- The second argument is an accumulator carrying text that belongs to the
-- current expression; it is used to glue together fragments that a comma
-- split apart inside a parenthesized group.
toPreExprs :: String -> String -> [String]
toPreExprs str expr =
  let (before, delim, after) = matchDelim str
  in case (before, delim, after) of
    -- nothing left at all: done
    ("","","") -> [] --if (expr == "") then [] else [expr]
    -- no delimiter remains: the rest forms the final expression
    (before, "", "") -> [before++expr]
    -- a comma still inside parentheses does not end the expression, so
    -- keep accumulating; otherwise the comma terminates it
    (before, ",", after) -> if (isntDelim after)
                            then toPreExprs after (expr ++ before)
                            else (expr++before):(toPreExprs after "")
    -- a semicolon always terminates the current expression
    (before, ";", after) -> (expr++before):(toPreExprs after "")
-- | Attempts to match a course code (e.g. \"CSC108H1\") in the given string;
-- returns (before, course, after), or (input, \"\", \"\") when nothing matches.
matchCourse :: String -> (String, String, String)
matchCourse prereqs = prereqs =~ coursePattern
  where coursePattern = "[A-Z]{3}[0-9]{3}[HY][0-9]" :: String
-- | Converts a string representing a prerequisite expression into a
-- prerequisite expression: extracts every course code found within the
-- string and returns them space-separated in a single Text value.
toPrereq :: String -> T.Text
toPrereq expr =
  let (before, course, after) = matchCourse expr
  in case (before, course, after) of
    -- no course code anywhere in the input
    (_, "", "") -> ""
    --guaranteed match
    -- last (or only) course code in the input
    (_, course, "") -> T.pack course
    -- a course code with more input after it: recurse on the remainder
    (_, course, after) -> T.concat [(T.pack course), " ", (toPrereq after)]
-- | Converts a text representation of course prerequisites into the type of
-- the prereqs field in a course record; 'Nothing' is passed through.
parsePrerequisites :: Maybe T.Text -> Maybe T.Text
parsePrerequisites = fmap convert
  where
    convert prereqStr =
      T.strip $
        T.intercalate "," (map toPrereq (toPreExprs (T.unpack prereqStr) ""))
| cchens/courseography | hs/WebParsing/PrerequisiteParsing.hs | gpl-3.0 | 2,792 | 0 | 14 | 569 | 578 | 324 | 254 | 37 | 5 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.GlobalNetworkEndpointGroups.DetachNetworkEndpoints
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Detach the network endpoint from the specified network endpoint group.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.globalNetworkEndpointGroups.detachNetworkEndpoints@.
module Network.Google.Resource.Compute.GlobalNetworkEndpointGroups.DetachNetworkEndpoints
(
-- * REST Resource
GlobalNetworkEndpointGroupsDetachNetworkEndpointsResource
-- * Creating a Request
, globalNetworkEndpointGroupsDetachNetworkEndpoints
, GlobalNetworkEndpointGroupsDetachNetworkEndpoints
-- * Request Lenses
, gnegdneRequestId
, gnegdneProject
, gnegdnePayload
, gnegdneNetworkEndpointGroup
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.globalNetworkEndpointGroups.detachNetworkEndpoints@ method which the
-- 'GlobalNetworkEndpointGroupsDetachNetworkEndpoints' request conforms to.
-- Servant description of the REST call:
--   POST compute/v1/projects/{project}/global/networkEndpointGroups/
--        {networkEndpointGroup}/detachNetworkEndpoints
--   query : requestId (optional), alt=json
--   body  : GlobalNetworkEndpointGroupsDetachEndpointsRequest (JSON)
--   reply : Operation (JSON)
type GlobalNetworkEndpointGroupsDetachNetworkEndpointsResource
     =
     "compute" :>
       "v1" :>
         "projects" :>
           Capture "project" Text :>
             "global" :>
               "networkEndpointGroups" :>
                 Capture "networkEndpointGroup" Text :>
                   "detachNetworkEndpoints" :>
                     QueryParam "requestId" Text :>
                       QueryParam "alt" AltJSON :>
                         ReqBody '[JSON]
                           GlobalNetworkEndpointGroupsDetachEndpointsRequest
                           :> Post '[JSON] Operation
-- | Detach the network endpoint from the specified network endpoint group.
--
-- /See:/ 'globalNetworkEndpointGroupsDetachNetworkEndpoints' smart constructor.
data GlobalNetworkEndpointGroupsDetachNetworkEndpoints =
  GlobalNetworkEndpointGroupsDetachNetworkEndpoints'
    { _gnegdneRequestId :: !(Maybe Text)
      -- ^ Optional idempotency token sent as the @requestId@ query parameter.
    , _gnegdneProject :: !Text
      -- ^ Project ID for this request.
    , _gnegdnePayload :: !GlobalNetworkEndpointGroupsDetachEndpointsRequest
      -- ^ The JSON request body.
    , _gnegdneNetworkEndpointGroup :: !Text
      -- ^ Name of the network endpoint group to detach endpoints from.
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'GlobalNetworkEndpointGroupsDetachNetworkEndpoints'
-- with the minimum fields required to make a request.
--
-- Remaining fields can be adjusted afterwards through the lenses
-- 'gnegdneRequestId', 'gnegdneProject', 'gnegdnePayload' and
-- 'gnegdneNetworkEndpointGroup'.
globalNetworkEndpointGroupsDetachNetworkEndpoints
    :: Text -- ^ 'gnegdneProject'
    -> GlobalNetworkEndpointGroupsDetachEndpointsRequest -- ^ 'gnegdnePayload'
    -> Text -- ^ 'gnegdneNetworkEndpointGroup'
    -> GlobalNetworkEndpointGroupsDetachNetworkEndpoints
globalNetworkEndpointGroupsDetachNetworkEndpoints project payload group =
  GlobalNetworkEndpointGroupsDetachNetworkEndpoints'
    { _gnegdneRequestId = Nothing
    , _gnegdneProject = project
    , _gnegdnePayload = payload
    , _gnegdneNetworkEndpointGroup = group
    }
-- | An optional request ID for idempotent retries: if a request times out
-- and is retried with the same ID, the server recognizes the duplicate and
-- ignores it instead of creating a second commitment.  Must be a valid
-- UUID, except that the zero UUID
-- (00000000-0000-0000-0000-000000000000) is not supported.
gnegdneRequestId :: Lens' GlobalNetworkEndpointGroupsDetachNetworkEndpoints (Maybe Text)
gnegdneRequestId =
  lens _gnegdneRequestId (\record v -> record{_gnegdneRequestId = v})

-- | Project ID for this request.
gnegdneProject :: Lens' GlobalNetworkEndpointGroupsDetachNetworkEndpoints Text
gnegdneProject =
  lens _gnegdneProject (\record v -> record{_gnegdneProject = v})

-- | Multipart request metadata.
gnegdnePayload :: Lens' GlobalNetworkEndpointGroupsDetachNetworkEndpoints GlobalNetworkEndpointGroupsDetachEndpointsRequest
gnegdnePayload =
  lens _gnegdnePayload (\record v -> record{_gnegdnePayload = v})

-- | The name of the network endpoint group where you are removing network
-- endpoints. It should comply with RFC1035.
gnegdneNetworkEndpointGroup :: Lens' GlobalNetworkEndpointGroupsDetachNetworkEndpoints Text
gnegdneNetworkEndpointGroup =
  lens _gnegdneNetworkEndpointGroup
       (\record v -> record{_gnegdneNetworkEndpointGroup = v})
instance GoogleRequest
           GlobalNetworkEndpointGroupsDetachNetworkEndpoints
         where
        -- The call resolves to a long-running Operation resource.
        type Rs
               GlobalNetworkEndpointGroupsDetachNetworkEndpoints
             = Operation
        -- OAuth scopes accepted for this method.
        type Scopes
               GlobalNetworkEndpointGroupsDetachNetworkEndpoints
             =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/compute"]
        -- Feed the captured path/query fields and the JSON payload into the
        -- servant client derived from the Resource type alias above.
        requestClient
          GlobalNetworkEndpointGroupsDetachNetworkEndpoints'{..}
          = go _gnegdneProject _gnegdneNetworkEndpointGroup
              _gnegdneRequestId
              (Just AltJSON)
              _gnegdnePayload
              computeService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy
                           GlobalNetworkEndpointGroupsDetachNetworkEndpointsResource)
                      mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/GlobalNetworkEndpointGroups/DetachNetworkEndpoints.hs | mpl-2.0 | 6,386 | 0 | 18 | 1,345 | 562 | 336 | 226 | 101 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.SafeBrowsing.Types.Product
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.SafeBrowsing.Types.Product where
import Network.Google.Prelude
import Network.Google.SafeBrowsing.Types.Sum
-- | The metadata associated with a specific threat entry. The client is
-- expected to know the metadata key\/value pairs associated with each
-- threat type.
--
-- /See:/ 'threatEntryMetadata' smart constructor.
newtype ThreatEntryMetadata = ThreatEntryMetadata'
    { _temEntries :: Maybe [MetadataEntry]
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a 'ThreatEntryMetadata' with all optional fields unset;
-- populate the entries afterwards via the 'temEntries' lens.
threatEntryMetadata
    :: ThreatEntryMetadata
threatEntryMetadata = ThreatEntryMetadata' {_temEntries = Nothing}

-- | The metadata entries.
temEntries :: Lens' ThreatEntryMetadata [MetadataEntry]
temEntries =
    lens _temEntries (\record v -> record {_temEntries = v}) .
        _Default . _Coerce

instance FromJSON ThreatEntryMetadata where
        parseJSON =
            withObject "ThreatEntryMetadata" $ \o ->
                ThreatEntryMetadata' <$> (o .:? "entries" .!= mempty)

instance ToJSON ThreatEntryMetadata where
        toJSON ThreatEntryMetadata'{..} =
            object (catMaybes [("entries" .=) <$> _temEntries])
-- | The expected state of a client\'s local database.
--
-- /See:/ 'checksum' smart constructor.
newtype Checksum = Checksum'
    { _cSha256 :: Maybe Bytes
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a 'Checksum' with all optional fields unset; set the digest
-- through the 'cSha256' lens.
checksum
    :: Checksum
checksum = Checksum' {_cSha256 = Nothing}

-- | The SHA256 hash of the client state; that is, of the sorted list of
-- all hashes present in the database.
cSha256 :: Lens' Checksum (Maybe ByteString)
cSha256 =
    lens _cSha256 (\record v -> record {_cSha256 = v}) . mapping _Bytes

instance FromJSON Checksum where
        parseJSON =
            withObject "Checksum" $ \o -> Checksum' <$> (o .:? "sha256")

instance ToJSON Checksum where
        toJSON Checksum'{..} =
            object (catMaybes [("sha256" .=) <$> _cSha256])
--
-- /See:/ 'findThreatMatchesResponse' smart constructor.
newtype FindThreatMatchesResponse = FindThreatMatchesResponse'
    { _ftmrMatches :: Maybe [ThreatMatch]
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a 'FindThreatMatchesResponse' with all optional fields unset;
-- fill in the matches through the 'ftmrMatches' lens.
findThreatMatchesResponse
    :: FindThreatMatchesResponse
findThreatMatchesResponse = FindThreatMatchesResponse' {_ftmrMatches = Nothing}

-- | The threat list matches.
ftmrMatches :: Lens' FindThreatMatchesResponse [ThreatMatch]
ftmrMatches =
    lens _ftmrMatches (\record v -> record {_ftmrMatches = v}) .
        _Default . _Coerce

instance FromJSON FindThreatMatchesResponse where
        parseJSON =
            withObject "FindThreatMatchesResponse" $ \o ->
                FindThreatMatchesResponse' <$> (o .:? "matches" .!= mempty)

instance ToJSON FindThreatMatchesResponse where
        toJSON FindThreatMatchesResponse'{..} =
            object (catMaybes [("matches" .=) <$> _ftmrMatches])
-- | The information regarding one or more threats that a client submits
-- when checking for matches in threat lists.
--
-- /See:/ 'threatInfo' smart constructor.
data ThreatInfo = ThreatInfo'
    { _tiThreatEntries    :: !(Maybe [ThreatEntry])
    , _tiThreatTypes      :: !(Maybe [Text])
    , _tiPlatformTypes    :: !(Maybe [Text])
    , _tiThreatEntryTypes :: !(Maybe [Text])
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a 'ThreatInfo' with every field unset.  Use the lenses
-- 'tiThreatEntries', 'tiThreatTypes', 'tiPlatformTypes' and
-- 'tiThreatEntryTypes' to fill it in.
threatInfo
    :: ThreatInfo
threatInfo =
    ThreatInfo'
        { _tiThreatEntries = Nothing
        , _tiThreatTypes = Nothing
        , _tiPlatformTypes = Nothing
        , _tiThreatEntryTypes = Nothing
        }

-- | The threat entries to be checked.
tiThreatEntries :: Lens' ThreatInfo [ThreatEntry]
tiThreatEntries =
    lens _tiThreatEntries (\record v -> record {_tiThreatEntries = v}) .
        _Default . _Coerce

-- | The threat types to be checked.
tiThreatTypes :: Lens' ThreatInfo [Text]
tiThreatTypes =
    lens _tiThreatTypes (\record v -> record {_tiThreatTypes = v}) .
        _Default . _Coerce

-- | The platform types to be checked.
tiPlatformTypes :: Lens' ThreatInfo [Text]
tiPlatformTypes =
    lens _tiPlatformTypes (\record v -> record {_tiPlatformTypes = v}) .
        _Default . _Coerce

-- | The entry types to be checked.
tiThreatEntryTypes :: Lens' ThreatInfo [Text]
tiThreatEntryTypes =
    lens _tiThreatEntryTypes (\record v -> record {_tiThreatEntryTypes = v}) .
        _Default . _Coerce

instance FromJSON ThreatInfo where
        parseJSON =
            withObject "ThreatInfo" $ \o ->
                ThreatInfo'
                    <$> (o .:? "threatEntries" .!= mempty)
                    <*> (o .:? "threatTypes" .!= mempty)
                    <*> (o .:? "platformTypes" .!= mempty)
                    <*> (o .:? "threatEntryTypes" .!= mempty)

instance ToJSON ThreatInfo where
        toJSON ThreatInfo'{..} =
            object
                (catMaybes
                     [ ("threatEntries" .=) <$> _tiThreatEntries
                     , ("threatTypes" .=) <$> _tiThreatTypes
                     , ("platformTypes" .=) <$> _tiPlatformTypes
                     , ("threatEntryTypes" .=) <$> _tiThreatEntryTypes
                     ])
-- | Describes a Safe Browsing API update request. Clients can request
-- updates for multiple lists in a single request. NOTE: Field index 2 is
-- unused. NEXT: 4
--
-- /See:/ 'fetchThreatListUpdatesRequest' smart constructor.
data FetchThreatListUpdatesRequest = FetchThreatListUpdatesRequest'
    { _ftlurListUpdateRequests :: !(Maybe [ListUpdateRequest])
    , _ftlurClient             :: !(Maybe ClientInfo)
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a 'FetchThreatListUpdatesRequest' with every field unset; use
-- 'ftlurListUpdateRequests' and 'ftlurClient' to populate it.
fetchThreatListUpdatesRequest
    :: FetchThreatListUpdatesRequest
fetchThreatListUpdatesRequest =
    FetchThreatListUpdatesRequest'
        { _ftlurListUpdateRequests = Nothing
        , _ftlurClient = Nothing
        }

-- | The requested threat list updates.
ftlurListUpdateRequests :: Lens' FetchThreatListUpdatesRequest [ListUpdateRequest]
ftlurListUpdateRequests =
    lens _ftlurListUpdateRequests
         (\record v -> record {_ftlurListUpdateRequests = v}) .
        _Default . _Coerce

-- | The client metadata.
ftlurClient :: Lens' FetchThreatListUpdatesRequest (Maybe ClientInfo)
ftlurClient =
    lens _ftlurClient (\record v -> record {_ftlurClient = v})

instance FromJSON FetchThreatListUpdatesRequest where
        parseJSON =
            withObject "FetchThreatListUpdatesRequest" $ \o ->
                FetchThreatListUpdatesRequest'
                    <$> (o .:? "listUpdateRequests" .!= mempty)
                    <*> (o .:? "client")

instance ToJSON FetchThreatListUpdatesRequest where
        toJSON FetchThreatListUpdatesRequest'{..} =
            object
                (catMaybes
                     [ ("listUpdateRequests" .=) <$> _ftlurListUpdateRequests
                     , ("client" .=) <$> _ftlurClient
                     ])
-- | Request to return full hashes matched by the provided hash prefixes.
--
-- /See:/ 'findFullHashesRequest' smart constructor.
data FindFullHashesRequest = FindFullHashesRequest'
    { _ffhrThreatInfo   :: !(Maybe ThreatInfo)
    , _ffhrClientStates :: !(Maybe [Bytes])
    , _ffhrClient       :: !(Maybe ClientInfo)
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a 'FindFullHashesRequest' with every field unset; use the
-- lenses 'ffhrThreatInfo', 'ffhrClientStates' and 'ffhrClient' to fill it.
findFullHashesRequest
    :: FindFullHashesRequest
findFullHashesRequest =
    FindFullHashesRequest'
        { _ffhrThreatInfo = Nothing
        , _ffhrClientStates = Nothing
        , _ffhrClient = Nothing
        }

-- | The lists and hashes to be checked.
ffhrThreatInfo :: Lens' FindFullHashesRequest (Maybe ThreatInfo)
ffhrThreatInfo =
    lens _ffhrThreatInfo (\record v -> record {_ffhrThreatInfo = v})

-- | The current client states for each of the client\'s local threat lists.
ffhrClientStates :: Lens' FindFullHashesRequest [ByteString]
ffhrClientStates =
    lens _ffhrClientStates (\record v -> record {_ffhrClientStates = v}) .
        _Default . _Coerce

-- | The client metadata.
ffhrClient :: Lens' FindFullHashesRequest (Maybe ClientInfo)
ffhrClient =
    lens _ffhrClient (\record v -> record {_ffhrClient = v})

instance FromJSON FindFullHashesRequest where
        parseJSON =
            withObject "FindFullHashesRequest" $ \o ->
                FindFullHashesRequest'
                    <$> (o .:? "threatInfo")
                    <*> (o .:? "clientStates" .!= mempty)
                    <*> (o .:? "client")

instance ToJSON FindFullHashesRequest where
        toJSON FindFullHashesRequest'{..} =
            object
                (catMaybes
                     [ ("threatInfo" .=) <$> _ffhrThreatInfo
                     , ("clientStates" .=) <$> _ffhrClientStates
                     , ("client" .=) <$> _ffhrClient
                     ])
-- | The constraints for this update.
--
-- /See:/ 'constraints' smart constructor.
data Constraints = Constraints'
    { _cMaxUpdateEntries      :: !(Maybe (Textual Int32))
    , _cRegion                :: !(Maybe Text)
    , _cSupportedCompressions :: !(Maybe [Text])
    , _cMaxDatabaseEntries    :: !(Maybe (Textual Int32))
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a 'Constraints' with every field unset.  Use the lenses
-- 'cMaxUpdateEntries', 'cRegion', 'cSupportedCompressions' and
-- 'cMaxDatabaseEntries' to fill it in.
constraints
    :: Constraints
constraints =
    Constraints'
        { _cMaxUpdateEntries = Nothing
        , _cRegion = Nothing
        , _cSupportedCompressions = Nothing
        , _cMaxDatabaseEntries = Nothing
        }

-- | The maximum size in number of entries. The update will not contain more
-- entries than this value. This should be a power of 2 between 2**10 and
-- 2**20. If zero, no update size limit is set.
cMaxUpdateEntries :: Lens' Constraints (Maybe Int32)
cMaxUpdateEntries =
    lens _cMaxUpdateEntries (\record v -> record {_cMaxUpdateEntries = v}) .
        mapping _Coerce

-- | Requests the list for a specific geographic location. If not set the
-- server may pick that value based on the user\'s IP address. Expects ISO
-- 3166-1 alpha-2 format.
cRegion :: Lens' Constraints (Maybe Text)
cRegion = lens _cRegion (\record v -> record {_cRegion = v})

-- | The compression types supported by the client.
cSupportedCompressions :: Lens' Constraints [Text]
cSupportedCompressions =
    lens _cSupportedCompressions
         (\record v -> record {_cSupportedCompressions = v}) .
        _Default . _Coerce

-- | Sets the maximum number of entries that the client is willing to have
-- in the local database. This should be a power of 2 between 2**10 and
-- 2**20. If zero, no database size limit is set.
cMaxDatabaseEntries :: Lens' Constraints (Maybe Int32)
cMaxDatabaseEntries =
    lens _cMaxDatabaseEntries
         (\record v -> record {_cMaxDatabaseEntries = v}) .
        mapping _Coerce

instance FromJSON Constraints where
        parseJSON =
            withObject "Constraints" $ \o ->
                Constraints'
                    <$> (o .:? "maxUpdateEntries")
                    <*> (o .:? "region")
                    <*> (o .:? "supportedCompressions" .!= mempty)
                    <*> (o .:? "maxDatabaseEntries")

instance ToJSON Constraints where
        toJSON Constraints'{..} =
            object
                (catMaybes
                     [ ("maxUpdateEntries" .=) <$> _cMaxUpdateEntries
                     , ("region" .=) <$> _cRegion
                     , ("supportedCompressions" .=) <$> _cSupportedCompressions
                     , ("maxDatabaseEntries" .=) <$> _cMaxDatabaseEntries
                     ])
-- | The Rice-Golomb encoded data. Used for sending compressed 4-byte
-- hashes or compressed removal indices.
--
-- /See:/ 'riceDeltaEncoding' smart constructor.
data RiceDeltaEncoding = RiceDeltaEncoding'
    { _rdeFirstValue    :: !(Maybe (Textual Int64))
    , _rdeRiceParameter :: !(Maybe (Textual Int32))
    , _rdeNumEntries    :: !(Maybe (Textual Int32))
    , _rdeEncodedData   :: !(Maybe Bytes)
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a 'RiceDeltaEncoding' with every field unset.  Use the lenses
-- 'rdeFirstValue', 'rdeRiceParameter', 'rdeNumEntries' and
-- 'rdeEncodedData' to fill it in.
riceDeltaEncoding
    :: RiceDeltaEncoding
riceDeltaEncoding =
    RiceDeltaEncoding'
        { _rdeFirstValue = Nothing
        , _rdeRiceParameter = Nothing
        , _rdeNumEntries = Nothing
        , _rdeEncodedData = Nothing
        }

-- | The offset of the first entry in the encoded data, or, if only a
-- single integer was encoded, that single integer\'s value.
rdeFirstValue :: Lens' RiceDeltaEncoding (Maybe Int64)
rdeFirstValue =
    lens _rdeFirstValue (\record v -> record {_rdeFirstValue = v}) .
        mapping _Coerce

-- | The Golomb-Rice parameter, which is a number between 2 and 28. This
-- field is missing (that is, zero) if \`num_entries\` is zero.
rdeRiceParameter :: Lens' RiceDeltaEncoding (Maybe Int32)
rdeRiceParameter =
    lens _rdeRiceParameter (\record v -> record {_rdeRiceParameter = v}) .
        mapping _Coerce

-- | The number of entries that are delta encoded in the encoded data. If
-- only a single integer was encoded, this will be zero and the single
-- value will be stored in \`first_value\`.
rdeNumEntries :: Lens' RiceDeltaEncoding (Maybe Int32)
rdeNumEntries =
    lens _rdeNumEntries (\record v -> record {_rdeNumEntries = v}) .
        mapping _Coerce

-- | The encoded deltas that are encoded using the Golomb-Rice coder.
rdeEncodedData :: Lens' RiceDeltaEncoding (Maybe ByteString)
rdeEncodedData =
    lens _rdeEncodedData (\record v -> record {_rdeEncodedData = v}) .
        mapping _Bytes

instance FromJSON RiceDeltaEncoding where
        parseJSON =
            withObject "RiceDeltaEncoding" $ \o ->
                RiceDeltaEncoding'
                    <$> (o .:? "firstValue")
                    <*> (o .:? "riceParameter")
                    <*> (o .:? "numEntries")
                    <*> (o .:? "encodedData")

instance ToJSON RiceDeltaEncoding where
        toJSON RiceDeltaEncoding'{..} =
            object
                (catMaybes
                     [ ("firstValue" .=) <$> _rdeFirstValue
                     , ("riceParameter" .=) <$> _rdeRiceParameter
                     , ("numEntries" .=) <$> _rdeNumEntries
                     , ("encodedData" .=) <$> _rdeEncodedData
                     ])
--
-- /See:/ 'listThreatListsResponse' smart constructor.
newtype ListThreatListsResponse = ListThreatListsResponse'
    { _ltlrThreatLists :: Maybe [ThreatListDescriptor]
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a 'ListThreatListsResponse' with all optional fields unset;
-- fill in the lists through the 'ltlrThreatLists' lens.
listThreatListsResponse
    :: ListThreatListsResponse
listThreatListsResponse = ListThreatListsResponse' {_ltlrThreatLists = Nothing}

-- | The lists available for download by the client.
ltlrThreatLists :: Lens' ListThreatListsResponse [ThreatListDescriptor]
ltlrThreatLists =
    lens _ltlrThreatLists (\record v -> record {_ltlrThreatLists = v}) .
        _Default . _Coerce

instance FromJSON ListThreatListsResponse where
        parseJSON =
            withObject "ListThreatListsResponse" $ \o ->
                ListThreatListsResponse' <$> (o .:? "threatLists" .!= mempty)

instance ToJSON ListThreatListsResponse where
        toJSON ListThreatListsResponse'{..} =
            object (catMaybes [("threatLists" .=) <$> _ltlrThreatLists])
-- | Describes an individual threat list. A list is defined by three
-- parameters: the type of threat posed, the type of platform targeted by
-- the threat, and the type of entries in the list.
--
-- /See:/ 'threatListDescriptor' smart constructor.
data ThreatListDescriptor = ThreatListDescriptor'
    { _tldThreatEntryType :: !(Maybe Text)
    , _tldThreatType      :: !(Maybe Text)
    , _tldPlatformType    :: !(Maybe Text)
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a 'ThreatListDescriptor' with every field unset.  Use the
-- lenses 'tldThreatEntryType', 'tldThreatType' and 'tldPlatformType' to
-- fill it in.
threatListDescriptor
    :: ThreatListDescriptor
threatListDescriptor =
    ThreatListDescriptor'
        { _tldThreatEntryType = Nothing
        , _tldThreatType = Nothing
        , _tldPlatformType = Nothing
        }

-- | The entry types contained in the list.
tldThreatEntryType :: Lens' ThreatListDescriptor (Maybe Text)
tldThreatEntryType =
    lens _tldThreatEntryType (\record v -> record {_tldThreatEntryType = v})

-- | The threat type posed by the list\'s entries.
tldThreatType :: Lens' ThreatListDescriptor (Maybe Text)
tldThreatType =
    lens _tldThreatType (\record v -> record {_tldThreatType = v})

-- | The platform type targeted by the list\'s entries.
tldPlatformType :: Lens' ThreatListDescriptor (Maybe Text)
tldPlatformType =
    lens _tldPlatformType (\record v -> record {_tldPlatformType = v})

instance FromJSON ThreatListDescriptor where
        parseJSON =
            withObject "ThreatListDescriptor" $ \o ->
                ThreatListDescriptor'
                    <$> (o .:? "threatEntryType")
                    <*> (o .:? "threatType")
                    <*> (o .:? "platformType")

instance ToJSON ThreatListDescriptor where
        toJSON ThreatListDescriptor'{..} =
            object
                (catMaybes
                     [ ("threatEntryType" .=) <$> _tldThreatEntryType
                     , ("threatType" .=) <$> _tldThreatType
                     , ("platformType" .=) <$> _tldPlatformType
                     ])
-- | The client metadata associated with Safe Browsing API requests.
--
-- /See:/ 'clientInfo' smart constructor.
data ClientInfo = ClientInfo'
    { _ciClientId      :: !(Maybe Text)
    , _ciClientVersion :: !(Maybe Text)
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a 'ClientInfo' with every field unset; use 'ciClientId' and
-- 'ciClientVersion' to populate it.
clientInfo
    :: ClientInfo
clientInfo =
    ClientInfo'
        { _ciClientId = Nothing
        , _ciClientVersion = Nothing
        }

-- | A client ID that (hopefully) uniquely identifies the client
-- implementation of the Safe Browsing API.
ciClientId :: Lens' ClientInfo (Maybe Text)
ciClientId = lens _ciClientId (\record v -> record {_ciClientId = v})

-- | The version of the client implementation.
ciClientVersion :: Lens' ClientInfo (Maybe Text)
ciClientVersion =
    lens _ciClientVersion (\record v -> record {_ciClientVersion = v})

instance FromJSON ClientInfo where
        parseJSON =
            withObject "ClientInfo" $ \o ->
                ClientInfo'
                    <$> (o .:? "clientId")
                    <*> (o .:? "clientVersion")

instance ToJSON ClientInfo where
        toJSON ClientInfo'{..} =
            object
                (catMaybes
                     [ ("clientId" .=) <$> _ciClientId
                     , ("clientVersion" .=) <$> _ciClientVersion
                     ])
-- | Request to check entries against lists.
--
-- /See:/ 'findThreatMatchesRequest' smart constructor.
data FindThreatMatchesRequest = FindThreatMatchesRequest'
    { _ftmrThreatInfo :: !(Maybe ThreatInfo)
    , _ftmrClient     :: !(Maybe ClientInfo)
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a 'FindThreatMatchesRequest' with every field unset; use
-- 'ftmrThreatInfo' and 'ftmrClient' to populate it.
findThreatMatchesRequest
    :: FindThreatMatchesRequest
findThreatMatchesRequest =
    FindThreatMatchesRequest'
        { _ftmrThreatInfo = Nothing
        , _ftmrClient = Nothing
        }

-- | The lists and entries to be checked for matches.
ftmrThreatInfo :: Lens' FindThreatMatchesRequest (Maybe ThreatInfo)
ftmrThreatInfo =
    lens _ftmrThreatInfo (\record v -> record {_ftmrThreatInfo = v})

-- | The client metadata.
ftmrClient :: Lens' FindThreatMatchesRequest (Maybe ClientInfo)
ftmrClient = lens _ftmrClient (\record v -> record {_ftmrClient = v})

instance FromJSON FindThreatMatchesRequest where
        parseJSON =
            withObject "FindThreatMatchesRequest" $ \o ->
                FindThreatMatchesRequest'
                    <$> (o .:? "threatInfo")
                    <*> (o .:? "client")

instance ToJSON FindThreatMatchesRequest where
        toJSON FindThreatMatchesRequest'{..} =
            object
                (catMaybes
                     [ ("threatInfo" .=) <$> _ftmrThreatInfo
                     , ("client" .=) <$> _ftmrClient
                     ])
-- | A single list update request.
--
-- /See:/ 'listUpdateRequest' smart constructor.
data ListUpdateRequest = ListUpdateRequest'
    { _lurState           :: !(Maybe Bytes)
    , _lurThreatEntryType :: !(Maybe Text)
    , _lurConstraints     :: !(Maybe Constraints)
    , _lurThreatType      :: !(Maybe Text)
    , _lurPlatformType    :: !(Maybe Text)
    } deriving (Eq, Show, Data, Typeable, Generic)

-- | Creates a 'ListUpdateRequest' with every field unset.  Use the lenses
-- 'lurState', 'lurThreatEntryType', 'lurConstraints', 'lurThreatType' and
-- 'lurPlatformType' to fill it in.
listUpdateRequest
    :: ListUpdateRequest
listUpdateRequest =
    ListUpdateRequest'
        { _lurState = Nothing
        , _lurThreatEntryType = Nothing
        , _lurConstraints = Nothing
        , _lurThreatType = Nothing
        , _lurPlatformType = Nothing
        }

-- | The current state of the client for the requested list (the encrypted
-- client state that was received from the last successful list update).
lurState :: Lens' ListUpdateRequest (Maybe ByteString)
lurState =
    lens _lurState (\record v -> record {_lurState = v}) . mapping _Bytes

-- | The types of entries present in the list.
lurThreatEntryType :: Lens' ListUpdateRequest (Maybe Text)
lurThreatEntryType =
    lens _lurThreatEntryType (\record v -> record {_lurThreatEntryType = v})

-- | The constraints associated with this request.
lurConstraints :: Lens' ListUpdateRequest (Maybe Constraints)
lurConstraints =
    lens _lurConstraints (\record v -> record {_lurConstraints = v})

-- | The type of threat posed by entries present in the list.
lurThreatType :: Lens' ListUpdateRequest (Maybe Text)
lurThreatType =
    lens _lurThreatType (\record v -> record {_lurThreatType = v})

-- | The type of platform at risk by entries present in the list.
lurPlatformType :: Lens' ListUpdateRequest (Maybe Text)
lurPlatformType =
    lens _lurPlatformType (\record v -> record {_lurPlatformType = v})

instance FromJSON ListUpdateRequest where
        parseJSON =
            withObject "ListUpdateRequest" $ \o ->
                ListUpdateRequest'
                    <$> (o .:? "state")
                    <*> (o .:? "threatEntryType")
                    <*> (o .:? "constraints")
                    <*> (o .:? "threatType")
                    <*> (o .:? "platformType")

instance ToJSON ListUpdateRequest where
        toJSON ListUpdateRequest'{..} =
            object
                (catMaybes
                     [ ("state" .=) <$> _lurState
                     , ("threatEntryType" .=) <$> _lurThreatEntryType
                     , ("constraints" .=) <$> _lurConstraints
                     , ("threatType" .=) <$> _lurThreatType
                     , ("platformType" .=) <$> _lurPlatformType
                     ])
-- | An individual threat; for example, a malicious URL or its hash
-- representation. Only one of these fields should be set.
--
-- NOTE(review): "only one field set" is stated by the upstream API docs;
-- nothing in this type enforces the exclusivity.
--
-- /See:/ 'threatEntry' smart constructor.
data ThreatEntry = ThreatEntry'
    { _teHash :: !(Maybe Bytes)
    , _teURL :: !(Maybe Text)
    , _teDigest :: !(Maybe Bytes)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ThreatEntry' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'teHash'
--
-- * 'teURL'
--
-- * 'teDigest'
threatEntry
    :: ThreatEntry
threatEntry =
    ThreatEntry'
    { _teHash = Nothing
    , _teURL = Nothing
    , _teDigest = Nothing
    }
-- | A hash prefix, consisting of the most significant 4-32 bytes of a SHA256
-- hash. This field is in binary format.
teHash :: Lens' ThreatEntry (Maybe ByteString)
teHash
  = lens _teHash (\ s a -> s{_teHash = a}) .
      mapping _Bytes
-- | A URL.
teURL :: Lens' ThreatEntry (Maybe Text)
teURL = lens _teURL (\ s a -> s{_teURL = a})
-- | The digest of an executable in SHA256 format. The API supports both
-- binary and hex digests.
teDigest :: Lens' ThreatEntry (Maybe ByteString)
teDigest
  = lens _teDigest (\ s a -> s{_teDigest = a}) .
      mapping _Bytes
-- Absent JSON keys decode to 'Nothing' via '(.:?)'.
instance FromJSON ThreatEntry where
        parseJSON
          = withObject "ThreatEntry"
              (\ o ->
                 ThreatEntry' <$>
                   (o .:? "hash") <*> (o .:? "url") <*>
                     (o .:? "digest"))
-- 'catMaybes' omits 'Nothing' fields from the encoded object.
instance ToJSON ThreatEntry where
        toJSON ThreatEntry'{..}
          = object
              (catMaybes
                 [("hash" .=) <$> _teHash, ("url" .=) <$> _teURL,
                  ("digest" .=) <$> _teDigest])
-- | A match when checking a threat entry in the Safe Browsing threat lists.
--
-- /See:/ 'threatMatch' smart constructor.
data ThreatMatch = ThreatMatch'
    { _tmThreatEntryMetadata :: !(Maybe ThreatEntryMetadata)
    , _tmThreatEntryType :: !(Maybe Text)
    , _tmThreatType :: !(Maybe Text)
    , _tmPlatformType :: !(Maybe Text)
    , _tmCacheDuration :: !(Maybe Text)
    , _tmThreat :: !(Maybe ThreatEntry)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ThreatMatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tmThreatEntryMetadata'
--
-- * 'tmThreatEntryType'
--
-- * 'tmThreatType'
--
-- * 'tmPlatformType'
--
-- * 'tmCacheDuration'
--
-- * 'tmThreat'
threatMatch
    :: ThreatMatch
threatMatch =
    ThreatMatch'
    { _tmThreatEntryMetadata = Nothing
    , _tmThreatEntryType = Nothing
    , _tmThreatType = Nothing
    , _tmPlatformType = Nothing
    , _tmCacheDuration = Nothing
    , _tmThreat = Nothing
    }
-- | Optional metadata associated with this threat.
tmThreatEntryMetadata :: Lens' ThreatMatch (Maybe ThreatEntryMetadata)
tmThreatEntryMetadata
  = lens _tmThreatEntryMetadata
      (\ s a -> s{_tmThreatEntryMetadata = a})
-- | The threat entry type matching this threat.
tmThreatEntryType :: Lens' ThreatMatch (Maybe Text)
tmThreatEntryType
  = lens _tmThreatEntryType
      (\ s a -> s{_tmThreatEntryType = a})
-- | The threat type matching this threat.
tmThreatType :: Lens' ThreatMatch (Maybe Text)
tmThreatType
  = lens _tmThreatType (\ s a -> s{_tmThreatType = a})
-- | The platform type matching this threat.
tmPlatformType :: Lens' ThreatMatch (Maybe Text)
tmPlatformType
  = lens _tmPlatformType
      (\ s a -> s{_tmPlatformType = a})
-- | The cache lifetime for the returned match. Clients must not cache this
-- response for more than this duration to avoid false positives.
tmCacheDuration :: Lens' ThreatMatch (Maybe Text)
tmCacheDuration
  = lens _tmCacheDuration
      (\ s a -> s{_tmCacheDuration = a})
-- | The threat matching this threat.
tmThreat :: Lens' ThreatMatch (Maybe ThreatEntry)
tmThreat = lens _tmThreat (\ s a -> s{_tmThreat = a})
-- Absent JSON keys decode to 'Nothing' via '(.:?)'.
instance FromJSON ThreatMatch where
        parseJSON
          = withObject "ThreatMatch"
              (\ o ->
                 ThreatMatch' <$>
                   (o .:? "threatEntryMetadata") <*>
                     (o .:? "threatEntryType")
                     <*> (o .:? "threatType")
                     <*> (o .:? "platformType")
                     <*> (o .:? "cacheDuration")
                     <*> (o .:? "threat"))
-- 'catMaybes' omits 'Nothing' fields from the encoded object.
instance ToJSON ThreatMatch where
        toJSON ThreatMatch'{..}
          = object
              (catMaybes
                 [("threatEntryMetadata" .=) <$>
                    _tmThreatEntryMetadata,
                  ("threatEntryType" .=) <$> _tmThreatEntryType,
                  ("threatType" .=) <$> _tmThreatType,
                  ("platformType" .=) <$> _tmPlatformType,
                  ("cacheDuration" .=) <$> _tmCacheDuration,
                  ("threat" .=) <$> _tmThreat])
-- | The uncompressed threat entries in hash format of a particular prefix
-- length. Hashes can be anywhere from 4 to 32 bytes in size. A large
-- majority are 4 bytes, but some hashes are lengthened if they collide
-- with the hash of a popular URL. Used for sending ThreatEntrySet to
-- clients that do not support compression, or when sending non-4-byte
-- hashes to clients that do support compression.
--
-- /See:/ 'rawHashes' smart constructor.
data RawHashes = RawHashes'
    { _rhPrefixSize :: !(Maybe (Textual Int32))
    , _rhRawHashes :: !(Maybe Bytes)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'RawHashes' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rhPrefixSize'
--
-- * 'rhRawHashes'
rawHashes
    :: RawHashes
rawHashes =
    RawHashes'
    { _rhPrefixSize = Nothing
    , _rhRawHashes = Nothing
    }
-- | The number of bytes for each prefix encoded below. This field can be
-- anywhere from 4 (shortest prefix) to 32 (full SHA256 hash).
--
-- 'mapping' '_Coerce' unwraps the 'Textual' 'Int32' so lens users see a
-- plain 'Int32'.
rhPrefixSize :: Lens' RawHashes (Maybe Int32)
rhPrefixSize
  = lens _rhPrefixSize (\ s a -> s{_rhPrefixSize = a})
      . mapping _Coerce
-- | The hashes, all concatenated into one long string. Each hash has a
-- prefix size of |prefix_size| above. Hashes are sorted in lexicographic
-- order.
rhRawHashes :: Lens' RawHashes (Maybe ByteString)
rhRawHashes
  = lens _rhRawHashes (\ s a -> s{_rhRawHashes = a}) .
      mapping _Bytes
-- Absent JSON keys decode to 'Nothing' via '(.:?)'.
instance FromJSON RawHashes where
        parseJSON
          = withObject "RawHashes"
              (\ o ->
                 RawHashes' <$>
                   (o .:? "prefixSize") <*> (o .:? "rawHashes"))
-- 'catMaybes' omits 'Nothing' fields from the encoded object.
instance ToJSON RawHashes where
        toJSON RawHashes'{..}
          = object
              (catMaybes
                 [("prefixSize" .=) <$> _rhPrefixSize,
                  ("rawHashes" .=) <$> _rhRawHashes])
-- | An update to an individual list.
--
-- /See:/ 'listUpdateResponse' smart constructor.
data ListUpdateResponse = ListUpdateResponse'
    { _lAdditions :: !(Maybe [ThreatEntrySet])
    , _lThreatEntryType :: !(Maybe Text)
    , _lChecksum :: !(Maybe Checksum)
    , _lThreatType :: !(Maybe Text)
    , _lPlatformType :: !(Maybe Text)
    , _lNewClientState :: !(Maybe Bytes)
    , _lRemovals :: !(Maybe [ThreatEntrySet])
    , _lResponseType :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ListUpdateResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lAdditions'
--
-- * 'lThreatEntryType'
--
-- * 'lChecksum'
--
-- * 'lThreatType'
--
-- * 'lPlatformType'
--
-- * 'lNewClientState'
--
-- * 'lRemovals'
--
-- * 'lResponseType'
listUpdateResponse
    :: ListUpdateResponse
listUpdateResponse =
    ListUpdateResponse'
    { _lAdditions = Nothing
    , _lThreatEntryType = Nothing
    , _lChecksum = Nothing
    , _lThreatType = Nothing
    , _lPlatformType = Nothing
    , _lNewClientState = Nothing
    , _lRemovals = Nothing
    , _lResponseType = Nothing
    }
-- | A set of entries to add to a local threat type\'s list. Repeated to
-- allow for a combination of compressed and raw data to be sent in a
-- single response.
--
-- '_Default' lets this lens present the 'Maybe' list as a plain list
-- (a missing field reads as @[]@).
lAdditions :: Lens' ListUpdateResponse [ThreatEntrySet]
lAdditions
  = lens _lAdditions (\ s a -> s{_lAdditions = a}) .
      _Default
      . _Coerce
-- | The format of the threats.
lThreatEntryType :: Lens' ListUpdateResponse (Maybe Text)
lThreatEntryType
  = lens _lThreatEntryType
      (\ s a -> s{_lThreatEntryType = a})
-- | The expected SHA256 hash of the client state; that is, of the sorted
-- list of all hashes present in the database after applying the provided
-- update. If the client state doesn\'t match the expected state, the
-- client must disregard this update and retry later.
lChecksum :: Lens' ListUpdateResponse (Maybe Checksum)
lChecksum
  = lens _lChecksum (\ s a -> s{_lChecksum = a})
-- | The threat type for which data is returned.
lThreatType :: Lens' ListUpdateResponse (Maybe Text)
lThreatType
  = lens _lThreatType (\ s a -> s{_lThreatType = a})
-- | The platform type for which data is returned.
lPlatformType :: Lens' ListUpdateResponse (Maybe Text)
lPlatformType
  = lens _lPlatformType
      (\ s a -> s{_lPlatformType = a})
-- | The new client state, in encrypted format. Opaque to clients.
lNewClientState :: Lens' ListUpdateResponse (Maybe ByteString)
lNewClientState
  = lens _lNewClientState
      (\ s a -> s{_lNewClientState = a})
      . mapping _Bytes
-- | A set of entries to remove from a local threat type\'s list. Repeated
-- for the same reason as above.
lRemovals :: Lens' ListUpdateResponse [ThreatEntrySet]
lRemovals
  = lens _lRemovals (\ s a -> s{_lRemovals = a}) .
      _Default
      . _Coerce
-- | The type of response. This may indicate that an action is required by
-- the client when the response is received.
lResponseType :: Lens' ListUpdateResponse (Maybe Text)
lResponseType
  = lens _lResponseType
      (\ s a -> s{_lResponseType = a})
-- Absent keys decode to 'Nothing'; the two list fields additionally
-- default to the empty list via @.!= mempty@.
instance FromJSON ListUpdateResponse where
        parseJSON
          = withObject "ListUpdateResponse"
              (\ o ->
                 ListUpdateResponse' <$>
                   (o .:? "additions" .!= mempty) <*>
                     (o .:? "threatEntryType")
                     <*> (o .:? "checksum")
                     <*> (o .:? "threatType")
                     <*> (o .:? "platformType")
                     <*> (o .:? "newClientState")
                     <*> (o .:? "removals" .!= mempty)
                     <*> (o .:? "responseType"))
-- 'catMaybes' omits 'Nothing' fields from the encoded object.
instance ToJSON ListUpdateResponse where
        toJSON ListUpdateResponse'{..}
          = object
              (catMaybes
                 [("additions" .=) <$> _lAdditions,
                  ("threatEntryType" .=) <$> _lThreatEntryType,
                  ("checksum" .=) <$> _lChecksum,
                  ("threatType" .=) <$> _lThreatType,
                  ("platformType" .=) <$> _lPlatformType,
                  ("newClientState" .=) <$> _lNewClientState,
                  ("removals" .=) <$> _lRemovals,
                  ("responseType" .=) <$> _lResponseType])
-- | A set of threats that should be added or removed from a client\'s local
-- database.
--
-- /See:/ 'threatEntrySet' smart constructor.
data ThreatEntrySet = ThreatEntrySet'
    { _tesRiceHashes :: !(Maybe RiceDeltaEncoding)
    , _tesRiceIndices :: !(Maybe RiceDeltaEncoding)
    , _tesRawHashes :: !(Maybe RawHashes)
    , _tesRawIndices :: !(Maybe RawIndices)
    , _tesCompressionType :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ThreatEntrySet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tesRiceHashes'
--
-- * 'tesRiceIndices'
--
-- * 'tesRawHashes'
--
-- * 'tesRawIndices'
--
-- * 'tesCompressionType'
threatEntrySet
    :: ThreatEntrySet
threatEntrySet =
    ThreatEntrySet'
    { _tesRiceHashes = Nothing
    , _tesRiceIndices = Nothing
    , _tesRawHashes = Nothing
    , _tesRawIndices = Nothing
    , _tesCompressionType = Nothing
    }
-- | The encoded 4-byte prefixes of SHA256-formatted entries, using a
-- Golomb-Rice encoding.
tesRiceHashes :: Lens' ThreatEntrySet (Maybe RiceDeltaEncoding)
tesRiceHashes
  = lens _tesRiceHashes
      (\ s a -> s{_tesRiceHashes = a})
-- | The encoded local, lexicographically-sorted list indices, using a
-- Golomb-Rice encoding. Used for sending compressed removal indices.
tesRiceIndices :: Lens' ThreatEntrySet (Maybe RiceDeltaEncoding)
tesRiceIndices
  = lens _tesRiceIndices
      (\ s a -> s{_tesRiceIndices = a})
-- | The raw SHA256-formatted entries.
tesRawHashes :: Lens' ThreatEntrySet (Maybe RawHashes)
tesRawHashes
  = lens _tesRawHashes (\ s a -> s{_tesRawHashes = a})
-- | The raw removal indices for a local list.
tesRawIndices :: Lens' ThreatEntrySet (Maybe RawIndices)
tesRawIndices
  = lens _tesRawIndices
      (\ s a -> s{_tesRawIndices = a})
-- | The compression type for the entries in this set.
tesCompressionType :: Lens' ThreatEntrySet (Maybe Text)
tesCompressionType
  = lens _tesCompressionType
      (\ s a -> s{_tesCompressionType = a})
-- Absent JSON keys decode to 'Nothing' via '(.:?)'.
instance FromJSON ThreatEntrySet where
        parseJSON
          = withObject "ThreatEntrySet"
              (\ o ->
                 ThreatEntrySet' <$>
                   (o .:? "riceHashes") <*> (o .:? "riceIndices") <*>
                     (o .:? "rawHashes")
                     <*> (o .:? "rawIndices")
                     <*> (o .:? "compressionType"))
-- 'catMaybes' omits 'Nothing' fields from the encoded object.
instance ToJSON ThreatEntrySet where
        toJSON ThreatEntrySet'{..}
          = object
              (catMaybes
                 [("riceHashes" .=) <$> _tesRiceHashes,
                  ("riceIndices" .=) <$> _tesRiceIndices,
                  ("rawHashes" .=) <$> _tesRawHashes,
                  ("rawIndices" .=) <$> _tesRawIndices,
                  ("compressionType" .=) <$> _tesCompressionType])
-- | A set of raw indices to remove from a local list.
--
-- /See:/ 'rawIndices' smart constructor.
newtype RawIndices = RawIndices'
    { _riIndices :: Maybe [Textual Int32]
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'RawIndices' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'riIndices'
rawIndices
    :: RawIndices
rawIndices =
    RawIndices'
    { _riIndices = Nothing
    }
-- | The indices to remove from a lexicographically-sorted local list.
--
-- '_Default' reads a missing field as @[]@; '_Coerce' strips the
-- 'Textual' wrapper so users see plain 'Int32' values.
riIndices :: Lens' RawIndices [Int32]
riIndices
  = lens _riIndices (\ s a -> s{_riIndices = a}) .
      _Default
      . _Coerce
-- A missing \"indices\" key decodes as the empty list via @.!= mempty@.
instance FromJSON RawIndices where
        parseJSON
          = withObject "RawIndices"
              (\ o -> RawIndices' <$> (o .:? "indices" .!= mempty))
-- 'catMaybes' omits the field entirely when it is 'Nothing'.
instance ToJSON RawIndices where
        toJSON RawIndices'{..}
          = object (catMaybes [("indices" .=) <$> _riIndices])
--
-- /See:/ 'findFullHashesResponse' smart constructor.
data FindFullHashesResponse = FindFullHashesResponse'
    { _ffhrMatches :: !(Maybe [ThreatMatch])
    , _ffhrNegativeCacheDuration :: !(Maybe Text)
    , _ffhrMinimumWaitDuration :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'FindFullHashesResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ffhrMatches'
--
-- * 'ffhrNegativeCacheDuration'
--
-- * 'ffhrMinimumWaitDuration'
findFullHashesResponse
    :: FindFullHashesResponse
findFullHashesResponse =
    FindFullHashesResponse'
    { _ffhrMatches = Nothing
    , _ffhrNegativeCacheDuration = Nothing
    , _ffhrMinimumWaitDuration = Nothing
    }
-- | The full hashes that matched the requested prefixes.
--
-- '_Default' lets this lens present the 'Maybe' list as a plain list
-- (a missing field reads as @[]@).
ffhrMatches :: Lens' FindFullHashesResponse [ThreatMatch]
ffhrMatches
  = lens _ffhrMatches (\ s a -> s{_ffhrMatches = a}) .
      _Default
      . _Coerce
-- | For requested entities that did not match the threat list, how long to
-- cache the response.
ffhrNegativeCacheDuration :: Lens' FindFullHashesResponse (Maybe Text)
ffhrNegativeCacheDuration
  = lens _ffhrNegativeCacheDuration
      (\ s a -> s{_ffhrNegativeCacheDuration = a})
-- | The minimum duration the client must wait before issuing any find hashes
-- request. If this field is not set, clients can issue a request as soon
-- as they want.
ffhrMinimumWaitDuration :: Lens' FindFullHashesResponse (Maybe Text)
ffhrMinimumWaitDuration
  = lens _ffhrMinimumWaitDuration
      (\ s a -> s{_ffhrMinimumWaitDuration = a})
-- Absent keys decode to 'Nothing'; \"matches\" defaults to the empty
-- list via @.!= mempty@.
instance FromJSON FindFullHashesResponse where
        parseJSON
          = withObject "FindFullHashesResponse"
              (\ o ->
                 FindFullHashesResponse' <$>
                   (o .:? "matches" .!= mempty) <*>
                     (o .:? "negativeCacheDuration")
                     <*> (o .:? "minimumWaitDuration"))
-- 'catMaybes' omits 'Nothing' fields from the encoded object.
instance ToJSON FindFullHashesResponse where
        toJSON FindFullHashesResponse'{..}
          = object
              (catMaybes
                 [("matches" .=) <$> _ffhrMatches,
                  ("negativeCacheDuration" .=) <$>
                    _ffhrNegativeCacheDuration,
                  ("minimumWaitDuration" .=) <$>
                    _ffhrMinimumWaitDuration])
-- | A single metadata entry.
--
-- /See:/ 'metadataEntry' smart constructor.
data MetadataEntry = MetadataEntry'
    { _meValue :: !(Maybe Bytes)
    , _meKey :: !(Maybe Bytes)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'MetadataEntry' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'meValue'
--
-- * 'meKey'
metadataEntry
    :: MetadataEntry
metadataEntry =
    MetadataEntry'
    { _meValue = Nothing
    , _meKey = Nothing
    }
-- | The metadata entry value.
meValue :: Lens' MetadataEntry (Maybe ByteString)
meValue
  = lens _meValue (\ s a -> s{_meValue = a}) .
      mapping _Bytes
-- | The metadata entry key.
meKey :: Lens' MetadataEntry (Maybe ByteString)
meKey
  = lens _meKey (\ s a -> s{_meKey = a}) .
      mapping _Bytes
-- Absent JSON keys decode to 'Nothing' via '(.:?)'.
instance FromJSON MetadataEntry where
        parseJSON
          = withObject "MetadataEntry"
              (\ o ->
                 MetadataEntry' <$> (o .:? "value") <*> (o .:? "key"))
-- 'catMaybes' omits 'Nothing' fields from the encoded object.
instance ToJSON MetadataEntry where
        toJSON MetadataEntry'{..}
          = object
              (catMaybes
                 [("value" .=) <$> _meValue, ("key" .=) <$> _meKey])
--
-- /See:/ 'fetchThreatListUpdatesResponse' smart constructor.
data FetchThreatListUpdatesResponse = FetchThreatListUpdatesResponse'
    { _ftlurListUpdateResponses :: !(Maybe [ListUpdateResponse])
    , _ftlurMinimumWaitDuration :: !(Maybe Text)
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'FetchThreatListUpdatesResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ftlurListUpdateResponses'
--
-- * 'ftlurMinimumWaitDuration'
fetchThreatListUpdatesResponse
    :: FetchThreatListUpdatesResponse
fetchThreatListUpdatesResponse =
    FetchThreatListUpdatesResponse'
    { _ftlurListUpdateResponses = Nothing
    , _ftlurMinimumWaitDuration = Nothing
    }
-- | The list updates requested by the clients.
--
-- '_Default' lets this lens present the 'Maybe' list as a plain list
-- (a missing field reads as @[]@).
ftlurListUpdateResponses :: Lens' FetchThreatListUpdatesResponse [ListUpdateResponse]
ftlurListUpdateResponses
  = lens _ftlurListUpdateResponses
      (\ s a -> s{_ftlurListUpdateResponses = a})
      . _Default
      . _Coerce
-- | The minimum duration the client must wait before issuing any update
-- request. If this field is not set clients may update as soon as they
-- want.
ftlurMinimumWaitDuration :: Lens' FetchThreatListUpdatesResponse (Maybe Text)
ftlurMinimumWaitDuration
  = lens _ftlurMinimumWaitDuration
      (\ s a -> s{_ftlurMinimumWaitDuration = a})
-- Absent keys decode to 'Nothing'; \"listUpdateResponses\" defaults to
-- the empty list via @.!= mempty@.
instance FromJSON FetchThreatListUpdatesResponse
         where
        parseJSON
          = withObject "FetchThreatListUpdatesResponse"
              (\ o ->
                 FetchThreatListUpdatesResponse' <$>
                   (o .:? "listUpdateResponses" .!= mempty) <*>
                     (o .:? "minimumWaitDuration"))
-- 'catMaybes' omits 'Nothing' fields from the encoded object.
instance ToJSON FetchThreatListUpdatesResponse where
        toJSON FetchThreatListUpdatesResponse'{..}
          = object
              (catMaybes
                 [("listUpdateResponses" .=) <$>
                    _ftlurListUpdateResponses,
                  ("minimumWaitDuration" .=) <$>
                    _ftlurMinimumWaitDuration])
| rueshyna/gogol | gogol-safebrowsing/gen/Network/Google/SafeBrowsing/Types/Product.hs | mpl-2.0 | 45,961 | 0 | 19 | 11,428 | 8,450 | 4,848 | 3,602 | 979 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Storage.DefaultObjectAccessControls.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the default object ACL entry for the specified entity on the
-- specified bucket.
--
-- /See:/ <https://developers.google.com/storage/docs/json_api/ Cloud Storage JSON API Reference> for @storage.defaultObjectAccessControls.get@.
module Network.Google.Resource.Storage.DefaultObjectAccessControls.Get
(
-- * REST Resource
DefaultObjectAccessControlsGetResource
-- * Creating a Request
, defaultObjectAccessControlsGet
, DefaultObjectAccessControlsGet
-- * Request Lenses
, doacgBucket
, doacgUserProject
, doacgProvisionalUserProject
, doacgEntity
) where
import Network.Google.Prelude
import Network.Google.Storage.Types
-- | A resource alias for @storage.defaultObjectAccessControls.get@ method which the
-- 'DefaultObjectAccessControlsGet' request conforms to.
--
-- The servant-style type spells out the full request shape:
-- GET \/storage\/v1\/b\/{bucket}\/defaultObjectAcl\/{entity} plus the
-- optional query parameters below.
type DefaultObjectAccessControlsGetResource =
     "storage" :>
       "v1" :>
         "b" :>
           Capture "bucket" Text :>
             "defaultObjectAcl" :>
               Capture "entity" Text :>
                 QueryParam "userProject" Text :>
                   QueryParam "provisionalUserProject" Text :>
                     QueryParam "alt" AltJSON :>
                       Get '[JSON] ObjectAccessControl
-- | Returns the default object ACL entry for the specified entity on the
-- specified bucket.
--
-- /See:/ 'defaultObjectAccessControlsGet' smart constructor.
data DefaultObjectAccessControlsGet =
  DefaultObjectAccessControlsGet'
    { _doacgBucket :: !Text
    , _doacgUserProject :: !(Maybe Text)
    , _doacgProvisionalUserProject :: !(Maybe Text)
    , _doacgEntity :: !Text
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'DefaultObjectAccessControlsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'doacgBucket'
--
-- * 'doacgUserProject'
--
-- * 'doacgProvisionalUserProject'
--
-- * 'doacgEntity'
defaultObjectAccessControlsGet
    :: Text -- ^ 'doacgBucket'
    -> Text -- ^ 'doacgEntity'
    -> DefaultObjectAccessControlsGet
defaultObjectAccessControlsGet pDoacgBucket_ pDoacgEntity_ =
  DefaultObjectAccessControlsGet'
    { _doacgBucket = pDoacgBucket_
    , _doacgUserProject = Nothing
    , _doacgProvisionalUserProject = Nothing
    , _doacgEntity = pDoacgEntity_
    }
-- | Name of a bucket.
doacgBucket :: Lens' DefaultObjectAccessControlsGet Text
doacgBucket
  = lens _doacgBucket (\ s a -> s{_doacgBucket = a})
-- | The project to be billed for this request. Required for Requester Pays
-- buckets.
doacgUserProject :: Lens' DefaultObjectAccessControlsGet (Maybe Text)
doacgUserProject
  = lens _doacgUserProject
      (\ s a -> s{_doacgUserProject = a})
-- | The project to be billed for this request if the target bucket is
-- requester-pays bucket.
doacgProvisionalUserProject :: Lens' DefaultObjectAccessControlsGet (Maybe Text)
doacgProvisionalUserProject
  = lens _doacgProvisionalUserProject
      (\ s a -> s{_doacgProvisionalUserProject = a})
-- | The entity holding the permission. Can be user-userId,
-- user-emailAddress, group-groupId, group-emailAddress, allUsers, or
-- allAuthenticatedUsers.
doacgEntity :: Lens' DefaultObjectAccessControlsGet Text
doacgEntity
  = lens _doacgEntity (\ s a -> s{_doacgEntity = a})
-- Builds the actual client call: captures and query parameters are
-- passed positionally in the order the resource type declares them,
-- with the JSON alt parameter pinned to 'AltJSON'.
instance GoogleRequest DefaultObjectAccessControlsGet
         where
        type Rs DefaultObjectAccessControlsGet =
             ObjectAccessControl
        type Scopes DefaultObjectAccessControlsGet =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/devstorage.full_control"]
        requestClient DefaultObjectAccessControlsGet'{..}
          = go _doacgBucket _doacgEntity _doacgUserProject
              _doacgProvisionalUserProject
              (Just AltJSON)
              storageService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy DefaultObjectAccessControlsGetResource)
                      mempty
| brendanhay/gogol | gogol-storage/gen/Network/Google/Resource/Storage/DefaultObjectAccessControls/Get.hs | mpl-2.0 | 4,860 | 0 | 16 | 1,048 | 550 | 327 | 223 | 89 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CloudIOT.Projects.Locations.Registries.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates a device registry configuration.
--
-- /See:/ <https://cloud.google.com/iot Cloud IoT API Reference> for @cloudiot.projects.locations.registries.patch@.
module Network.Google.Resource.CloudIOT.Projects.Locations.Registries.Patch
(
-- * REST Resource
ProjectsLocationsRegistriesPatchResource
-- * Creating a Request
, projectsLocationsRegistriesPatch
, ProjectsLocationsRegistriesPatch
-- * Request Lenses
, plrpXgafv
, plrpUploadProtocol
, plrpUpdateMask
, plrpAccessToken
, plrpUploadType
, plrpPayload
, plrpName
, plrpCallback
) where
import Network.Google.CloudIOT.Types
import Network.Google.Prelude
-- | A resource alias for @cloudiot.projects.locations.registries.patch@ method which the
-- 'ProjectsLocationsRegistriesPatch' request conforms to.
--
-- The servant-style type spells out the full request shape:
-- PATCH \/v1\/{name} with a 'DeviceRegistry' JSON body and the optional
-- query parameters below.
type ProjectsLocationsRegistriesPatchResource =
     "v1" :>
       Capture "name" Text :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "updateMask" GFieldMask :>
               QueryParam "access_token" Text :>
                 QueryParam "uploadType" Text :>
                   QueryParam "callback" Text :>
                     QueryParam "alt" AltJSON :>
                       ReqBody '[JSON] DeviceRegistry :>
                         Patch '[JSON] DeviceRegistry
-- | Updates a device registry configuration.
--
-- /See:/ 'projectsLocationsRegistriesPatch' smart constructor.
data ProjectsLocationsRegistriesPatch =
  ProjectsLocationsRegistriesPatch'
    { _plrpXgafv :: !(Maybe Xgafv)
    , _plrpUploadProtocol :: !(Maybe Text)
    , _plrpUpdateMask :: !(Maybe GFieldMask)
    , _plrpAccessToken :: !(Maybe Text)
    , _plrpUploadType :: !(Maybe Text)
    , _plrpPayload :: !DeviceRegistry
    , _plrpName :: !Text
    , _plrpCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsRegistriesPatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plrpXgafv'
--
-- * 'plrpUploadProtocol'
--
-- * 'plrpUpdateMask'
--
-- * 'plrpAccessToken'
--
-- * 'plrpUploadType'
--
-- * 'plrpPayload'
--
-- * 'plrpName'
--
-- * 'plrpCallback'
projectsLocationsRegistriesPatch
    :: DeviceRegistry -- ^ 'plrpPayload'
    -> Text -- ^ 'plrpName'
    -> ProjectsLocationsRegistriesPatch
projectsLocationsRegistriesPatch pPlrpPayload_ pPlrpName_ =
  ProjectsLocationsRegistriesPatch'
    { _plrpXgafv = Nothing
    , _plrpUploadProtocol = Nothing
    , _plrpUpdateMask = Nothing
    , _plrpAccessToken = Nothing
    , _plrpUploadType = Nothing
    , _plrpPayload = pPlrpPayload_
    , _plrpName = pPlrpName_
    , _plrpCallback = Nothing
    }
-- | V1 error format.
plrpXgafv :: Lens' ProjectsLocationsRegistriesPatch (Maybe Xgafv)
plrpXgafv
  = lens _plrpXgafv (\ s a -> s{_plrpXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plrpUploadProtocol :: Lens' ProjectsLocationsRegistriesPatch (Maybe Text)
plrpUploadProtocol
  = lens _plrpUploadProtocol
      (\ s a -> s{_plrpUploadProtocol = a})
-- | Required. Only updates the \`device_registry\` fields indicated by this
-- mask. The field mask must not be empty, and it must not contain fields
-- that are immutable or only set by the server. Mutable top-level fields:
-- \`event_notification_config\`, \`http_config\`, \`mqtt_config\`, and
-- \`state_notification_config\`.
plrpUpdateMask :: Lens' ProjectsLocationsRegistriesPatch (Maybe GFieldMask)
plrpUpdateMask
  = lens _plrpUpdateMask
      (\ s a -> s{_plrpUpdateMask = a})
-- | OAuth access token.
plrpAccessToken :: Lens' ProjectsLocationsRegistriesPatch (Maybe Text)
plrpAccessToken
  = lens _plrpAccessToken
      (\ s a -> s{_plrpAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plrpUploadType :: Lens' ProjectsLocationsRegistriesPatch (Maybe Text)
plrpUploadType
  = lens _plrpUploadType
      (\ s a -> s{_plrpUploadType = a})
-- | Multipart request metadata.
plrpPayload :: Lens' ProjectsLocationsRegistriesPatch DeviceRegistry
plrpPayload
  = lens _plrpPayload (\ s a -> s{_plrpPayload = a})
-- | The resource path name. For example,
-- \`projects\/example-project\/locations\/us-central1\/registries\/my-registry\`.
plrpName :: Lens' ProjectsLocationsRegistriesPatch Text
plrpName = lens _plrpName (\ s a -> s{_plrpName = a})
-- | JSONP
plrpCallback :: Lens' ProjectsLocationsRegistriesPatch (Maybe Text)
plrpCallback
  = lens _plrpCallback (\ s a -> s{_plrpCallback = a})
-- Builds the actual client call: the path capture and query parameters
-- are passed positionally in the order the resource type declares them,
-- followed by the request body and service configuration.
instance GoogleRequest
           ProjectsLocationsRegistriesPatch
         where
        type Rs ProjectsLocationsRegistriesPatch =
             DeviceRegistry
        type Scopes ProjectsLocationsRegistriesPatch =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/cloudiot"]
        requestClient ProjectsLocationsRegistriesPatch'{..}
          = go _plrpName _plrpXgafv _plrpUploadProtocol
              _plrpUpdateMask
              _plrpAccessToken
              _plrpUploadType
              _plrpCallback
              (Just AltJSON)
              _plrpPayload
              cloudIOTService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy ProjectsLocationsRegistriesPatchResource)
                      mempty
| brendanhay/gogol | gogol-cloudiot/gen/Network/Google/Resource/CloudIOT/Projects/Locations/Registries/Patch.hs | mpl-2.0 | 6,247 | 0 | 17 | 1,366 | 864 | 505 | 359 | 128 | 1 |
module OSRIC.Main (main) where
import Safe
import System.Console.CmdArgs.Implicit hiding (args)
import EasyIRCBot hiding (logfile, channel, server, nick)
import OSRIC.Dice (rollDice)
import OSRIC.Voting
import OSRIC.DM
import OSRIC.State
-- | Command-line options for the bot, parsed by cmdargs; each field name
-- doubles as a flag name (e.g. @--server@, @--nick@).
data BotArgs = BotArgs { server :: String
                       , channel :: String
                       , admins :: String
                       , logfile :: String
                       , errorlogfile :: String
                       , nick :: String
                       } deriving (Show, Data, Typeable)
-- Default option values with cmdargs help annotations.  Note 'admins' is
-- one space-separated string; it is split with 'words' when used.
botArgs = BotArgs { server = "irc.freenode.net" &= help "Server to connect to" &= typ "SERVER"
                  , channel = "#hs-irc-osric-test" &= help "Channel to join on connecting to server" &= typ "CHANNEL"
                  , admins = "kaashif pizza" &= help "List of users to be considered admins" &= typ "NICKS"
                  , logfile = "osric-irc.log" &= help "File to log INFO messages to" &= typ "FILE"
                  , errorlogfile = "osric-irc-errors.log" &= help "File to log ERROR messages to" &= typ "FILE"
                  , nick = "osric-bot" &= help "The bot's name" &= typ "NICK"
                  } &= summary "irc-osric v0.0.1" &= program "irc-osric"
-- Help text listing every command the PRIVMSG handler understands.
helptext = "Valid commands: quit, id, roll, vote, results, dm, help."
-- | Assemble the bot from the parsed command-line options: connect to the
-- server, join the channel, configure logging and the initial OSRIC game
-- state, and install the PRIVMSG handler that dispatches \"!\" commands.
bot :: BotArgs -> BotBuilder OSRICState ()
bot a = do
  connectTo $ server a
  join $ channel a
  logInfoTo $ logfile a
  logErrorsTo $ errorlogfile a
  myNameIs $ nick a
  setInitialState initialOSRIC
  on PRIVMSG $ \msg ->
    if "!" `isPrefixOf` (msg^.text)
      then replyTo (replyTarget msg) =<< makeResponse msg
      else return ()
  where -- Commands sent directly to the bot are answered in private;
        -- everything else is answered in the channel it arrived on.
        -- Fix: compare against the configured nick instead of the
        -- literal "osric-bot", which broke when --nick was overridden.
        replyTarget msg
          | (msg^.address) == nick a = msg^.sender
          | otherwise                = msg^.address
        -- Admins get the extended command set; everyone else the normal one.
        makeResponse msg
          | (msg^.sender) `elem` adm = makeAdminResponse msg
          | otherwise                = makeNormalResponse msg
        makeNormalResponse msg = case v msg of
          "quit"    -> return $ quit NoReason
          "id"      -> return $ privmsg $ intercalate " " $ ar msg
          -- "!roll [n [d]]": roll n d-sided dice, defaulting to 1d6 when
          -- arguments are missing or unparsable.
          "roll"    -> lift $ fmap (privmsg.show) $
                         rollDice (readDef 1 $ atDef "1" (ar msg) 0)
                                  (readDef 6 $ atDef "6" (ar msg) 1)
          "vote"    -> (msg^.sender) `voteFor` (atDef "no-one" (ar msg) 0)
          "results" -> tellResults
          "dm"      -> tellDM
          "help"    -> return $ privmsg helptext
          _         -> return $ privmsg "Invalid command"
        -- Admin-only extension: "!dm <name>" sets the DM; a bare "!dm"
        -- still just reports the current one.  Pattern matching replaces
        -- the previous length-guarded partial (!! 0).
        makeAdminResponse msg = case v msg of
          "dm" -> case ar msg of
                    []      -> tellDM
                    (d : _) -> setDM d
          _    -> makeNormalResponse msg
        adm  = words $ admins a
        v m  = verb $ p m
        ar m = args $ p m
        p m  = parse (m^.text)
-- | Parse the command-line options, then build and run the bot.
main :: IO ()
main = do
  parsed <- cmdArgs botArgs
  run (bot parsed)
| kaashif/hs-irc-osric | lib/OSRIC/Main.hs | agpl-3.0 | 3,298 | 0 | 16 | 1,430 | 848 | 434 | 414 | 62 | 13 |
{-# LANGUAGE OverloadedStrings #-}
--------------------------------------------------------------------------------
-- See end of this file for licence information.
--------------------------------------------------------------------------------
-- |
-- Module : Swish.RDF.Vocabulary.Provenance
-- Copyright : (c) 2012 Douglas Burke
-- License : GPL V2
--
-- Maintainer : Douglas Burke
-- Stability : experimental
-- Portability : OverloadedStrings
--
-- This module defines some vocabulary terms from the Provenance Ontology
-- <http://www.w3.org/TR/prov-o/> by the W3C Provenance Working Group
-- (<http://www.w3.org/2011/prov/wiki/Main_Page/>).
-- This is /experimental/ since the Ontology is still a Working Draft.
--
--------------------------------------------------------------------------------
module Swish.RDF.Vocabulary.Provenance
(
-- | The version used for this module is
-- \"W3C Working Draft 13 December 2011\",
-- <http://www.w3.org/TR/2011/WD-prov-o-20111213/>.
namespacePROV
-- * Classes
, provActivity
, provAgent
, provControl
, provEntity
, provGeneration
, provLocation
, provParticipation
, provProvenanceContainer
, provQualifiedInvolvement
, provRecipe
, provRole
, provUsage
-- * Properties
, provdependedOn
, provendedAt
, provhadLocation
, provhadOriginalSource
, provhadParticipant
, provhadQualifiedControl
, provhadQualifiedEntity
, provhadQualifiedGeneration
, provhadQualifiedParticipation
, provhadQualifiedUsage
, provhadRecipe
, provhadRole
, provhadTemporalValue
, provstartedAt
, provused
, provwasAttributedTo
, provwasComplementOf
, provwasControlledBy
, provwasDerivedFrom
, provwasEventuallyDerivedFrom
, provwasGeneratedAt
, provwasGeneratedBy
, provwasInformedBy
, provwasQuoteOf
, provwasRevisionOf
, provwasScheduledAfter
, provwasSummaryOf
)
where
import Swish.Namespace (Namespace, makeNamespace, ScopedName, makeNSScopedName)
import Swish.QName (LName)
import Data.Maybe (fromMaybe)
import Network.URI (URI, parseURI)
------------------------------------------------------------
-- Namespace
------------------------------------------------------------
-- | The PROV-O namespace URI.  The literal is known to be valid, so
-- the 'Nothing' branch is unreachable in practice.
provURI :: URI
provURI =
  case parseURI "http://www.w3.org/ns/prov-o/" of
    Just uri -> uri
    Nothing  -> error "Internal error processing PROV URI"
-- | The @prov@ namespace: maps the prefix @prov@ to
-- <http://www.w3.org/ns/prov-o/>, built from 'provURI'.
namespacePROV :: Namespace
namespacePROV = makeNamespace (Just "prov") provURI
------------------------------------------------------------
-- Terms
------------------------------------------------------------
-- | Turn a local name into a 'ScopedName' in the PROV namespace.
toS :: LName -> ScopedName
toS = makeNSScopedName namespacePROV
-- Classes of the PROV ontology; each is a scoped name in the
-- @prov@ namespace (see 'toS').
-- | @prov:Activity@ from <http://www.w3.org/TR/prov-o/#activity>.
provActivity :: ScopedName
provActivity = toS "Activity"
-- | @prov:Agent@ from <http://www.w3.org/TR/prov-o/#agent>.
provAgent :: ScopedName
provAgent = toS "Agent"
-- | @prov:Control@ from <http://www.w3.org/TR/prov-o/#control>.
provControl :: ScopedName
provControl = toS "Control"
-- | @prov:Entity@ from <http://www.w3.org/TR/prov-o/#entity>.
provEntity :: ScopedName
provEntity = toS "Entity"
-- | @prov:Generation@ from <http://www.w3.org/TR/prov-o/#generation>.
provGeneration :: ScopedName
provGeneration = toS "Generation"
-- | @prov:Location@ from <http://www.w3.org/TR/prov-o/#location>.
provLocation :: ScopedName
provLocation = toS "Location"
-- | @prov:Participation@ from <http://www.w3.org/TR/prov-o/#participation>.
provParticipation :: ScopedName
provParticipation = toS "Participation"
-- | @prov:ProvenanceContainer@ from <http://www.w3.org/TR/prov-o/#provenancecontainer>.
provProvenanceContainer :: ScopedName
provProvenanceContainer = toS "ProvenanceContainer"
-- | @prov:QualifiedInvolvement@ from <http://www.w3.org/TR/prov-o/#qualifiedinvolvement>.
provQualifiedInvolvement :: ScopedName
provQualifiedInvolvement = toS "QualifiedInvolvement"
-- | @prov:Recipe@ from <http://www.w3.org/TR/prov-o/#recipe>.
provRecipe :: ScopedName
provRecipe = toS "Recipe"
-- | @prov:Role@ from <http://www.w3.org/TR/prov-o/#role>.
provRole :: ScopedName
provRole = toS "Role"
-- | @prov:Usage@ from <http://www.w3.org/TR/prov-o/#usage>.
provUsage :: ScopedName
provUsage = toS "Usage"
-- Properties of the PROV ontology; each is a scoped name in the
-- @prov@ namespace (see 'toS').
-- | @prov:dependedOn@ from <http://www.w3.org/TR/prov-o/#dependedon>.
provdependedOn :: ScopedName
provdependedOn = toS "dependedOn"
-- | @prov:endedAt@ from <http://www.w3.org/TR/prov-o/#endedat>.
provendedAt :: ScopedName
provendedAt = toS "endedAt"
-- | @prov:hadLocation@ from <http://www.w3.org/TR/prov-o/#hadlocation>.
provhadLocation :: ScopedName
provhadLocation = toS "hadLocation"
-- | @prov:hadOriginalSource@ from <http://www.w3.org/TR/prov-o/#hadoriginalsource>.
provhadOriginalSource :: ScopedName
provhadOriginalSource = toS "hadOriginalSource"
-- | @prov:hadParticipant@ from <http://www.w3.org/TR/prov-o/#hadparticipant>.
provhadParticipant :: ScopedName
provhadParticipant = toS "hadParticipant"
-- | @prov:hadQualifiedControl@ from <http://www.w3.org/TR/prov-o/#hadqualifiedcontrol>.
provhadQualifiedControl :: ScopedName
provhadQualifiedControl = toS "hadQualifiedControl"
-- | @prov:hadQualifiedEntity@ from <http://www.w3.org/TR/prov-o/#hadqualifiedentity>.
provhadQualifiedEntity :: ScopedName
provhadQualifiedEntity = toS "hadQualifiedEntity"
-- | @prov:hadQualifiedGeneration@ from <http://www.w3.org/TR/prov-o/#hadqualifiedgeneration>.
provhadQualifiedGeneration :: ScopedName
provhadQualifiedGeneration = toS "hadQualifiedGeneration"
-- | @prov:hadQualifiedParticipation@ from <http://www.w3.org/TR/prov-o/#hadqualifiedparticipation>.
provhadQualifiedParticipation :: ScopedName
provhadQualifiedParticipation = toS "hadQualifiedParticipation"
-- | @prov:hadQualifiedUsage@ from <http://www.w3.org/TR/prov-o/#hadqualifiedusage>.
provhadQualifiedUsage :: ScopedName
provhadQualifiedUsage = toS "hadQualifiedUsage"
-- | @prov:hadRecipe@ from <http://www.w3.org/TR/prov-o/#hadrecipe>.
provhadRecipe :: ScopedName
provhadRecipe = toS "hadRecipe"
-- | @prov:hadRole@ from <http://www.w3.org/TR/prov-o/#hadrole>.
provhadRole :: ScopedName
provhadRole = toS "hadRole"
-- | @prov:hadTemporalValue@ from <http://www.w3.org/TR/prov-o/#hadtemporalvalue>.
provhadTemporalValue :: ScopedName
provhadTemporalValue = toS "hadTemporalValue"
-- | @prov:startedAt@ from <http://www.w3.org/TR/prov-o/#startedat>.
provstartedAt :: ScopedName
provstartedAt = toS "startedAt"
-- | @prov:used@ from <http://www.w3.org/TR/prov-o/#used>.
provused :: ScopedName
provused = toS "used"
-- | @prov:wasAttributedTo@ from <http://www.w3.org/TR/prov-o/#wasattributedto>.
provwasAttributedTo :: ScopedName
provwasAttributedTo = toS "wasAttributedTo"
-- | @prov:wasComplementOf@ from <http://www.w3.org/TR/prov-o/#wascomplementof>.
provwasComplementOf :: ScopedName
provwasComplementOf = toS "wasComplementOf"
-- | @prov:wasControlledBy@ from <http://www.w3.org/TR/prov-o/#wascontrolledby>.
provwasControlledBy :: ScopedName
provwasControlledBy = toS "wasControlledBy"
-- | @prov:wasDerivedFrom@ from <http://www.w3.org/TR/prov-o/#wasderivedfrom>.
provwasDerivedFrom :: ScopedName
provwasDerivedFrom = toS "wasDerivedFrom"
-- | @prov:wasEventuallyDerivedFrom@ from <http://www.w3.org/TR/prov-o/#waseventuallyderivedfrom>.
provwasEventuallyDerivedFrom :: ScopedName
provwasEventuallyDerivedFrom = toS "wasEventuallyDerivedFrom"
-- | @prov:wasGeneratedAt@ from <http://www.w3.org/TR/prov-o/#wasgeneratedat>.
provwasGeneratedAt :: ScopedName
provwasGeneratedAt = toS "wasGeneratedAt"
-- | @prov:wasGeneratedBy@ from <http://www.w3.org/TR/prov-o/#wasgeneratedby>.
provwasGeneratedBy :: ScopedName
provwasGeneratedBy = toS "wasGeneratedBy"
-- | @prov:wasInformedBy@ from <http://www.w3.org/TR/prov-o/#wasinformedby>.
provwasInformedBy :: ScopedName
provwasInformedBy = toS "wasInformedBy"
-- | @prov:wasQuoteOf@ from <http://www.w3.org/TR/prov-o/#wasquoteof>.
provwasQuoteOf :: ScopedName
provwasQuoteOf = toS "wasQuoteOf"
-- | @prov:wasRevisionOf@ from <http://www.w3.org/TR/prov-o/#wasrevisionof>.
provwasRevisionOf :: ScopedName
provwasRevisionOf = toS "wasRevisionOf"
-- | @prov:wasScheduledAfter@ from <http://www.w3.org/TR/prov-o/#wasscheduledafter>.
provwasScheduledAfter :: ScopedName
provwasScheduledAfter = toS "wasScheduledAfter"
-- | @prov:wasSummaryOf@ from <http://www.w3.org/TR/prov-o/#wassummaryof>.
provwasSummaryOf :: ScopedName
provwasSummaryOf = toS "wasSummaryOf"
--------------------------------------------------------------------------------
--
-- Copyright (c) 2012 Douglas Burke
-- All rights reserved.
--
-- This file is part of Swish.
--
-- Swish is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- Swish is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with Swish; if not, write to:
-- The Free Software Foundation, Inc.,
-- 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
--------------------------------------------------------------------------------
| DougBurke/swish | src/Swish/RDF/Vocabulary/Provenance.hs | lgpl-2.1 | 9,686 | 0 | 8 | 1,289 | 848 | 520 | 328 | 131 | 1 |
module Zipping where
-- | Pair up the elements of two lists, stopping at the shorter one.
-- Same behaviour as 'Prelude.zip'.
--
-- Fix: the original had two overlapping base-case equations, and the
-- @_ []@ clause was tried first, forcing the second list even when
-- the first was already empty.  A single wildcard clause (as in the
-- Prelude) is simpler and no stricter than necessary.
myZip :: [a] -> [b] -> [(a,b)]
myZip (x : xs) (y : ys) = (x, y) : myZip xs ys
myZip _ _ = []
-- | Combine two lists element-wise with a function, stopping at the
-- shorter one.  Same behaviour as 'Prelude.zipWith'.
--
-- Fix: as with 'myZip', the two overlapping base-case equations are
-- collapsed into one wildcard clause, avoiding the needless forcing
-- of the second list when the first is already empty.
myZipWith :: (a -> b -> c) -> [a] -> [b] -> [c]
myZipWith f (x : xs) (y : ys) = f x y : myZipWith f xs ys
myZipWith _ _ _ = []
| thewoolleyman/haskellbook | 09/11/maor/zipping.hs | unlicense | 290 | 0 | 8 | 69 | 210 | 113 | 97 | 9 | 1 |
module External.A274923 (a274923) where
import HelperSequences.A001057 (a001057)
import HelperSequences.A002061 (a002061)
import HelperSequences.A002522 (a002522)
import HelperSequences.A003059 (a003059)
-- | OEIS A274923.
-- NOTE(review): the helpers are other OEIS sequences (A001057,
-- A002061, A002522, A003059); see their modules for semantics.
a274923 :: Int -> Integer
a274923 1 = 0
a274923 i
  | inBand    = a001057 (band - 1)
  | even band = a001057 (band - 2) + offset
  | otherwise = a001057 (band - 2) - offset
  where
    band   = a003059 (i - 1)
    m      = toInteger i
    inBand = a002061 band <= m && m <= a002522 band
    offset = m - a002522 (band - 1)
| peterokagey/haskellOEIS | src/External/A274923.hs | apache-2.0 | 509 | 0 | 11 | 148 | 203 | 105 | 98 | 14 | 1 |
-- |
-- Module : Spark.Context
--
-- Context defines the stratum on which the distributed process will
-- be based.
module Spark.Context where
import Control.Distributed.Process
import Control.Distributed.Static
-- | Cluster layout used to run a workflow: a single master node
-- plus the slave nodes on which tasks can be placed.
data Strategy = Distributed { masterNode :: NodeId
                            , slaveNodes :: [NodeId] }
-- | Context for creating spark workflow.
-- Defines the context for processing RDD tasks. The context stores
-- list of peers (slaves) where the tasks can be run. The peers, in
-- the context of Distributed.Process are nodes on which tasks can
-- run.
data Context = Context { _lookupTable :: RemoteTable -- ^ remote table for resolving static values
                       , _strategy :: Strategy       -- ^ master/slave layout for running tasks
                       }
-- | Build a 'Context' from a remote table, a master node and the
-- slave nodes on which tasks may be scheduled.
createContextFrom :: RemoteTable -> NodeId -> [NodeId] -> IO Context
createContextFrom table master slaves =
  return (Context table (Distributed master slaves))
-- | Creates the context.
-- Note that there can only one context in the given cluster. This
-- is not enforced yet, and creationg more than one context is not
-- tested either.
| yogeshsajanikar/hspark | src/Spark/Context.hs | apache-2.0 | 1,081 | 0 | 9 | 264 | 125 | 78 | 47 | 9 | 1 |
{-# LANGUAGE TemplateHaskell, QuasiQuotes, TypeFamilies, GADTs #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
module Model where
import Prelude
import Yesod
import Data.Text (Text)
import Text.Blaze (ToHtml(toHtml))
-- You can define all of your database entities in the entities file.
-- You can find more information on persistent and how to declare entities
-- at:
-- http://www.yesodweb.com/book/persistent/
-- Template Haskell splice: generate entity types and "migrateAll"
-- from the declarations in config/models.
share [mkPersist sqlSettings, mkMigrate "migrateAll"] $(persistFile "config/models")
-- | Sex field stored on entities.
-- NOTE(review): 'None' presumably means "not specified" — confirm
-- against the forms that populate it.
data Sex = None | Male | Female
    deriving (Show, Read, Eq, Ord, Enum, Bounded)
-- Persist 'Sex' in the database via its 'Show'/'Read' text form.
derivePersistField "Sex"
-- Render a 'Sex' value in HTML as (the escaping of) its 'show' text.
instance ToHtml Sex where
    toHtml = toHtml . show
| cutsea110/tut | Model.hs | bsd-2-clause | 691 | 0 | 8 | 107 | 131 | 74 | 57 | 14 | 0 |
module Data.Mathematica.Job where
-- | Announce on stdout that a job has started.
startJob :: IO ()
startJob = putStrLn "job started"
| wavewave/mathematica-data | lib/Data/Mathematica/Job.hs | bsd-2-clause | 94 | 0 | 7 | 18 | 28 | 15 | 13 | 4 | 1 |
-- | Paragon Abstract Syntax Tree. Modifiers.
module Language.Java.Paragon.Syntax.Modifiers
(
module Language.Java.Paragon.Syntax.Modifiers
, module Language.Java.Paragon.Syntax.Expressions
) where
import Language.Java.Paragon.Syntax.Expressions
import Language.Java.Paragon.Annotation
import Language.Java.Paragon.Annotated
-- | Modifiers for declarations.  Every constructor carries the source
-- 'Annotation' of the modifier as its first field.
data Modifier
  = Public Annotation -- ^ public
  | Protected Annotation -- ^ protected
  | Private Annotation -- ^ private
  | Static Annotation -- ^ static
  | Abstract Annotation -- ^ abstract
  | Final Annotation -- ^ final
  | Native Annotation -- ^ native
  | Synchronized Annotation -- ^ synchronized
  | Transient Annotation -- ^ transient
  | Volatile Annotation -- ^ volatile
  | StrictFP Annotation -- ^ strictfp
  -- Paragon specific
  | Typemethod Annotation -- ^ typemethod
  | Reflexive Annotation -- ^ reflexive
  | Transitive Annotation -- ^ transitive
  | Symmetric Annotation -- ^ symmetric
  | Readonly Annotation -- ^ readonly
  | Notnull Annotation -- ^ notnull
  | Reads Annotation Policy -- ^ @?@ — read-effect modifier carrying a 'Policy'
  | Writes Annotation Policy -- ^ @!@ — write-effect modifier carrying a 'Policy'
  -- TODO: more Paragon modifiers
    deriving (Show, Eq)
-- | Extract the source annotation: by construction it is the first
-- field of every 'Modifier' constructor.
instance Annotated Modifier where
  ann m = case m of
    Public x       -> x
    Protected x    -> x
    Private x      -> x
    Static x       -> x
    Abstract x     -> x
    Final x        -> x
    Native x       -> x
    Synchronized x -> x
    Transient x    -> x
    Volatile x     -> x
    StrictFP x     -> x
    Typemethod x   -> x
    Reflexive x    -> x
    Transitive x   -> x
    Symmetric x    -> x
    Readonly x     -> x
    Notnull x      -> x
    Reads x _      -> x
    Writes x _     -> x
| bvdelft/paragon | src/Language/Java/Paragon/Syntax/Modifiers.hs | bsd-3-clause | 1,816 | 0 | 8 | 516 | 466 | 267 | 199 | 48 | 0 |
-- | Re-export "Database.Persist.Sql" without any clashes with
-- @esqueleto@.
module Database.Esqueleto.Internal.PersistentImport
( module Database.Persist.Sql
) where
import Database.Persist.Sql hiding
( BackendSpecificFilter, Filter(..), PersistQuery(..), SelectOpt(..)
, Update(..), delete, deleteWhereCount, updateWhereCount, selectList
, update
, selectKeysList, deleteCascadeWhere, (=.), (+=.), (-=.), (*=.), (/=.)
, (==.), (!=.), (<.), (>.), (<=.), (>=.), (<-.), (/<-.), (||.)
, listToJSON, mapToJSON, getPersistMap, limitOffsetOrder, selectSource )
| fpco/esqueleto | src/Database/Esqueleto/Internal/PersistentImport.hs | bsd-3-clause | 575 | 0 | 6 | 75 | 165 | 122 | 43 | 9 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
module Ivory.Language (
-- * Kinds
Area(..)
, Proc(..)
-- * Types
, IvoryType(), IvoryArea()
, IvoryVar()
, IvoryExpr()
, OpaqueType()
-- ** Non-null References
, IvoryRef()
, ConstRef()
, IvoryStore()
, Ref(), refToPtr, constRef, deref, store, refCopy
-- ** Stack Allocation
, IvoryInit(..), Init()
, IvoryZeroVal(izeroval)
, IvoryZero(izero)
, iarray
, InitStruct(), (.=), istruct
, local
-- ** SizeOf
, IvorySizeOf, sizeOf
-- ** Nullable Pointers
, Ptr(), nullPtr
-- ** Booleans
, IBool(), true, false
-- ** Characters
, IChar(), char
-- ** Constant strings
, IString()
-- ** Dynamic bounded-length strings
, IvoryString(..)
-- ** Signed Integers
, Sint8()
, Sint16()
, Sint32()
, Sint64()
-- ** Unsigned Integers
, Uint8()
, Uint16()
, Uint32()
, Uint64()
-- ** Floating-point Numbers
, IFloat()
, IDouble()
, isnan, isinf, roundF, ceilF, floorF, atan2F
, ifloat, idouble
-- * Effects
, Effects(..)
, BreakEff(..), GetBreaks(), AllowBreak(), ClearBreak(), noBreak
, ReturnEff(..), GetReturn(), ClearReturn(), noReturn
, AllocEff(..), GetAlloc(), ClearAlloc(), noAlloc
, AllocEffects, ProcEffects, NoEffects
-- * Language
-- ** Monadic Interface
, Ivory()
, RefScope(..)
-- ** Subexpression naming
, assign
-- ** Constants
, extern, inclSym
-- ** Arithmetic (operators from the 'Num' class are also provided).
, IvoryIntegral((.%), iDiv), (./)
-- ** Comparisons
, IvoryEq((==?),(/=?))
, IvoryOrd((>?),(>=?),(<?),(<=?))
-- ** Boolean operators
, iNot, (.&&), (.||)
-- ** Bit operators
, IvoryBits((.&),(.|),(.^),iComplement,iShiftL,iShiftR, iBitSize), extractByte
, BitSplit(lbits, ubits), BitCast(bitCast)
, TwosComplementCast(twosComplementCast, twosComplementRep)
-- ** Bit data
-- * bit types
, Bits(), Bit, BitArray(), BitRep()
, repToBits, bitsToRep, zeroBits
, bitLength, bitIx
-- * bit data
, BitData(), BitDataField(), BitDataRep
-- * bit data conversions
, toBits, fromBits
, toRep, fromRep
-- * bit data field operations
, setBitDataBit, clearBitDataBit, getBitDataField, setBitDataField
-- * bit data operators
, (#!) -- access nth element of BitArray
, (#.) -- flip getBitDataField
, (#>) -- BitDataField composition (like Control.Category.>>>)
-- * bit actions
, BitDataM(), runBits, withBits, withBitsRef
, clear, setBit, clearBit, setField
, bitToBool, boolToBit
-- ** External memory areas
, MemArea(), area, importArea
, ConstMemArea(), constArea, importConstArea
, IvoryAddrOf(addrOf)
-- ** Procedures
, Def()
, ProcPtr(), procPtr
, proc, voidProc, importProc
, Body(), body, importFrom
-- *** Pre/Post-Conditions
, requires
, checkStored
, ensures
, ensures_
-- ** Assumption/Assertion statements
, assert
, assume
-- ** Structures
, IvoryStruct(..), StructDef(), (~>), Label()
, ASymbol
-- ** Arrays
, (!)
, fromIx, toIx, Ix(), ixSize
, arrayLen
, toCArray
, ANat
, fromTypeNat
-- ** Looping
, for, times
, breakOut
, arrayMap
, forever
-- ** Call
, call, indirect
, call_, indirect_
-- ** Conditional Branching
, ifte_, (?), withRef
-- ** Return
, ret, retVoid
-- ** Type-safe casting.
, SafeCast(), RuntimeCast(), Default()
, safeCast, castWith, castDefault
, SignCast(), signCast
-- ** Module Definitions
, AST.Module(), moduleName, package
, ModuleDef, incl, depend, defStruct
, defStringType
, defMemArea, defConstMemArea
, private, public
-- * Quasiquoters
, ivory
, ivoryFile
, ivoryBlk
-- * Utilities
, Proxy(..), comment
, module Ivory.Language.Coroutine
) where
import Ivory.Language.Area
import Ivory.Language.Array
import Ivory.Language.Assert
import Ivory.Language.Bits
import Ivory.Language.CArray
import Ivory.Language.Cast
import Ivory.Language.Comment
import Ivory.Language.Cond
import Ivory.Language.Const
import Ivory.Language.Coroutine
import Ivory.Language.Effects
import Ivory.Language.Float
import Ivory.Language.IBool
import Ivory.Language.IChar
import Ivory.Language.IIntegral
import Ivory.Language.IString
import Ivory.Language.Init
import Ivory.Language.Loop
import Ivory.Language.MemArea
import Ivory.Language.Module
import Ivory.Language.Monad
import Ivory.Language.Proc
import Ivory.Language.Proxy
import Ivory.Language.Ptr
import Ivory.Language.Ref
import Ivory.Language.Scope
import Ivory.Language.Sint
import Ivory.Language.SizeOf
import Ivory.Language.String
import Ivory.Language.Struct
import Ivory.Language.Type
import Ivory.Language.Uint
import Ivory.Language.Syntax.Concrete.QQ
import Ivory.Language.BitData.Array
import Ivory.Language.BitData.BitData
import Ivory.Language.BitData.Bits
import Ivory.Language.BitData.Monad
import qualified Ivory.Language.Syntax.AST as AST
| Hodapp87/ivory | ivory/src/Ivory/Language.hs | bsd-3-clause | 5,204 | 0 | 5 | 1,022 | 1,202 | 851 | 351 | 276 | 0 |
module Mock where
import Bluetooth (UUID)
import Bluetooth.Internal.Interfaces (bluezName, bluezPath)
import Control.Exception (finally)
import Data.IORef
import Paths_ble
import System.IO
import qualified System.Process as P
-- | Run an action against a mock BlueZ service.
--
-- Points the library at the mock bus name and object path, launches
-- the mock via @test/Mock/start_mock.sh@, waits for its first line of
-- output as a readiness handshake, runs the action, and tears the
-- mock process down afterwards.
--
-- Fix: the mock process is now terminated via 'finally', so it no
-- longer leaks when 'action' throws (previously 'terminateProcess'
-- ran only on the success path).
withAService :: IO a -> IO a
withAService action = do
  writeIORef bluezName "org.bluez.Mock"
  writeIORef bluezPath "/org/bluez/hci0"
  -- Line buffering so the readiness handshake below is not starved.
  hSetBuffering stdout LineBuffering
  hSetBuffering stdin LineBuffering
  file <- getDataFileName "test/Mock/start_mock.sh"
  -- The 'Just' pattern is safe: std_out is explicitly CreatePipe.
  (_inHandle, Just outHandle, _errHandle, proc)
    <- P.createProcess (P.shell $ "bash " ++ file) { P.std_out = P.CreatePipe }
  -- Block until the mock prints its first line, signalling readiness.
  _ <- hGetLine outHandle
  action `finally` P.terminateProcess proc
-- | UUID of the mock GATT service.
-- NOTE(review): presumably must match the service defined by
-- test/Mock/start_mock.sh — confirm against the script.
mockServiceUUID :: UUID
mockServiceUUID = "4ea7235c-8d49-4a6f-abe6-1883218a93a7"
-- | UUID of the mock characteristic.
-- NOTE(review): same caveat as 'mockServiceUUID'.
mockCharUUID :: UUID
mockCharUUID = "6fe4afc7-ebf8-4369-90aa-0fe45064e3f9"
-- | Constant value used by tests alongside the mock characteristic.
mockCharValue :: Int
mockCharValue = 1797
| plow-technologies/ble | test/Mock.hs | bsd-3-clause | 948 | 0 | 13 | 209 | 225 | 116 | 109 | 26 | 1 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
************************************************************************
* *
\section[OccurAnal]{Occurrence analysis pass}
* *
************************************************************************
The occurrence analyser re-typechecks a core expression, returning a new
core expression with (hopefully) improved usage information.
-}
{-# LANGUAGE CPP, BangPatterns, MultiWayIf, ViewPatterns #-}
module OccurAnal (
occurAnalysePgm, occurAnalyseExpr, occurAnalyseExpr_NoBinderSwap
) where
#include "HsVersions.h"
import GhcPrelude
import CoreSyn
import CoreFVs
import CoreUtils ( exprIsTrivial, isDefaultAlt, isExpandableApp,
stripTicksTopE, mkTicks )
import CoreArity ( joinRhsArity )
import Id
import IdInfo
import Name( localiseName )
import BasicTypes
import Module( Module )
import Coercion
import Type
import VarSet
import VarEnv
import Var
import Demand ( argOneShots, argsOneShots )
import Digraph ( SCC(..), Node(..)
, stronglyConnCompFromEdgedVerticesUniq
, stronglyConnCompFromEdgedVerticesUniqR )
import Unique
import UniqFM
import UniqSet
import Util
import Outputable
import Data.List
import Control.Arrow ( second )
{-
************************************************************************
* *
occurAnalysePgm, occurAnalyseExpr, occurAnalyseExpr_NoBinderSwap
* *
************************************************************************
Here's the externally-callable interface:
-}
-- | Occurrence-analyse a whole 'CoreProgram'.
--
-- The analysis is seeded with the free variables of the imported
-- RULES and VECTORISE declarations ('initial_uds'), so anything they
-- mention is kept alive.  If some of that seeded usage is still
-- unresolved after the pass, all bindings are glommed into one big
-- Rec and re-analysed so the loop breakers come out right
-- (see Note [Glomming]).
occurAnalysePgm :: Module       -- Used only in debug output
                -> (Id -> Bool)         -- Active unfoldings
                -> (Activation -> Bool) -- Active rules
                -> [CoreRule] -> [CoreVect] -> VarSet
                -> CoreProgram -> CoreProgram
occurAnalysePgm this_mod active_unf active_rule imp_rules vects vectVars binds
  | isEmptyDetails final_usage
  = occ_anald_binds
  | otherwise   -- See Note [Glomming]
  = WARN( True, hang (text "Glomming in" <+> ppr this_mod <> colon)
                   2 (ppr final_usage ) )
    occ_anald_glommed_binds
  where
    init_env = initOccEnv { occ_rule_act = active_rule
                          , occ_unf_act = active_unf }
    (final_usage, occ_anald_binds) = go init_env binds
    (_, occ_anald_glommed_binds) = occAnalRecBind init_env TopLevel
                                                  imp_rule_edges
                                                  (flattenBinds occ_anald_binds)
                                                  initial_uds
          -- It's crucial to re-analyse the glommed-together bindings
          -- so that we establish the right loop breakers. Otherwise
          -- we can easily create an infinite loop (Trac #9583 is an example)
    initial_uds = addManyOccsSet emptyDetails
                            (rulesFreeVars imp_rules `unionVarSet`
                             vectsFreeVars vects `unionVarSet`
                             vectVars)
    -- The RULES and VECTORISE declarations keep things alive! (For VECTORISE declarations,
    -- we only get them *until* the vectoriser runs. Afterwards, these dependencies are
    -- reflected in 'vectors' — see Note [Vectorisation declarations and occurrences].)
    -- Note [Preventing loops due to imported functions rules]
    -- imp_rule_edges maps each free variable of an imported rule's
    -- argument to the free variables of that rule's RHS.
    imp_rule_edges = foldr (plusVarEnv_C unionVarSet) emptyVarEnv
                           [ mapVarEnv (const maps_to) $
                             getUniqSet (exprFreeIds arg `delVarSetList` ru_bndrs imp_rule)
                           | imp_rule <- imp_rules
                           , not (isBuiltinRule imp_rule) -- See Note [Plugin rules]
                           , let maps_to = exprFreeIds (ru_rhs imp_rule)
                                           `delVarSetList` ru_bndrs imp_rule
                           , arg <- ru_args imp_rule ]
    -- Analyse the binds back-to-front: the usage of the tail
    -- ('bs_usage') feeds the analysis of each earlier bind.
    go :: OccEnv -> [CoreBind] -> (UsageDetails, [CoreBind])
    go _ []
        = (initial_uds, [])
    go env (bind:binds)
        = (final_usage, bind' ++ binds')
        where
           (bs_usage, binds') = go env binds
           (final_usage, bind') = occAnalBind env TopLevel imp_rule_edges bind
                                              bs_usage
-- | Occurrence-analyse an expression (with the binder-swap
-- transformation enabled) and discard the usage details.
occurAnalyseExpr :: CoreExpr -> CoreExpr
occurAnalyseExpr = occurAnalyseExpr' True

-- | As 'occurAnalyseExpr', but with the binder swap disabled.
occurAnalyseExpr_NoBinderSwap :: CoreExpr -> CoreExpr
occurAnalyseExpr_NoBinderSwap = occurAnalyseExpr' False

-- Shared worker; the flag controls the binder swap.
occurAnalyseExpr' :: Bool -> CoreExpr -> CoreExpr
occurAnalyseExpr' enable_binder_swap expr = expr'
  where
    (_usage, expr') = occAnal env expr
    env = initOccEnv { occ_binder_swap = enable_binder_swap }
{- Note [Plugin rules]
~~~~~~~~~~~~~~~~~~~~~~
Conal Elliott (Trac #11651) built a GHC plugin that added some
BuiltinRules (for imported Ids) to the mg_rules field of ModGuts, to
do some domain-specific transformations that could not be expressed
with an ordinary pattern-matching CoreRule. But then we can't extract
the dependencies (in imp_rule_edges) from ru_rhs etc, because a
BuiltinRule doesn't have any of that stuff.
So we simply assume that BuiltinRules have no dependencies, and filter
them out from the imp_rule_edges comprehension.
-}
{-
************************************************************************
* *
Bindings
* *
************************************************************************
Note [Recursive bindings: the grand plan]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we come across a binding group
Rec { x1 = r1; ...; xn = rn }
we treat it like this (occAnalRecBind):
1. Occurrence-analyse each right hand side, and build a
"Details" for each binding to capture the results.
Wrap the details in a Node (details, node-id, dep-node-ids),
where node-id is just the unique of the binder, and
dep-node-ids lists all binders on which this binding depends.
We'll call these the "scope edges".
See Note [Forming the Rec groups].
All this is done by makeNode.
2. Do SCC-analysis on these Nodes. Each SCC will become a new Rec or
NonRec. The key property is that every free variable of a binding
is accounted for by the scope edges, so that when we are done
everything is still in scope.
3. For each Cyclic SCC of the scope-edge SCC-analysis in (2), we
identify suitable loop-breakers to ensure that inlining terminates.
This is done by occAnalRec.
4. To do so we form a new set of Nodes, with the same details, but
different edges, the "loop-breaker nodes". The loop-breaker nodes
have both more and fewer dependencies than the scope edges
(see Note [Choosing loop breakers])
More edges: if f calls g, and g has an active rule that mentions h
then we add an edge from f -> h
Fewer edges: we only include dependencies on active rules, on rule
RHSs (not LHSs) and if there is an INLINE pragma only
on the stable unfolding (and vice versa). The scope
edges must be much more inclusive.
5. The "weak fvs" of a node are, by definition:
the scope fvs - the loop-breaker fvs
See Note [Weak loop breakers], and the nd_weak field of Details
6. Having formed the loop-breaker nodes
Note [Dead code]
~~~~~~~~~~~~~~~~
Dropping dead code for a cyclic Strongly Connected Component is done
in a very simple way:
the entire SCC is dropped if none of its binders are mentioned
in the body; otherwise the whole thing is kept.
The key observation is that dead code elimination happens after
dependency analysis: so 'occAnalBind' processes SCCs instead of the
original term's binding groups.
Thus 'occAnalBind' does indeed drop 'f' in an example like
letrec f = ...g...
g = ...(...g...)...
in
...g...
when 'g' no longer uses 'f' at all (eg 'f' does not occur in a RULE in
'g'). 'occAnalBind' first consumes 'CyclicSCC g' and then it consumes
'AcyclicSCC f', where 'body_usage' won't contain 'f'.
------------------------------------------------------------
Note [Forming Rec groups]
~~~~~~~~~~~~~~~~~~~~~~~~~
We put bindings {f = ef; g = eg } in a Rec group if "f uses g"
and "g uses f", no matter how indirectly. We do a SCC analysis
with an edge f -> g if "f uses g".
More precisely, "f uses g" iff g should be in scope wherever f is.
That is, g is free in:
a) the rhs 'ef'
b) or the RHS of a rule for f (Note [Rules are extra RHSs])
c) or the LHS or a rule for f (Note [Rule dependency info])
These conditions apply regardless of the activation of the RULE (eg it might be
inactive in this phase but become active later). Once a Rec is broken up
it can never be put back together, so we must be conservative.
The principle is that, regardless of rule firings, every variable is
always in scope.
* Note [Rules are extra RHSs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
A RULE for 'f' is like an extra RHS for 'f'. That way the "parent"
keeps the specialised "children" alive. If the parent dies
(because it isn't referenced any more), then the children will die
too (unless they are already referenced directly).
To that end, we build a Rec group for each cyclic strongly
connected component,
*treating f's rules as extra RHSs for 'f'*.
More concretely, the SCC analysis runs on a graph with an edge
from f -> g iff g is mentioned in
(a) f's rhs
(b) f's RULES
These are rec_edges.
Under (b) we include variables free in *either* LHS *or* RHS of
the rule. The former might seem silly, but see Note [Rule
dependency info]. So in Example [eftInt], eftInt and eftIntFB
will be put in the same Rec, even though their 'main' RHSs are
both non-recursive.
* Note [Rule dependency info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
The VarSet in a RuleInfo is used for dependency analysis in the
occurrence analyser. We must track free vars in *both* lhs and rhs.
Hence use of idRuleVars, rather than idRuleRhsVars in occAnalBind.
Why both? Consider
x = y
RULE f x = v+4
Then if we substitute y for x, we'd better do so in the
rule's LHS too, so we'd better ensure the RULE appears to mention 'x'
as well as 'v'
* Note [Rules are visible in their own rec group]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We want the rules for 'f' to be visible in f's right-hand side.
And we'd like them to be visible in other functions in f's Rec
group. E.g. in Note [Specialisation rules] we want f' rule
to be visible in both f's RHS, and fs's RHS.
This means that we must simplify the RULEs first, before looking
at any of the definitions. This is done by Simplify.simplRecBind,
when it calls addLetIdInfo.
------------------------------------------------------------
Note [Choosing loop breakers]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Loop breaking is surprisingly subtle. First read the section 4 of
"Secrets of the GHC inliner". This describes our basic plan.
We avoid infinite inlinings by choosing loop breakers, and
ensuring that a loop breaker cuts each loop.
See also Note [Inlining and hs-boot files] in ToIface, which deals
with a closely related source of infinite loops.
Fundamentally, we do SCC analysis on a graph. For each recursive
group we choose a loop breaker, delete all edges to that node,
re-analyse the SCC, and iterate.
But what is the graph? NOT the same graph as was used for Note
[Forming Rec groups]! In particular, a RULE is like an equation for
'f' that is *always* inlined if it is applicable. We do *not* disable
rules for loop-breakers. It's up to whoever makes the rules to make
sure that the rules themselves always terminate. See Note [Rules for
recursive functions] in Simplify.hs
Hence, if
f's RHS (or its INLINE template if it has one) mentions g, and
g has a RULE that mentions h, and
h has a RULE that mentions f
then we *must* choose f to be a loop breaker. Example: see Note
[Specialisation rules].
In general, take the free variables of f's RHS, and augment it with
all the variables reachable by RULES from those starting points. That
is the whole reason for computing rule_fv_env in occAnalBind. (Of
course we only consider free vars that are also binders in this Rec
group.) See also Note [Finding rule RHS free vars]
Note that when we compute this rule_fv_env, we only consider variables
free in the *RHS* of the rule, in contrast to the way we build the
Rec group in the first place (Note [Rule dependency info])
Note that if 'g' has RHS that mentions 'w', we should add w to
g's loop-breaker edges. More concretely there is an edge from f -> g
iff
(a) g is mentioned in f's RHS `xor` f's INLINE rhs
(see Note [Inline rules])
(b) or h is mentioned in f's RHS, and
g appears in the RHS of an active RULE of h
or a transitive sequence of active rules starting with h
Why "active rules"? See Note [Finding rule RHS free vars]
Note that in Example [eftInt], *neither* eftInt *nor* eftIntFB is
chosen as a loop breaker, because their RHSs don't mention each other.
And indeed both can be inlined safely.
Note again that the edges of the graph we use for computing loop breakers
are not the same as the edges we use for computing the Rec blocks.
That's why we compute
- rec_edges for the Rec block analysis
- loop_breaker_nodes for the loop breaker analysis
* Note [Finding rule RHS free vars]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this real example from Data Parallel Haskell
tagZero :: Array Int -> Array Tag
{-# INLINE [1] tagZero #-}
tagZero xs = pmap (\x -> fromBool (x==0)) xs
{-# RULES "tagZero" [~1] forall xs n.
pmap fromBool <blah blah> = tagZero xs #-}
So tagZero's RHS mentions pmap, and pmap's RULE mentions tagZero.
However, tagZero can only be inlined in phase 1 and later, while
the RULE is only active *before* phase 1. So there's no problem.
To make this work, we look for the RHS free vars only for
*active* rules. That's the reason for the occ_rule_act field
of the OccEnv.
* Note [Weak loop breakers]
~~~~~~~~~~~~~~~~~~~~~~~~~
There is a last nasty wrinkle. Suppose we have
Rec { f = f_rhs
RULE f [] = g
h = h_rhs
g = h
...more...
}
Remember that we simplify the RULES before any RHS (see Note
[Rules are visible in their own rec group] above).
So we must *not* postInlineUnconditionally 'g', even though
its RHS turns out to be trivial. (I'm assuming that 'g' is
not chosen as a loop breaker.) Why not? Because then we
drop the binding for 'g', which leaves it out of scope in the
RULE!
Here's a somewhat different example of the same thing
Rec { g = h
; h = ...f...
; f = f_rhs
RULE f [] = g }
Here the RULE is "below" g, but we *still* can't postInlineUnconditionally
g, because the RULE for f is active throughout. So the RHS of h
might rewrite to h = ...g...
So g must remain in scope in the output program!
We "solve" this by:
Make g a "weak" loop breaker (OccInfo = IAmLoopBreaker True)
iff g is a "missing free variable" of the Rec group
A "missing free variable" x is one that is mentioned in an RHS or
INLINE or RULE of a binding in the Rec group, but where the
dependency on x may not show up in the loop_breaker_nodes (see
note [Choosing loop breakers] above).
A normal "strong" loop breaker has IAmLoopBreaker False. So
Inline postInlineUnconditionally
strong IAmLoopBreaker False no no
weak IAmLoopBreaker True yes no
other yes yes
The **sole** reason for this kind of loop breaker is so that
postInlineUnconditionally does not fire. Ugh. (Typically it'll
inline via the usual callSiteInline stuff, so it'll be dead in the
next pass, so the main Ugh is the tiresome complication.)
Note [Rules for imported functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
f = /\a. B.g a
RULE B.g Int = 1 + f Int
Note that
* The RULE is for an imported function.
* f is non-recursive
Now we
can get
f Int --> B.g Int Inlining f
--> 1 + f Int Firing RULE
and so the simplifier goes into an infinite loop. This
would not happen if the RULE was for a local function,
because we keep track of dependencies through rules. But
that is pretty much impossible to do for imported Ids. Suppose
f's definition had been
f = /\a. C.h a
where (by some long and devious process), C.h eventually inlines to
B.g. We could only spot such loops by exhaustively following
unfoldings of C.h etc, in case we reach B.g, and hence (via the RULE)
f.
Note that RULES for imported functions are important in practice; they
occur a lot in the libraries.
We regard this potential infinite loop as a *programmer* error.
It's up the programmer not to write silly rules like
RULE f x = f x
and the example above is just a more complicated version.
Note [Preventing loops due to imported functions rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider:
import GHC.Base (foldr)
{-# RULES "filterList" forall p. foldr (filterFB (:) p) [] = filter p #-}
filter p xs = build (\c n -> foldr (filterFB c p) n xs)
filterFB c p = ...
f = filter p xs
Note that filter is not a loop-breaker, so what happens is:
f = filter p xs
= {inline} build (\c n -> foldr (filterFB c p) n xs)
= {inline} foldr (filterFB (:) p) [] xs
= {RULE} filter p xs
We are in an infinite loop.
A more elaborate example (that I actually saw in practice when I went to
mark GHC.List.filter as INLINABLE) is as follows. Say I have this module:
{-# LANGUAGE RankNTypes #-}
module GHCList where
import Prelude hiding (filter)
import GHC.Base (build)
{-# INLINABLE filter #-}
filter :: (a -> Bool) -> [a] -> [a]
filter p [] = []
filter p (x:xs) = if p x then x : filter p xs else filter p xs
{-# NOINLINE [0] filterFB #-}
filterFB :: (a -> b -> b) -> (a -> Bool) -> a -> b -> b
filterFB c p x r | p x = x `c` r
| otherwise = r
{-# RULES
"filter" [~1] forall p xs. filter p xs = build (\c n -> foldr
(filterFB c p) n xs)
"filterList" [1] forall p. foldr (filterFB (:) p) [] = filter p
#-}
Then (because RULES are applied inside INLINABLE unfoldings, but inlinings
are not), the unfolding given to "filter" in the interface file will be:
filter p [] = []
filter p (x:xs) = if p x then x : build (\c n -> foldr (filterFB c p) n xs)
else build (\c n -> foldr (filterFB c p) n xs)
Note that because this unfolding does not mention "filter", filter is not
marked as a strong loop breaker. Therefore at a use site in another module:
filter p xs
= {inline}
case xs of [] -> []
(x:xs) -> if p x then x : build (\c n -> foldr (filterFB c p) n xs)
else build (\c n -> foldr (filterFB c p) n xs)
build (\c n -> foldr (filterFB c p) n xs)
= {inline} foldr (filterFB (:) p) [] xs
= {RULE} filter p xs
And we are in an infinite loop again, except that this time the loop is producing an
infinitely large *term* (an unrolling of filter) and so the simplifier finally
dies with "ticks exhausted"
Because of this problem, we make a small change in the occurrence analyser
designed to mark functions like "filter" as strong loop breakers on the basis that:
1. The RHS of filter mentions the local function "filterFB"
2. We have a rule which mentions "filterFB" on the LHS and "filter" on the RHS
So for each RULE for an *imported* function we are going to add
dependency edges between the *local* FVS of the rule LHS and the
*local* FVS of the rule RHS. We don't do anything special for RULES on
local functions because the standard occurrence analysis stuff is
pretty good at getting loop-breakerness correct there.
It is important to note that even with this extra hack we aren't always going to get
things right. For example, it might be that the rule LHS mentions an imported Id,
and another module has a RULE that can rewrite that imported Id to one of our local
Ids.
Note [Specialising imported functions] (referred to from Specialise)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
BUT for *automatically-generated* rules, the programmer can't be
responsible for the "programmer error" in Note [Rules for imported
functions]. In particular, consider specialising a recursive function
defined in another module. If we specialise a recursive function B.g,
we get
g_spec = .....(B.g Int).....
RULE B.g Int = g_spec
Here, g_spec doesn't look recursive, but when the rule fires, it
becomes so. And if B.g was mutually recursive, the loop might
not be as obvious as it is here.
To avoid this,
* When specialising a function that is a loop breaker,
give a NOINLINE pragma to the specialised function
Note [Glomming]
~~~~~~~~~~~~~~~
RULES for imported Ids can make something at the top refer to something at the bottom:
f = \x -> B.g (q x)
h = \y -> 3
RULE: B.g (q x) = h x
Applying this rule makes f refer to h, although f doesn't appear to
depend on h. (And, as in Note [Rules for imported functions], the
dependency might be more indirect. For example, f might mention C.t
rather than B.g, where C.t eventually inlines to B.g.)
NOTICE that this cannot happen for rules whose head is a
locally-defined function, because we accurately track dependencies
through RULES. It only happens for rules whose head is an imported
function (B.g in the example above).
Solution:
- When simplifying, bring all top level identifiers into
scope at the start, ignoring the Rec/NonRec structure, so
that when 'h' pops up in f's rhs, we find it in the in-scope set
(as the simplifier generally expects). This happens in simplTopBinds.
- In the occurrence analyser, if there are any out-of-scope
occurrences that pop out of the top, which will happen after
firing the rule: f = \x -> h x
h = \y -> 3
then just glom all the bindings into a single Rec, so that
the *next* iteration of the occurrence analyser will sort
them all out. This part happens in occurAnalysePgm.
------------------------------------------------------------
Note [Inline rules]
~~~~~~~~~~~~~~~~~~~
None of the above stuff about RULES applies to Inline Rules,
stored in a CoreUnfolding. The unfolding, if any, is simplified
at the same time as the regular RHS of the function (ie *not* like
Note [Rules are visible in their own rec group]), so it should be
treated *exactly* like an extra RHS.
Or, rather, when computing loop-breaker edges,
* If f has an INLINE pragma, and it is active, we treat the
INLINE rhs as f's rhs
* If it's inactive, we treat f as having no rhs
* If it has no INLINE pragma, we look at f's actual rhs
There is a danger that we'll be sub-optimal if we see this
f = ...f...
[INLINE f = ..no f...]
where f is recursive, but the INLINE is not. This can just about
happen with a sufficiently odd set of rules; eg
foo :: Int -> Int
{-# INLINE [1] foo #-}
foo x = x+1
bar :: Int -> Int
{-# INLINE [1] bar #-}
bar x = foo x + 1
{-# RULES "foo" [~1] forall x. foo x = bar x #-}
Here the RULE makes bar recursive; but its INLINE pragma remains
non-recursive. It's tempting to then say that 'bar' should not be
a loop breaker, but an attempt to do so goes wrong in two ways:
a) We may get
$df = ...$cfoo...
$cfoo = ...$df....
[INLINE $cfoo = ...no-$df...]
But we want $cfoo to depend on $df explicitly so that we
put the bindings in the right order to inline $df in $cfoo
and perhaps break the loop altogether. (Maybe this
b)
Example [eftInt]
~~~~~~~~~~~~~~~
Example (from GHC.Enum):
eftInt :: Int# -> Int# -> [Int]
eftInt x y = ...(non-recursive)...
{-# INLINE [0] eftIntFB #-}
eftIntFB :: (Int -> r -> r) -> r -> Int# -> Int# -> r
eftIntFB c n x y = ...(non-recursive)...
{-# RULES
"eftInt" [~1] forall x y. eftInt x y = build (\ c n -> eftIntFB c n x y)
"eftIntList" [1] eftIntFB (:) [] = eftInt
#-}
Note [Specialisation rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this group, which is typical of what SpecConstr builds:
fs a = ....f (C a)....
f x = ....f (C a)....
{-# RULE f (C a) = fs a #-}
So 'f' and 'fs' are in the same Rec group (since f refers to fs via its RULE).
But watch out! If 'fs' is not chosen as a loop breaker, we may get an infinite loop:
- the RULE is applied in f's RHS (see Note [Self-recursive rules] in Simplify
- fs is inlined (say it's small)
- now there's another opportunity to apply the RULE
This showed up when compiling Control.Concurrent.Chan.getChanContents.
------------------------------------------------------------
Note [Finding join points]
~~~~~~~~~~~~~~~~~~~~~~~~~~
It's the occurrence analyser's job to find bindings that we can turn into join
points, but it doesn't perform that transformation right away. Rather, it marks
the eligible bindings as part of their occurrence data, leaving it to the
simplifier (or to simpleOptPgm) to actually change the binder's 'IdDetails'.
The simplifier then eta-expands the RHS if needed and then updates the
occurrence sites. Dividing the work this way means that the occurrence analyser
still only takes one pass, yet one can always tell the difference between a
function call and a jump by looking at the occurrence (because the same pass
changes the 'IdDetails' and propagates the binders to their occurrence sites).
To track potential join points, we use the 'occ_tail' field of OccInfo. A value
of `AlwaysTailCalled n` indicates that every occurrence of the variable is a
tail call with `n` arguments (counting both value and type arguments). Otherwise
'occ_tail' will be 'NoTailCallInfo'. The tail call info flows bottom-up with the
rest of 'OccInfo' until it goes on the binder.
Note [Rules and join points]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Things get fiddly with rules. Suppose we have:
let j :: Int -> Int
j y = 2 * y
k :: Int -> Int -> Int
{-# RULES "SPEC k 0" k 0 = j #-}
k x y = x + 2 * y
in ...
Now suppose that both j and k appear only as saturated tail calls in the body.
Thus we would like to make them both join points. The rule complicates matters,
though, as its RHS has an unapplied occurrence of j. *However*, if we were to
eta-expand the rule, all would be well:
{-# RULES "SPEC k 0" forall a. k 0 a = j a #-}
So conceivably we could notice that a potential join point would have an
"undersaturated" rule and account for it. This would mean we could make
something that's been specialised a join point, for instance. But local bindings
are rarely specialised, and being overly cautious about rules only
costs us anything when, for some `j`:
* Before specialisation, `j` has non-tail calls, so it can't be a join point.
* During specialisation, `j` gets specialised and thus acquires rules.
* Sometime afterward, the non-tail calls to `j` disappear (as dead code, say),
and so now `j` *could* become a join point.
This appears to be very rare in practice. TODO Perhaps we should gather
statistics to be sure.
------------------------------------------------------------
Note [Adjusting right-hand sides]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There's a bit of a dance we need to do after analysing a lambda expression or
a right-hand side. In particular, we need to
a) call 'markAllInsideLam' *unless* the binding is for a thunk, a one-shot
lambda, or a non-recursive join point; and
b) call 'markAllNonTailCalled' *unless* the binding is for a join point.
Some examples, with how the free occurrences in e (assumed not to be a value
lambda) get marked:
inside lam non-tail-called
------------------------------------------------------------
let x = e No Yes
let f = \x -> e Yes Yes
let f = \x{OneShot} -> e No Yes
\x -> e Yes Yes
join j x = e No No
joinrec j x = e Yes No
There are a few other caveats; most importantly, if we're marking a binding as
'AlwaysTailCalled', it's *going* to be a join point, so we treat it as one so
that the effect cascades properly. Consequently, at the time the RHS is
analysed, we won't know what adjustments to make; thus 'occAnalLamOrRhs' must
return the unadjusted 'UsageDetails', to be adjusted by 'adjustRhsUsage' once
join-point-hood has been decided.
Thus the overall sequence taking place in 'occAnalNonRecBind' and
'occAnalRecBind' is as follows:
1. Call 'occAnalLamOrRhs' to find usage information for the RHS.
2. Call 'tagNonRecBinder' or 'tagRecBinders', which decides whether to make
the binding a join point.
3. Call 'adjustRhsUsage' accordingly. (Done as part of 'tagRecBinders' when
recursive.)
(In the recursive case, this logic is spread between 'makeNode' and
'occAnalRec'.)
-}
------------------------------------------------------------------
-- occAnalBind
------------------------------------------------------------------
-- | Occurrence-analyse one binding (recursive or not), given the usage
-- details of the scope in which it is bound.  Returns the usage of the
-- whole let(rec) together with the output bindings (which may be
-- dropped entirely, or split into several groups -- see Note [Dead code]
-- and Note [Forming Rec groups]).
--
-- Note: the third argument was previously named @top_env@, which was
-- misleading -- it is the imported-rule edge map, not an environment;
-- renamed @imp_rule_edges@ for consistency with the worker functions.
occAnalBind :: OccEnv           -- The incoming OccEnv
            -> TopLevelFlag
            -> ImpRuleEdges     -- Edges induced by RULES for imported Ids
            -> CoreBind
            -> UsageDetails     -- Usage details of scope
            -> (UsageDetails,   -- Of the whole let(rec)
                [CoreBind])

occAnalBind env lvl imp_rule_edges (NonRec binder rhs) body_usage
  = occAnalNonRecBind env lvl imp_rule_edges binder rhs body_usage
occAnalBind env lvl imp_rule_edges (Rec pairs) body_usage
  = occAnalRecBind env lvl imp_rule_edges pairs body_usage
-----------------
-- | Occurrence-analyse a non-recursive binding.  Drops the binding
-- entirely when the binder is unused in the body (see Note [Dead code]);
-- otherwise tags the binder with its occurrence info and combines the
-- body's usage with that of the RHS, its unfolding, and its rules.
occAnalNonRecBind :: OccEnv -> TopLevelFlag -> ImpRuleEdges -> Var -> CoreExpr
                  -> UsageDetails -> (UsageDetails, [CoreBind])
occAnalNonRecBind env lvl imp_rule_edges binder rhs body_usage
  | isTyVar binder      -- A type let; we don't gather usage info
  = (body_usage, [NonRec binder rhs])

  | not (binder `usedIn` body_usage)    -- It's not mentioned
  = (body_usage, [])

  | otherwise                   -- It's mentioned in the body
  = (body_usage' +++ rhs_usage', [NonRec tagged_binder rhs'])
  where
    -- Tag the binder with its occurrence info (may mark it a join point)
    (body_usage', tagged_binder) = tagNonRecBinder lvl body_usage binder
    mb_join_arity = willBeJoinId_maybe tagged_binder

    (bndrs, body) = collectBinders rhs
    (rhs_usage1, bndrs', body') = occAnalNonRecRhs env tagged_binder bndrs body
    rhs' = mkLams (markJoinOneShots mb_join_arity bndrs') body'
           -- For a /non-recursive/ join point we can mark all
           -- its join-lambdas as one-shot; and it's a good idea to do so

    -- Unfoldings
    -- See Note [Unfoldings and join points]
    rhs_usage2 = case occAnalUnfolding env NonRecursive binder of
                   Just unf_usage -> rhs_usage1 +++ unf_usage
                   Nothing        -> rhs_usage1

    -- Rules
    -- See Note [Rules are extra RHSs] and Note [Rule dependency info]
    rules_w_uds = occAnalRules env mb_join_arity NonRecursive tagged_binder
    rhs_usage3 = rhs_usage2 +++ combineUsageDetailsList
                                  (map (\(_, l, r) -> l +++ r) rules_w_uds)
    -- Add many-occurrences for variables reachable via imported-rule edges
    rhs_usage4 = maybe rhs_usage3 (addManyOccsSet rhs_usage3) $
                 lookupVarEnv imp_rule_edges binder
       -- See Note [Preventing loops due to imported functions rules]

    -- Final adjustment: inside-lam / non-tail-called markers
    -- (see Note [Adjusting right-hand sides])
    rhs_usage' = adjustRhsUsage mb_join_arity NonRecursive bndrs' rhs_usage4
-----------------
-- | Occurrence-analyse a recursive group: build a dependency node per
-- binding, run SCC analysis, and feed each component to 'occAnalRec'.
occAnalRecBind :: OccEnv -> TopLevelFlag -> ImpRuleEdges -> [(Var,CoreExpr)]
               -> UsageDetails -> (UsageDetails, [CoreBind])
occAnalRecBind env lvl imp_rule_edges pairs body_usage
  = foldr (occAnalRec env lvl) (body_usage, []) sccs
        -- For a recursive group, we
        --      * occ-analyse all the RHSs
        --      * compute strongly-connected components
        --      * feed those components to occAnalRec
        -- See Note [Recursive bindings: the grand plan]
  where
    sccs :: [SCC Details]
    sccs = {-# SCC "occAnalBind.scc" #-}
           stronglyConnCompFromEdgedVerticesUniq nodes

    nodes :: [LetrecNode]
    nodes = {-# SCC "occAnalBind.assoc" #-}
            map (makeNode env imp_rule_edges bndr_set) pairs

    -- All binders of the group; used to detect intra-group references
    bndr_set = mkVarSet (map fst pairs)
{-
Note [Unfoldings and join points]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We assume that anything in an unfolding occurs multiple times, since unfoldings
are often copied (that's the whole point!). But we still need to track tail
calls for the purpose of finding join points.
-}
-----------------------------
-- | Process one strongly-connected component of a recursive group,
-- given the usage details and output bindings of everything that
-- follows it in the scope.
occAnalRec :: OccEnv -> TopLevelFlag
           -> SCC Details
           -> (UsageDetails, [CoreBind])
           -> (UsageDetails, [CoreBind])

        -- The NonRec case is just like a Let (NonRec ...) above
occAnalRec _ lvl (AcyclicSCC (ND { nd_bndr = bndr, nd_rhs = rhs
                                 , nd_uds = rhs_uds, nd_rhs_bndrs = rhs_bndrs }))
           (body_uds, binds)
  | not (bndr `usedIn` body_uds)
  = (body_uds, binds)           -- See Note [Dead code]

  | otherwise                   -- It's mentioned in the body
  = (body_uds' +++ rhs_uds',
     NonRec tagged_bndr rhs : binds)
  where
    (body_uds', tagged_bndr) = tagNonRecBinder lvl body_uds bndr
    -- Adjust RHS usage once join-point-hood is known
    -- (see Note [Adjusting right-hand sides])
    rhs_uds' = adjustRhsUsage (willBeJoinId_maybe tagged_bndr) NonRecursive
                              rhs_bndrs rhs_uds

        -- The Rec case is the interesting one
        -- See Note [Recursive bindings: the grand plan]
        -- See Note [Loop breaking]
occAnalRec env lvl (CyclicSCC details_s) (body_uds, binds)
  | not (any (`usedIn` body_uds) bndrs) -- NB: look at body_uds, not total_uds
  = (body_uds, binds)                   -- See Note [Dead code]

  | otherwise   -- At this point we always build a single Rec
  = -- pprTrace "occAnalRec" (vcat
    --   [ text "weak_fvs" <+> ppr weak_fvs
    --   , text "lb nodes" <+> ppr loop_breaker_nodes])
    (final_uds, Rec pairs : binds)

  where
    bndrs    = map nd_bndr details_s
    bndr_set = mkVarSet bndrs

    ------------------------------
        -- See Note [Choosing loop breakers] for loop_breaker_nodes
    final_uds :: UsageDetails
    loop_breaker_nodes :: [LetrecNode]
    (final_uds, loop_breaker_nodes)
      = mkLoopBreakerNodes env lvl bndr_set body_uds details_s

    ------------------------------
    -- Binders whose dependencies may be missing from loop_breaker_nodes
    -- (see Note [Weak loop breakers])
    weak_fvs :: VarSet
    weak_fvs = mapUnionVarSet nd_weak details_s

    ---------------------------
    -- Now reconstruct the cycle
    pairs :: [(Id,CoreExpr)]
    pairs | isEmptyVarSet weak_fvs = reOrderNodes   0 bndr_set weak_fvs loop_breaker_nodes []
          | otherwise              = loopBreakNodes 0 bndr_set weak_fvs loop_breaker_nodes []
          -- If weak_fvs is empty, the loop_breaker_nodes will include
          -- all the edges in the original scope edges [remember,
          -- weak_fvs is the difference between scope edges and
          -- lb-edges], so a fresh SCC computation would yield a
          -- single CyclicSCC result; and reOrderNodes deals with
          -- exactly that case
------------------------------------------------------------------
--                 Loop breaking
------------------------------------------------------------------

-- An output binding, paired with its (tagged) binder
type Binding = (Id,CoreExpr)

loopBreakNodes :: Int            -- Iteration depth; drives approximation
               -> VarSet         -- All binders
               -> VarSet         -- Binders whose dependencies may be "missing"
                                 -- See Note [Weak loop breakers]
               -> [LetrecNode]
               -> [Binding]      -- Append these to the end
               -> [Binding]
{-
loopBreakNodes is applied to the list of nodes for a cyclic strongly
connected component (there's guaranteed to be a cycle).  It returns
the same nodes, but
        a) in a better order,
        b) with some of the Ids having a IAmALoopBreaker pragma

The "loop-breaker" Ids are sufficient to break all cycles in the SCC.  This means
that the simplifier can guarantee not to loop provided it never records an inlining
for these no-inline guys.

Furthermore, the order of the binds is such that if we neglect dependencies
on the no-inline Ids then the binds are topologically sorted.  This means
that the simplifier will generally do a good job if it works from top bottom,
recording inlinings for any Ids which aren't marked as "no-inline" as it goes.
-}

-- Return the bindings sorted into a plausible order, and marked with loop breakers.
loopBreakNodes depth bndr_set weak_fvs nodes binds
  = -- pprTrace "loopBreakNodes" (ppr nodes) $
    go (stronglyConnCompFromEdgedVerticesUniqR nodes) binds
  where
    -- Fold over the SCCs right-to-left, consing each component's
    -- bindings onto those of the components that follow it
    go []         binds = binds
    go (scc:sccs) binds = loop_break_scc scc (go sccs binds)

    -- Acyclic components need no loop breaker (though a binder may
    -- still become a weak one); cyclic components must choose one
    loop_break_scc scc binds
      = case scc of
          AcyclicSCC node  -> mk_non_loop_breaker weak_fvs node : binds
          CyclicSCC nodes  -> reOrderNodes depth bndr_set weak_fvs nodes binds
----------------------------------
reOrderNodes :: Int -> VarSet -> VarSet -> [LetrecNode] -> [Binding] -> [Binding]
    -- Choose a loop breaker, mark it no-inline,
    -- and call loopBreakNodes on the rest
reOrderNodes _ _ _ []     _     = panic "reOrderNodes"  -- Callers guarantee a cycle
reOrderNodes _ _ _ [node] binds = mk_loop_breaker node : binds
reOrderNodes depth bndr_set weak_fvs (node : nodes) binds
  = -- pprTrace "reOrderNodes" (vcat [ text "unchosen" <+> ppr unchosen
    --                               , text "chosen" <+> ppr chosen_nodes ]) $
    loopBreakNodes new_depth bndr_set weak_fvs unchosen $
    (map mk_loop_breaker chosen_nodes ++ binds)
  where
    -- Pick the node(s) with the best loop-breaker score; the first
    -- node seeds the search (see Note [Choosing loop breakers])
    (chosen_nodes, unchosen) = chooseLoopBreaker approximate_lb
                                                 (nd_score (node_payload node))
                                                 [node] [] nodes

    -- See Note [Complexity of loop breaking]: after depth 2 switch to
    -- the approximate plan that knocks out all best-scored nodes at once
    approximate_lb = depth >= 2
    new_depth | approximate_lb = 0
              | otherwise      = depth+1
        -- After two iterations (d=0, d=1) give up
        -- and approximate, returning to d=0
-- | Turn a node into a binding whose binder is marked as a strong loop
-- breaker, preserving the binder's existing tail-call information.
mk_loop_breaker :: LetrecNode -> Binding
mk_loop_breaker node
  = (setIdOccInfo lb_bndr lb_occ, nd_rhs nd)
  where
    nd      = node_payload node
    lb_bndr = nd_bndr nd
    -- Keep the tail-call info already recorded on the binder
    lb_occ  = strongLoopBreaker { occ_tail = tailCallInfo (idOccInfo lb_bndr) }
-- | Turn a node into a binding without making it a strong loop breaker.
-- Binders in @weak_fvs@ are nevertheless marked as *weak* loop breakers,
-- so postInlineUnconditionally won't drop them.
-- See Note [Weak loop breakers]
mk_non_loop_breaker :: VarSet -> LetrecNode -> Binding
mk_non_loop_breaker weak_fvs node
  | is_weak   = (setIdOccInfo bndr weak_occ, rhs)
  | otherwise = (bndr, rhs)
  where
    ND { nd_bndr = bndr, nd_rhs = rhs } = node_payload node
    is_weak  = bndr `elemVarSet` weak_fvs
    -- Preserve the binder's existing tail-call information
    weak_occ = weakLoopBreaker { occ_tail = tailCallInfo (idOccInfo bndr) }
----------------------------------
chooseLoopBreaker :: Bool             -- True <=> Too many iterations,
                                      --          so approximate
                  -> NodeScore        -- Best score so far
                  -> [LetrecNode]     -- Nodes with this score
                  -> [LetrecNode]     -- Nodes with higher scores
                  -> [LetrecNode]     -- Unprocessed nodes
                  -> ([LetrecNode], [LetrecNode])
    -- This loop looks for the bind with the lowest score
    -- to pick as the loop breaker.  The rest accumulate in
    -- the second result (the not-chosen nodes)
chooseLoopBreaker _ _ loop_nodes acc []
  = (loop_nodes, acc)   -- Done

    -- If approximate_loop_breaker is True, we pick *all*
    -- nodes with lowest score, else just one
    -- See Note [Complexity of loop breaking]
chooseLoopBreaker approx_lb loop_sc loop_nodes acc (node : nodes)
  | approx_lb
  , rank sc == rank loop_sc     -- Approximate plan: ties count as equal
  = chooseLoopBreaker approx_lb loop_sc (node : loop_nodes) acc nodes

  | sc `betterLB` loop_sc       -- Better score so pick this new one
  = chooseLoopBreaker approx_lb sc [node] (loop_nodes ++ acc) nodes

  | otherwise                   -- Worse score so don't pick it
  = chooseLoopBreaker approx_lb loop_sc loop_nodes (node : acc) nodes
  where
    sc = nd_score (node_payload node)
{-
Note [Complexity of loop breaking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The loop-breaking algorithm knocks out one binder at a time, and
performs a new SCC analysis on the remaining binders. That can
behave very badly in tightly-coupled groups of bindings; in the
worst case it can be (N**2)*log N, because it does a full SCC
on N, then N-1, then N-2 and so on.
To avoid this, we switch plans after 2 (or whatever) attempts:
Plan A: pick one binder with the lowest score, make it
a loop breaker, and try again
Plan B: pick *all* binders with the lowest score, make them
all loop breakers, and try again
Since there are only a small finite number of scores, this will
terminate in a constant number of iterations, rather than O(N)
iterations.
You might think that it's very unlikely, but RULES make it much
more likely. Here's a real example from Trac #1969:
Rec { $dm = \d.\x. op d
{-# RULES forall d. $dm Int d = $s$dm1
forall d. $dm Bool d = $s$dm2 #-}
dInt = MkD .... opInt ...
dBool = MkD .... opBool ...
opInt = $dm dInt
opBool = $dm dBool
$s$dm1 = \x. op dInt
$s$dm2 = \x. op dBool }
The RULES stuff means that we can't choose $dm as a loop breaker
(Note [Choosing loop breakers]), so we must choose at least (say)
opInt *and* opBool, and so on. The number of loop breakers is
linear in the number of instance declarations.
Note [Loop breakers and INLINE/INLINABLE pragmas]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Avoid choosing a function with an INLINE pragma as the loop breaker!
If such a function is mutually-recursive with a non-INLINE thing,
then the latter should be the loop-breaker.
It's vital to distinguish between INLINE and INLINABLE (the
Bool returned by hasStableCoreUnfolding_maybe). If we start with
Rec { {-# INLINABLE f #-}
f x = ...f... }
and then worker/wrapper it through strictness analysis, we'll get
Rec { {-# INLINABLE $wf #-}
$wf p q = let x = (p,q) in ...f...
{-# INLINE f #-}
f x = case x of (p,q) -> $wf p q }
Now it is vital that we choose $wf as the loop breaker, so we can
inline 'f' in '$wf'.
Note [DFuns should not be loop breakers]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's particularly bad to make a DFun into a loop breaker. See
Note [How instance declarations are translated] in TcInstDcls
We give DFuns a higher score than ordinary CONLIKE things because
if there's a choice we want the DFun to be the non-loop breaker. Eg
rec { sc = /\ a \$dC. $fBWrap (T a) ($fCT @ a $dC)
$fCT :: forall a_afE. (Roman.C a_afE) => Roman.C (Roman.T a_afE)
{-# DFUN #-}
$fCT = /\a \$dC. MkD (T a) ((sc @ a $dC) |> blah) ($ctoF @ a $dC)
}
Here 'sc' (the superclass) looks CONLIKE, but we'll never get to it
if we can't unravel the DFun first.
Note [Constructor applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's really really important to inline dictionaries. Real
example (the Enum Ordering instance from GHC.Base):
rec f = \ x -> case d of (p,q,r) -> p x
g = \ x -> case d of (p,q,r) -> q x
d = (v, f, g)
Here, f and g occur just once; but we can't inline them into d.
On the other hand we *could* simplify those case expressions if
we didn't stupidly choose d as the loop breaker.
But we won't because constructor args are marked "Many".
Inlining dictionaries is really essential to unravelling
the loops in static numeric dictionaries, see GHC.Float.
Note [Closure conversion]
~~~~~~~~~~~~~~~~~~~~~~~~~
We treat (\x. C p q) as a high-score candidate in the letrec scoring algorithm.
The immediate motivation came from the result of a closure-conversion transformation
which generated code like this:
data Clo a b = forall c. Clo (c -> a -> b) c
($:) :: Clo a b -> a -> b
Clo f env $: x = f env x
rec { plus = Clo plus1 ()
; plus1 _ n = Clo plus2 n
; plus2 Zero n = n
; plus2 (Succ m) n = Succ (plus $: m $: n) }
If we inline 'plus' and 'plus1', everything unravels nicely. But if
we choose 'plus1' as the loop breaker (which is entirely possible
otherwise), the loop does not unravel nicely.
@occAnalUnfolding@ deals with the question of bindings where the Id is marked
by an INLINE pragma. For these we record that anything which occurs
in its RHS occurs many times. This pessimistically assumes that this
inlined binder also occurs many times in its scope, but if it doesn't
we'll catch it next time round. At worst this costs an extra simplifier pass.
ToDo: try using the occurrence info for the inline'd binder.
[March 97] We do the same for atomic RHSs. Reason: see notes with loopBreakSCC.
[June 98, SLPJ] I've undone this change; I don't understand it. See notes with loopBreakSCC.
************************************************************************
* *
Making nodes
* *
************************************************************************
-}
-- | Mapping from the free variables of imported RULE LHSs to the
-- corresponding RHS free variables.
-- See Note [Preventing loops due to imported functions rules]
type ImpRuleEdges = IdEnv IdSet -- Mapping from FVs of imported RULE LHSs to RHS FVs

-- | The empty mapping: no imported-rule edges at all.
noImpRuleEdges :: ImpRuleEdges
noImpRuleEdges = emptyVarEnv

-- | Graph node used for the letrec dependency analysis.
type LetrecNode = Node Unique Details -- Node comes from Digraph
                                      -- The Unique key is gotten from the Id
-- | Per-binder information gathered for one binding of a recursive group,
-- consumed by the loop-breaker computation.
data Details
  = ND { nd_bndr :: Id          -- Binder

       , nd_rhs  :: CoreExpr    -- RHS, already occ-analysed

       , nd_rhs_bndrs :: [CoreBndr] -- Outer lambdas of RHS
                                    -- INVARIANT: (nd_rhs_bndrs nd, _) ==
                                    --            collectBinders (nd_rhs nd)

       , nd_uds  :: UsageDetails -- Usage from RHS, and RULES, and stable unfoldings
                                 -- ignoring phase (ie assuming all are active)
                                 -- See Note [Forming Rec groups]

       , nd_inl  :: IdSet        -- Free variables of
                                 --   the stable unfolding (if present and active)
                                 --   or the RHS (if not)
                                 -- but excluding any RULES
                                 -- This is the IdSet that may be used if the Id is inlined

       , nd_weak :: IdSet        -- Binders of this Rec that are mentioned in nd_uds
                                 -- but are *not* in nd_inl.  These are the ones whose
                                 -- dependencies might not be respected by loop_breaker_nodes
                                 -- See Note [Weak loop breakers]

       , nd_active_rule_fvs :: IdSet -- Free variables of the RHS of active RULES

       , nd_score :: NodeScore   -- Desirability as a loop breaker;
                                 -- left as a panic by makeNode and filled in
                                 -- later by mkLoopBreakerNodes
  }
-- | Debug rendering of a 'Details' record, one field per line.
instance Outputable Details where
  ppr nd = text "ND" <> braces
             (sep [ text "bndr =" <+> ppr (nd_bndr nd)
                  , text "uds =" <+> ppr (nd_uds nd)
                  , text "inl =" <+> ppr (nd_inl nd)
                  , text "weak =" <+> ppr (nd_weak nd)
                  , text "rule =" <+> ppr (nd_active_rule_fvs nd)
                  , text "score =" <+> ppr (nd_score nd)
                  ])
-- The NodeScore is compared lexicographically;
-- e.g. lower rank wins regardless of size
type NodeScore = ( Int     -- Rank: lower => more likely to be picked as loop breaker
                 , Int     -- Size of rhs: higher => more likely to be picked as LB
                           -- Maxes out at maxExprSize; we just use it to prioritise
                           -- small functions
                 , Bool )  -- Was it a loop breaker before?
                           -- True => more likely to be picked
                           -- Note [Loop breakers, node scoring, and stability]
-- | Project out the rank (the first, most significant component)
-- of a 'NodeScore'.
rank :: NodeScore -> Int
rank score = case score of
               (r, _size, _was_lb) -> r
-- | Build the letrec-analysis graph node for one binding of a recursive
-- group: occ-analyse its RHS, fold in usage from RULES and any stable
-- unfolding, and record the free-variable sets needed for loop-breaker
-- selection.  The 'nd_score' field is deliberately a panic here; it is
-- filled in later by 'mkLoopBreakerNodes'.
-- See Note [Recursive bindings: the grand plan]
makeNode :: OccEnv -> ImpRuleEdges -> VarSet
         -> (Var, CoreExpr) -> LetrecNode
makeNode env imp_rule_edges bndr_set (bndr, rhs)
  = DigraphNode details (varUnique bndr) (nonDetKeysUniqSet node_fvs)
    -- It's OK to use nonDetKeysUniqSet here as stronglyConnCompFromEdgedVerticesR
    -- is still deterministic with edges in nondeterministic order as
    -- explained in Note [Deterministic SCC] in Digraph.
  where
    details = ND { nd_bndr            = bndr
                 , nd_rhs             = rhs'
                 , nd_rhs_bndrs       = bndrs'
                 , nd_uds             = rhs_usage3
                 , nd_inl             = inl_fvs
                 , nd_weak            = node_fvs `minusVarSet` inl_fvs
                 , nd_active_rule_fvs = active_rule_fvs
                 , nd_score           = pprPanic "makeNodeDetails" (ppr bndr) }
                   -- nd_score is filled in by mkLoopBreakerNodes

    -- Constructing the edges for the main Rec computation
    -- See Note [Forming Rec groups]
    (bndrs, body) = collectBinders rhs
    (rhs_usage1, bndrs', body') = occAnalRecRhs env bndrs body
    rhs' = mkLams bndrs' body'
    rhs_usage2 = rhs_usage1 +++ all_rule_uds
                   -- Note [Rules are extra RHSs]
                   -- Note [Rule dependency info]
    rhs_usage3 = case mb_unf_uds of
                   Just unf_uds -> rhs_usage2 +++ unf_uds
                   Nothing      -> rhs_usage2
    node_fvs = udFreeVars bndr_set rhs_usage3

    -- Finding the free variables of the rules
    is_active = occ_rule_act env :: Activation -> Bool

    rules_w_uds :: [(CoreRule, UsageDetails, UsageDetails)]
    rules_w_uds = occAnalRules env (Just (length bndrs)) Recursive bndr

    rules_w_rhs_fvs :: [(Activation, VarSet)]    -- Find the RHS fvs
    rules_w_rhs_fvs = maybe id (\ids -> ((AlwaysActive, ids):))
                               (lookupVarEnv imp_rule_edges bndr)
                      -- See Note [Preventing loops due to imported functions rules]
                      [ (ru_act rule, udFreeVars bndr_set rhs_uds)
                      | (rule, _, rhs_uds) <- rules_w_uds ]

    all_rule_uds = combineUsageDetailsList $
                   concatMap (\(_, l, r) -> [l, r]) rules_w_uds
    active_rule_fvs = unionVarSets [fvs | (a,fvs) <- rules_w_rhs_fvs
                                        , is_active a]

    -- Finding the usage details of the INLINE pragma (if any)
    mb_unf_uds = occAnalUnfolding env Recursive bndr

    -- Find the "nd_inl" free vars; for the loop-breaker phase
    inl_fvs = case mb_unf_uds of
                Nothing      -> udFreeVars bndr_set rhs_usage1 -- No INLINE, use RHS
                Just unf_uds -> udFreeVars bndr_set unf_uds
                  -- We could check for an *active* INLINE (returning
                  -- emptyVarSet for an inactive one), but is_active
                  -- isn't the right thing (it tells about
                  -- RULE activation), so we'd need more plumbing
mkLoopBreakerNodes :: OccEnv -> TopLevelFlag
                   -> VarSet
                   -> UsageDetails    -- for BODY of let
                   -> [Details]
                   -> (UsageDetails,  -- adjusted
                       [LetrecNode])
-- Does four things
--   a) tag each binder with its occurrence info
--   b) add a NodeScore to each node
--   c) make a Node with the right dependency edges for
--      the loop-breaker SCC analysis
--   d) adjust each RHS's usage details according to
--      the binder's (new) shotness and join-point-hood
mkLoopBreakerNodes env lvl bndr_set body_uds details_s
  = (final_uds, zipWith mk_lb_node details_s bndrs')
  where
    -- Tag the recursive binders using usage from the body plus every
    -- RHS; bndrs' is in the same order as details_s, so zipWith above
    -- pairs each tagged binder with its own Details
    (final_uds, bndrs') = tagRecBinders lvl body_uds
                            [ ((nd_bndr nd)
                              ,(nd_uds nd)
                              ,(nd_rhs_bndrs nd))
                            | nd <- details_s ]

    mk_lb_node nd@(ND { nd_bndr = bndr, nd_rhs = rhs, nd_inl = inl_fvs }) bndr'
      = DigraphNode nd' (varUnique bndr) (nonDetKeysUniqSet lb_deps)
        -- It's OK to use nonDetKeysUniqSet here as
        -- stronglyConnCompFromEdgedVerticesR is still deterministic with edges
        -- in nondeterministic order as explained in
        -- Note [Deterministic SCC] in Digraph.
      where
        nd'     = nd { nd_bndr = bndr', nd_score = score }
        score   = nodeScore env bndr bndr' rhs lb_deps
        lb_deps = extendFvs_ rule_fv_env inl_fvs

    rule_fv_env :: IdEnv IdSet
    -- Maps a variable f to the variables from this group
    --      mentioned in RHS of active rules for f
    -- Domain is *subset* of bound vars (others have no rule fvs)
    rule_fv_env = transClosureFV (mkVarEnv init_rule_fvs)

    init_rule_fvs   -- See Note [Finding rule RHS free vars]
      = [ (b, trimmed_rule_fvs)
        | ND { nd_bndr = b, nd_active_rule_fvs = rule_fvs } <- details_s
        , let trimmed_rule_fvs = rule_fvs `intersectVarSet` bndr_set
        , not (isEmptyVarSet trimmed_rule_fvs) ]
------------------------------------------
-- | Score one binder of a recursive group for loop-breaker selection;
-- scores are compared with 'betterLB' (lower rank => more likely picked).
nodeScore :: OccEnv
          -> Id        -- Binder has old occ-info (just for loop-breaker-ness)
          -> Id        -- Binder with new occ-info
          -> CoreExpr  -- RHS
          -> VarSet    -- Loop-breaker dependencies
          -> NodeScore
nodeScore env old_bndr new_bndr bind_rhs lb_deps
  | not (isId old_bndr)     -- A type or coercion variable is never a loop breaker
  = (100, 0, False)

  | old_bndr `elemVarSet` lb_deps  -- Self-recursive things are great loop breakers
  = (0, 0, True)                   -- See Note [Self-recursion and loop breakers]

  | not (occ_unf_act env old_bndr) -- A binder whose inlining is inactive (e.g. has
  = (0, 0, True)                   -- a NOINLINE pragma) makes a great loop breaker

  | exprIsTrivial rhs
  = mk_score 10  -- Practically certain to be inlined
    -- Used to have also: && not (isExportedId bndr)
    -- But I found this sometimes cost an extra iteration when we have
    --      rec { d = (a,b); a = ...df...; b = ...df...; df = d }
    -- where df is the exported dictionary. Then df makes a really
    -- bad choice for loop breaker

  | DFunUnfolding { df_args = args } <- id_unfolding
    -- Never choose a DFun as a loop breaker
    -- Note [DFuns should not be loop breakers]
  = (9, length args, is_lb)

    -- Data structures are more important than INLINE pragmas
    -- so that dictionary/method recursion unravels

  | CoreUnfolding { uf_guidance = UnfWhen {} } <- id_unfolding
  = mk_score 6

  | is_con_app rhs   -- Data types help with cases:
  = mk_score 5       --      Note [Constructor applications]

  | isStableUnfolding id_unfolding
  , can_unfold
  = mk_score 3

  | isOneOcc (idOccInfo new_bndr)
  = mk_score 2  -- Likely to be inlined

  | can_unfold  -- The Id has some kind of unfolding
  = mk_score 1

  | otherwise
  = (0, 0, is_lb)

  where
    mk_score :: Int -> NodeScore
    mk_score rank = (rank, rhs_size, is_lb)

    is_lb = isStrongLoopBreaker (idOccInfo old_bndr)

    -- Use the stable-unfolding template as "the" RHS when there is one;
    -- 'bind_rhs' is irrelevant for inlining things with a stable unfolding
    rhs = case id_unfolding of
            CoreUnfolding { uf_src = src, uf_tmpl = unf_rhs }
              | isStableSource src
              -> unf_rhs
            _ -> bind_rhs

    rhs_size = case id_unfolding of
                 CoreUnfolding { uf_guidance = guidance }
                   | UnfIfGoodArgs { ug_size = size } <- guidance
                   -> size
                 _ -> cheapExprSize rhs

    can_unfold = canUnfold id_unfolding

    id_unfolding = realIdUnfolding old_bndr
      -- realIdUnfolding: Ignore loop-breaker-ness here because
      -- that is what we are setting!

    -- Checking for a constructor application
    -- Cheap and cheerful; the simplifier moves casts out of the way
    -- The lambda case is important to spot  x = /\a. C (f a)
    -- which comes up when C is a dictionary constructor and
    -- f is a default method.
    -- Example: the instance for Show (ST s a) in GHC.ST
    --
    -- However we *also* treat (\x. C p q) as a con-app-like thing,
    --      Note [Closure conversion]
    is_con_app (Var v)    = isConLikeId v
    is_con_app (App f _)  = is_con_app f
    is_con_app (Lam _ e)  = is_con_app e
    is_con_app (Tick _ e) = is_con_app e
    is_con_app _          = False
-- | Cap on the expression size computed by 'cheapExprSize'; once the
-- running count reaches this bound we stop counting.
maxExprSize :: Int
maxExprSize = 20  -- Rather arbitrary
-- | Compute an approximate size for an expression, giving up (and
-- returning the running count) once it reaches 'maxExprSize'.
-- Variables and literals count 1; types and coercions are free;
-- ticks and casts are transparent.
cheapExprSize :: CoreExpr -> Int
cheapExprSize top_expr = bounded 0 top_expr
  where
    -- Stop descending as soon as the accumulator hits the cap
    bounded acc expr
      | acc >= maxExprSize = acc
      | otherwise          = step acc expr

    step acc (Var {})        = acc + 1
    step acc (Lit {})        = acc + 1
    step acc (Type {})       = acc
    step acc (Coercion {})   = acc
    step acc (Tick _ e)      = step acc e
    step acc (Cast e _)      = step acc e
    step acc (App f a)       = bounded (step acc f) a
    step acc (Lam b e)
      | isTyVar b            = step acc e
      | otherwise            = bounded (acc + 1) e
    step acc (Let b e)       = steps (step acc e) (rhssOfBind b)
    step acc (Case e _ _ as) = steps (step acc e) (rhssOfAlts as)

    -- Fold over a list of sub-expressions, still respecting the cap
    steps acc []     = acc
    steps acc (e:es)
      | acc >= maxExprSize = acc
      | otherwise          = steps (step acc e) es
-- | @n1 `betterLB` n2@ is True when n1 should be chosen as the loop
-- breaker in preference to n2: lower rank wins outright; on equal
-- rank the *bigger* RHS becomes the loop breaker (keep small things
-- inlinable); on a complete tie, prefer a node that was already a
-- loop breaker last time round.
-- See Note [Loop breakers, node scoring, and stability]
betterLB :: NodeScore -> NodeScore -> Bool
betterLB (rank1, size1, lb1) (rank2, size2, _)
  = case compare rank1 rank2 of
      LT -> True
      GT -> False
      EQ -> case compare size1 size2 of
              LT -> False   -- Make the bigger n2 into the loop breaker
              GT -> True
              EQ -> lb1     -- Tie-break on previous loop-breaker-ness
{- Note [Self-recursion and loop breakers]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have
rec { f = ...f...g...
; g = .....f... }
then 'f' has to be a loop breaker anyway, so we may as well choose it
right away, so that g can inline freely.
This is really just a cheap hack. Consider
rec { f = ...g...
; g = ..f..h...
; h = ...f....}
Here f or g are better loop breakers than h; but we might accidentally
choose h. Finding the minimal set of loop breakers is hard.
Note [Loop breakers, node scoring, and stability]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To choose a loop breaker, we give a NodeScore to each node in the SCC,
and pick the one with the best score (according to 'betterLB').
We need to be jolly careful (Trac #12425, #12234) about the stability
of this choice. Suppose we have
let rec { f = ...g...g...
; g = ...f...f... }
in
case x of
True -> ...f..
False -> ..f...
In each iteration of the simplifier the occurrence analyser OccAnal
chooses a loop breaker. Suppose in iteration 1 it choose g as the loop
breaker. That means it is free to inline f.
Suppose that GHC decides to inline f in the branches of the case, but
(for some reason; eg it is not saturated) in the rhs of g. So we get
let rec { f = ...g...g...
; g = ...f...f... }
in
case x of
True -> ...g...g.....
False -> ..g..g....
Now suppose that, for some reason, in the next iteration the occurrence
analyser chooses f as the loop breaker, so it can freely inline g. And
again for some reason the simplifier inlines g at its calls in the case
branches, but not in the RHS of f. Then we get
let rec { f = ...g...g...
; g = ...f...f... }
in
case x of
True -> ...(...f...f...)...(...f..f..).....
False -> ..(...f...f...)...(..f..f...)....
You can see where this is going! Each iteration of the simplifier
doubles the number of calls to f or g. No wonder GHC is slow!
(In the particular example in comment:3 of #12425, f and g are the two
mutually recursive fmap instances for CondT and Result. They are both
marked INLINE which, oddly, is why they don't inline in each other's
RHS, because the call there is not saturated.)
The root cause is that we flip-flop on our choice of loop breaker. I
always thought it didn't matter, and indeed for any single iteration
to terminate, it doesn't matter. But when we iterate, it matters a
lot!!
So The Plan is this:
If there is a tie, choose the node that
was a loop breaker last time round
Hence the is_lb field of NodeScore
************************************************************************
* *
Right hand sides
* *
************************************************************************
-}
-- | Occurrence-analyse the RHS (outer lambdas plus body) of a binding,
-- dispatching on whether the binding group is recursive.
-- Returned usage details cover only the RHS,
-- and *not* the RULE or INLINE template for the Id.
occAnalRhs :: OccEnv -> RecFlag -> Id -> [CoreBndr] -> CoreExpr
           -> (UsageDetails, [CoreBndr], CoreExpr)
occAnalRhs env is_rec id bndrs body
  = case is_rec of
      Recursive    -> occAnalRecRhs env bndrs body
      NonRecursive -> occAnalNonRecRhs env id bndrs body
-- | Occurrence-analyse the RHS of a recursive binding, always in an
-- RHS context ('rhsCtxt').  Returned usage details cover only the RHS,
-- and *not* the RULE or INLINE template for the Id.
occAnalRecRhs :: OccEnv -> [CoreBndr] -> CoreExpr  -- Rhs lambdas, body
              -> (UsageDetails, [CoreBndr], CoreExpr)
occAnalRecRhs env = occAnalLamOrRhs (rhsCtxt env)
-- | Occurrence-analyse the RHS of a non-recursive binding.  The binder
-- is already tagged with occurrence info.  Returned usage details cover
-- only the RHS, and *not* the RULE or INLINE template for the Id.
occAnalNonRecRhs :: OccEnv
                 -> Id -> [CoreBndr] -> CoreExpr  -- Binder; rhs lams, body
                 -> (UsageDetails, [CoreBndr], CoreExpr)
occAnalNonRecRhs env bndr bndrs body
  = occAnalLamOrRhs rhs_env bndrs body
  where
    -- Choose the enclosing-context flavour for the RHS
    base_env
      | is_join_point    = env          -- See Note [Join point RHSs]
      | certainly_inline = env          -- See Note [Cascading inlines]
      | otherwise        = rhsCtxt env

    -- See Note [Sources of one-shot information]
    rhs_env = base_env { occ_one_shots = argOneShots dmd }

    certainly_inline  -- See Note [Cascading inlines]
      = case occ_info of
          OneOcc { occ_in_lam = in_lam, occ_one_br = one_br }
            -> not in_lam && one_br && is_active && is_not_stable
          _ -> False

    is_join_point = isAlwaysTailCalled occ_info
      -- Like (isJoinId bndr) but happens one step earlier
      -- c.f. willBeJoinId_maybe

    occ_info      = idOccInfo bndr
    dmd           = idDemandInfo bndr
    is_active     = isAlwaysActive (idInlineActivation bndr)
    is_not_stable = not (isStableUnfolding (idUnfolding bndr))
occAnalUnfolding :: OccEnv
                 -> RecFlag
                 -> Id
                 -> Maybe UsageDetails
      -- Just the analysis, not a new unfolding. The unfolding
      -- got analysed when it was created and we don't need to
      -- update it.
occAnalUnfolding env rec_flag id
  = case realIdUnfolding id of -- ignore previous loop-breaker flag
      -- A stable (e.g. INLINE-pragma) unfolding contributes usage;
      -- an unstable one does not.
      CoreUnfolding { uf_tmpl = rhs, uf_src = src }
        | not (isStableSource src)
        -> Nothing
        | otherwise
        -> Just $ markAllMany usage
        where
          (bndrs, body) = collectBinders rhs
          (usage, _, _) = occAnalRhs env rec_flag id bndrs body

      -- For a DFun, gather usage from the dictionary arguments,
      -- minus the DFun's own binders
      DFunUnfolding { df_bndrs = bndrs, df_args = args }
        -> Just $ zapDetails (delDetailsList usage bndrs)
        where
          usage = foldr (+++) emptyDetails (map (fst . occAnal env) args)

      _ -> Nothing
occAnalRules :: OccEnv
             -> Maybe JoinArity -- If the binder is (or MAY become) a join
                                -- point, what its join arity is (or WOULD
                                -- become). See Note [Rules and join points].
             -> RecFlag
             -> Id
             -> [(CoreRule,      -- Each (non-built-in) rule
                  UsageDetails,  -- Usage details for LHS
                  UsageDetails)] -- Usage details for RHS
occAnalRules env mb_expected_join_arity rec_flag id
  = [ (rule, lhs_uds, rhs_uds) | rule@Rule {} <- idCoreRules id
                               , let (lhs_uds, rhs_uds) = occ_anal_rule rule ]
  where
    occ_anal_rule (Rule { ru_bndrs = bndrs, ru_args = args, ru_rhs = rhs })
      = (lhs_uds, final_rhs_uds)
      where
        -- Free vars of the LHS args (minus rule binders) get "many" occurrences
        lhs_uds = addManyOccsSet emptyDetails $
                    (exprsFreeVars args `delVarSetList` bndrs)

        (rhs_bndrs, rhs_body) = collectBinders rhs
        (rhs_uds, _, _) = occAnalRhs env rec_flag id rhs_bndrs rhs_body
                            -- Note [Rules are extra RHSs]
                            -- Note [Rule dependency info]
        final_rhs_uds = adjust_tail_info args $ markAllMany $
                          (rhs_uds `delDetailsList` bndrs)

    -- Built-in rules carry no usage information
    occ_anal_rule _
      = (emptyDetails, emptyDetails)

    adjust_tail_info args uds -- see Note [Rules and join points]
      = case mb_expected_join_arity of
          Just ar | args `lengthIs` ar -> uds
          _                            -> markAllNonTailCalled uds
{- Note [Join point RHSs]
~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
x = e
join j = Just x
We want to inline x into j right away, so we don't want to give
the join point a RhsCtxt (Trac #14137). It's not a huge deal, because
the FloatIn pass knows to float into join point RHSs; and the simplifier
does not float things out of join point RHSs. But it's a simple, cheap
thing to do. See Trac #14137.
Note [Cascading inlines]
~~~~~~~~~~~~~~~~~~~~~~~~
By default we use an rhsCtxt for the RHS of a binding. This tells the
occ anal that it's looking at an RHS, which has an effect in
occAnalApp. In particular, for constructor applications, it makes
the arguments appear to have NoOccInfo, so that we don't inline into
them. Thus x = f y
k = Just x
we do not want to inline x.
But there's a problem. Consider
x1 = a0 : []
x2 = a1 : x1
x3 = a2 : x2
g = f x3
First time round, it looks as if x1 and x2 occur as an arg of a
let-bound constructor ==> give them a many-occurrence.
But then x3 is inlined (unconditionally as it happens) and
next time round, x2 will be, and the next time round x1 will be
Result: multiple simplifier iterations. Sigh.
So, when analysing the RHS of x3 we notice that x3 will itself
definitely inline the next time round, and so we analyse x3's rhs in
an ordinary context, not rhsCtxt. Hence the "certainly_inline" stuff.
Annoyingly, we have to approximate SimplUtils.preInlineUnconditionally.
If (a) the RHS is expandable (see isExpandableApp in occAnalApp), and
(b) certainly_inline says "yes" when preInlineUnconditionally says "no"
then the simplifier iterates indefinitely:
x = f y
k = Just x -- We decide that k is 'certainly_inline'
v = ...k... -- but preInlineUnconditionally doesn't inline it
inline ==>
k = Just (f y)
v = ...k...
float ==>
x1 = f y
k = Just x1
v = ...k...
This is worse than the slow cascade, so we only want to say "certainly_inline"
if it really is certain. Look at the note with preInlineUnconditionally
for the various clauses.
************************************************************************
* *
Expressions
* *
************************************************************************
-}
-- | The main occurrence-analysis traversal: returns usage details for
-- the "interesting" Ids together with the expression with binders
-- tagged with occurrence info.
occAnal :: OccEnv
        -> CoreExpr
        -> (UsageDetails,       -- Gives info only about the "interesting" Ids
            CoreExpr)

occAnal _   expr@(Type _) = (emptyDetails,         expr)
occAnal _   expr@(Lit _)  = (emptyDetails,         expr)
occAnal env expr@(Var _)  = occAnalApp env (expr, [], [])
    -- At one stage, I gathered the idRuleVars for the variable here too,
    -- which in a way is the right thing to do.
    -- But that went wrong right after specialisation, when
    -- the *occurrences* of the overloaded function didn't have any
    -- rules in them, so the *specialised* versions looked as if they
    -- weren't used at all.

occAnal _ (Coercion co)
  = (addManyOccsSet emptyDetails (coVarsOfCo co), Coercion co)
        -- See Note [Gather occurrences of coercion variables]

{- Note [Gather occurrences of coercion variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to gather info about what coercion variables appear, so that
we can sort them into the right place when doing dependency analysis.
-}

occAnal env (Tick tickish body)
  | SourceNote{} <- tickish
  = (usage, Tick tickish body')
    -- SourceNotes are best-effort; so we just proceed as usual.
    -- If we drop a tick due to the issues described below it's
    -- not the end of the world.

  | tickish `tickishScopesLike` SoftScope
  = (markAllNonTailCalled usage, Tick tickish body')

  | Breakpoint _ ids <- tickish
  = (usage_lam +++ foldr addManyOccs emptyDetails ids, Tick tickish body')
    -- never substitute for any of the Ids in a Breakpoint

  | otherwise
  = (usage_lam, Tick tickish body')
  where
    !(usage,body') = occAnal env body
    -- for a non-soft tick scope, we can inline lambdas only
    usage_lam = markAllNonTailCalled (markAllInsideLam usage)
      -- TODO There may be ways to make ticks and join points play
      -- nicer together, but right now there are problems:
      --   let j x = ... in tick<t> (j 1)
      -- Making j a join point may cause the simplifier to drop t
      -- (if the tick is put into the continuation). So we don't
      -- count j 1 as a tail call.
      -- See #14242.

occAnal env (Cast expr co)
  = case occAnal env expr of { (usage, expr') ->
    let usage1 = zapDetailsIf (isRhsEnv env) usage
          -- usage1: if we see   let x = y `cast` co
          -- then mark y as 'Many' so that we don't
          -- immediately inline y again.
        usage2 = addManyOccsSet usage1 (coVarsOfCo co)
          -- usage2: see Note [Gather occurrences of coercion variables]
    in (markAllNonTailCalled usage2, Cast expr' co)
    }

occAnal env app@(App _ _)
  = occAnalApp env (collectArgsTicks tickishFloatable app)

-- Ignore type variables altogether
--   (a) occurrences inside type lambdas only not marked as InsideLam
--   (b) type variables not in environment

occAnal env (Lam x body)
  | isTyVar x
  = case occAnal env body of { (body_usage, body') ->
    (markAllNonTailCalled body_usage, Lam x body')
    }

-- For value lambdas we do a special hack.  Consider
--      (\x. \y. ...x...)
-- If we did nothing, x is used inside the \y, so would be marked
-- as dangerous to dup.  But in the common case where the abstraction
-- is applied to two arguments this is over-pessimistic.
-- So instead, we just mark each binder with its occurrence
-- info in the *body* of the multiple lambda.
-- Then, the simplifier is careful when partially applying lambdas.

occAnal env expr@(Lam _ _)
  = case occAnalLamOrRhs env binders body of { (usage, tagged_binders, body') ->
    let
      expr'       = mkLams tagged_binders body'
      usage1      = markAllNonTailCalled usage
      one_shot_gp = all isOneShotBndr tagged_binders
      -- If every binder is one-shot, the body runs at most once, so we
      -- need not pessimise its usage with markAllInsideLam
      final_usage | one_shot_gp = usage1
                  | otherwise   = markAllInsideLam usage1
    in
    (final_usage, expr') }
  where
    (binders, body) = collectBinders expr

occAnal env (Case scrut bndr ty alts)
  = case occ_anal_scrut scrut alts     of { (scrut_usage, scrut') ->
    case mapAndUnzip occ_anal_alt alts of { (alts_usage_s, alts') ->
    let
      alts_usage = foldr combineAltsUsageDetails emptyDetails alts_usage_s
      (alts_usage1, tagged_bndr) = tag_case_bndr alts_usage bndr
      total_usage = markAllNonTailCalled scrut_usage +++ alts_usage1
                    -- Alts can have tail calls, but the scrutinee can't
    in
    total_usage `seq` (total_usage, Case scrut' tagged_bndr ty alts') }}
  where
    -- Note [Case binder usage]
    -- ~~~~~~~~~~~~~~~~~~~~~~~~
    -- The case binder gets a usage of either "many" or "dead", never "one".
    -- Reason: we like to inline single occurrences, to eliminate a binding,
    -- but inlining a case binder *doesn't* eliminate a binding.
    -- We *don't* want to transform
    --      case x of w { (p,q) -> f w }
    -- into
    --      case x of w { (p,q) -> f (p,q) }
    tag_case_bndr usage bndr
      = (usage', setIdOccInfo bndr final_occ_info)
      where
        occ_info       = lookupDetails usage bndr
        usage'         = usage `delDetails` bndr
        final_occ_info = case occ_info of IAmDead -> IAmDead
                                          _       -> noOccInfo

    alt_env      = mkAltEnv env scrut bndr
    occ_anal_alt = occAnalAlt alt_env

    occ_anal_scrut (Var v) (alt1 : other_alts)
      | not (null other_alts) || not (isDefaultAlt alt1)
      = (mkOneOcc env v True 0, Var v)
        -- The 'True' says that the variable occurs in an interesting
        -- context; the case has at least one non-default alternative
    occ_anal_scrut (Tick t e) alts
      | t `tickishScopesLike` SoftScope
        -- No reason to not look through all ticks here, but only
        -- for soft-scoped ticks we can do so without having to
        -- update returned occurrence info (see occAnal)
      = second (Tick t) $ occ_anal_scrut e alts

    occ_anal_scrut scrut _alts
      = occAnal (vanillaCtxt env) scrut    -- No need for rhsCtxt

occAnal env (Let bind body)
  = case occAnal env body                of { (body_usage, body') ->
    case occAnalBind env NotTopLevel
                     noImpRuleEdges bind
                     body_usage          of { (final_usage, new_binds) ->
       (final_usage, mkLets new_binds body') }}
-- | Occurrence-analyse the arguments of an application, threading the
-- stack of one-shot information through the value arguments
-- (type arguments consume no one-shot info).
occAnalArgs :: OccEnv -> [CoreExpr] -> [OneShots] -> (UsageDetails, [CoreExpr])
occAnalArgs _ [] _
  = (emptyDetails, [])

occAnalArgs env (arg:args) one_shots
  | isTypeArg arg
  = case occAnalArgs env args one_shots of { (uds, args') ->
    (uds, arg:args') }

  | otherwise
  = case argCtxt env one_shots           of { (arg_env, one_shots') ->
    case occAnal arg_env arg             of { (uds1, arg') ->
    case occAnalArgs env args one_shots' of { (uds2, args') ->
    (uds1 +++ uds2, arg':args') }}}
{-
Applications are dealt with specially because we want
the "build hack" to work.
Note [Arguments of let-bound constructors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f x = let y = expensive x in
let z = (True,y) in
(case z of {(p,q)->q}, case z of {(p,q)->q})
We feel free to duplicate the WHNF (True,y), but that means
that y may be duplicated thereby.
If we aren't careful we duplicate the (expensive x) call!
Constructors are rather like lambdas in this way.
-}
-- | Occurrence-analyse an application that has been decomposed into
-- (function, arguments, floated ticks).  Bare variables also arrive
-- here (via occAnal's Var case) with empty argument and tick lists.
occAnalApp :: OccEnv
           -> (Expr CoreBndr, [Arg CoreBndr], [Tickish Id])
           -> (UsageDetails, Expr CoreBndr)
occAnalApp env (Var fun, args, ticks)
  | null ticks = (uds, mkApps (Var fun) args')
  | otherwise  = (uds, mkTicks ticks $ mkApps (Var fun) args')
  where
    uds = fun_uds +++ final_args_uds

    !(args_uds, args') = occAnalArgs env args one_shots
    !final_args_uds
      | isRhsEnv env && is_exp = markAllNonTailCalled $
                                 markAllInsideLam args_uds
      | otherwise              = markAllNonTailCalled args_uds
    -- We mark the free vars of the argument of a constructor or PAP
    -- as "inside-lambda", if it is the RHS of a let(rec).
    -- This means that nothing gets inlined into a constructor or PAP
    -- argument position, which is what we want.  Typically those
    -- constructor arguments are just variables, or trivial expressions.
    -- We use inside-lam because it's like eta-expanding the PAP.
    --
    -- This is the *whole point* of the isRhsEnv predicate
    -- See Note [Arguments of let-bound constructors]

    n_val_args = valArgCount args
    n_args     = length args
    fun_uds    = mkOneOcc env fun (n_val_args > 0) n_args
    is_exp     = isExpandableApp fun n_val_args
        -- See Note [CONLIKE pragma] in BasicTypes
        -- The definition of is_exp should match that in Simplify.prepareRhs

    one_shots  = argsOneShots (idStrictness fun) guaranteed_val_args
    guaranteed_val_args = n_val_args + length (takeWhile isOneShotInfo
                                                         (occ_one_shots env))
        -- See Note [Sources of one-shot information], bullet point A'

occAnalApp env (fun, args, ticks)
  = (markAllNonTailCalled (fun_uds +++ args_uds),
     mkTicks ticks $ mkApps fun' args')
  where
    !(fun_uds, fun') = occAnal (addAppCtxt env args) fun
        -- The addAppCtxt is a bit cunning.  One iteration of the simplifier
        -- often leaves behind beta redexs like
        --      (\x y -> e) a1 a2
        -- Here we would like to mark x,y as one-shot, and treat the whole
        -- thing much like a let.  We do this by pushing some True items
        -- onto the context stack.
    !(args_uds, args') = occAnalArgs env args []
-- | Apply 'zapDetails' to the usage details only when the flag is set;
-- otherwise return the details unchanged.
zapDetailsIf :: Bool          -- If this is true
             -> UsageDetails  -- Then do zapDetails on this
             -> UsageDetails
zapDetailsIf should_zap uds
  | should_zap = zapDetails uds
  | otherwise  = uds
{-
Note [Sources of one-shot information]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The occurrence analyser obtains one-shot-lambda information from two sources:
A: Saturated applications: eg f e1 .. en
In general, given a call (f e1 .. en) we can propagate one-shot info from
f's strictness signature into e1 .. en, but /only/ if n is enough to
saturate the strictness signature. A strictness signature like
f :: C1(C1(L))LS
means that *if f is applied to three arguments* then it will guarantee to
call its first argument at most once, and to call the result of that at
most once. But if f has fewer than three arguments, all bets are off; e.g.
map (f (\x y. expensive) e2) xs
Here the \x y abstraction may be called many times (once for each element of
xs) so we should not mark x and y as one-shot. But if it was
map (f (\x y. expensive) 3 2) xs
then the first argument of f will be called at most once.
The one-shot info, derived from f's strictness signature, is
computed by 'argsOneShots', called in occAnalApp.
A': Non-obviously saturated applications: eg build (f (\x y -> expensive))
where f is as above.
In this case, f is only manifestly applied to one argument, so it does not
look saturated. So by the previous point, we should not use its strictness
signature to learn about the one-shotness of \x y. But in this case we can:
build is fully applied, so we may use its strictness signature; and from
that we learn that build calls its argument with two arguments *at most once*.
So there is really only one call to f, and it will have three arguments. In
that sense, f is saturated, and we may proceed as described above.
Hence the computation of 'guaranteed_val_args' in occAnalApp, using
'(occ_one_shots env)'. See also Trac #13227, comment:9
B: Let-bindings: eg let f = \c. let ... in \n -> blah
in (build f, build f)
Propagate one-shot info from the demand-info on 'f' to the
lambdas in its RHS (which may not be syntactically at the top).
This information must have come from a previous run of the demand
analyser.
Previously, the demand analyser would *also* set the one-shot information, but
that code was buggy (see #11770), so doing it only in on place, namely here, is
saner.
Note [OneShots]
~~~~~~~~~~~~~~~
When analysing an expression, the occ_one_shots argument contains information
about how the function is being used. The length of the list indicates
how many arguments will eventually be passed to the analysed expression,
and the OneShotInfo indicates whether this application is once or multiple times.
Example:
Context of f occ_one_shots when analysing f
f 1 2 [OneShot, OneShot]
map (f 1) [OneShot, NoOneShotInfo]
build f [OneShot, OneShot]
f 1 2 `seq` f 2 1 [NoOneShotInfo, OneShot]
Note [Binders in case alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
case x of y { (a,b) -> f y }
We treat 'a', 'b' as dead, because they don't physically occur in the
case alternative. (Indeed, a variable is dead iff it doesn't occur in
its scope in the output of OccAnal.) It really helps to know when
binders are unused. See esp the call to isDeadBinder in
Simplify.mkDupableAlt
In this example, though, the Simplifier will bring 'a' and 'b' back to
life, because it binds 'y' to (a,b) (imagine it got inlined and
scrutinised y).
-}
-- | Occurrence-analyse a group of lambda binders plus body (the shared
-- shape of a lambda expression and of a binding's RHS), tagging the
-- binders with occurrence and one-shot info.
occAnalLamOrRhs :: OccEnv -> [CoreBndr] -> CoreExpr
                -> (UsageDetails, [CoreBndr], CoreExpr)
occAnalLamOrRhs env [] body
  = case occAnal env body of (body_usage, body') -> (body_usage, [], body')
      -- RHS of thunk or nullary join point

occAnalLamOrRhs env (bndr:bndrs) body
  | isTyVar bndr
  = -- Important: Keep the environment so that we don't inline into an RHS like
    --   \(@ x) -> C @x (f @x)
    -- (see the beginning of Note [Cascading inlines]).
    case occAnalLamOrRhs env bndrs body of
      (body_usage, bndrs', body') -> (body_usage, bndr:bndrs', body')

occAnalLamOrRhs env binders body
  = case occAnal env_body body of { (body_usage, body') ->
    let
      (final_usage, tagged_binders) = tagLamBinders body_usage binders'
        -- Use binders' to put one-shot info on the lambdas
    in
    (final_usage, tagged_binders, body') }
  where
    (env_body, binders') = oneShotGroup env binders
-- | Occurrence-analyse one case alternative, tagging its binders and
-- applying the binder-swap wrapping (see 'wrapAltRHS') when applicable.
occAnalAlt :: (OccEnv, Maybe (Id, CoreExpr))
           -> CoreAlt
           -> (UsageDetails, Alt IdWithOccInfo)
occAnalAlt (env, scrut_bind) (con, bndrs, rhs)
  = case occAnal env rhs of { (rhs_usage1, rhs1) ->
    let
      (alt_usg, tagged_bndrs) = tagLamBinders rhs_usage1 bndrs
        -- See Note [Binders in case alternatives]
      (alt_usg', rhs2) =
        wrapAltRHS env scrut_bind alt_usg tagged_bndrs rhs1
    in
    (alt_usg', (con, tagged_bndrs, rhs2)) }
-- | If the binder swap is enabled and the scrutinee proxy is used in
-- the alternative, wrap the alternative RHS in a let-binding for the
-- proxy; otherwise return the alternative unchanged.
wrapAltRHS :: OccEnv
           -> Maybe (Id, CoreExpr)  -- proxy mapping generated by mkAltEnv
           -> UsageDetails          -- usage for entire alt (p -> rhs)
           -> [Var]                 -- alt binders
           -> CoreExpr              -- alt RHS
           -> (UsageDetails, CoreExpr)
wrapAltRHS env (Just (scrut_var, let_rhs)) alt_usg bndrs alt_rhs
  | occ_binder_swap env
  , scrut_var `usedIn` alt_usg -- bndrs are not present in alt_usg so this
                               -- handles condition (a) in Note [Binder swap]
  , not captured               -- See condition (b) in Note [Binder swap]
  = ( alt_usg' +++ let_rhs_usg
    , Let (NonRec tagged_scrut_var let_rhs') alt_rhs )
  where
    captured = any (`usedIn` let_rhs_usg) bndrs
    -- The rhs of the let may include coercion variables
    -- if the scrutinee was a cast, so we must gather their
    -- usage. See Note [Gather occurrences of coercion variables]
    (let_rhs_usg, let_rhs') = occAnal env let_rhs
    (alt_usg', [tagged_scrut_var]) = tagLamBinders alt_usg [scrut_var]

wrapAltRHS _ _ alt_usg _ alt_rhs
  = (alt_usg, alt_rhs)
{-
************************************************************************
* *
OccEnv
* *
************************************************************************
-}
-- | Environment threaded through the occurrence analysis.
data OccEnv
  = OccEnv { occ_encl        :: !OccEncl   -- Enclosing context information
           , occ_one_shots   :: !OneShots  -- See Note [OneShots]
           , occ_gbl_scrut   :: GlobalScruts

           , occ_unf_act     :: Id -> Bool          -- Which Id unfoldings are active
           , occ_rule_act    :: Activation -> Bool  -- Which rules are active
             -- See Note [Finding rule RHS free vars]

           , occ_binder_swap :: !Bool  -- enable the binder_swap
             -- See CorePrep Note [Dead code in CorePrep]
    }
type GlobalScruts = IdSet -- See Note [Binder swap on GlobalId scrutinees]
-----------------------------
-- OccEncl is used to control whether to inline into constructor arguments
-- For example:
--      x = (p,q)               -- Don't inline p or q
--      y = /\a -> (p a, q a)   -- Still don't inline p or q
--      z = f (p,q)             -- Do inline p,q; it may make a rule fire
-- So OccEncl tells enough about the context to know what to do when
-- we encounter a constructor application or PAP.

data OccEncl
  = OccRhs      -- RHS of let(rec), albeit perhaps inside a type lambda
                -- Don't inline into constructor args here
  | OccVanilla  -- Argument of function, body of lambda, scrutinee of case etc.
                -- Do inline into constructor args here

instance Outputable OccEncl where
  ppr OccRhs     = text "occRhs"
  ppr OccVanilla = text "occVanilla"

-- See note [OneShots]
type OneShots = [OneShotInfo]
-- | The starting environment: vanilla context, no one-shot info,
-- empty global-scrutinee set, and conservative activation settings.
initOccEnv :: OccEnv
initOccEnv
  = OccEnv { occ_encl      = OccVanilla
           , occ_one_shots = []
           , occ_gbl_scrut = emptyVarSet
             -- To be conservative, we say that all
             -- inlines and rules are active
           , occ_unf_act   = \_ -> True
           , occ_rule_act  = \_ -> True
           , occ_binder_swap = True }
-- | Reset the environment for analysing an arbitrary sub-expression:
-- vanilla enclosing context, no one-shot info.
vanillaCtxt :: OccEnv -> OccEnv
vanillaCtxt env = env { occ_encl = OccVanilla, occ_one_shots = [] }

-- | Reset the environment for analysing the RHS of a binding.
rhsCtxt :: OccEnv -> OccEnv
rhsCtxt env = env { occ_encl = OccRhs, occ_one_shots = [] }
-- | Environment for analysing one function argument, consuming one
-- element of the per-argument one-shot stack (if there is one).
argCtxt :: OccEnv -> [OneShots] -> (OccEnv, [OneShots])
argCtxt env []
  = (env { occ_encl = OccVanilla, occ_one_shots = [] }, [])
argCtxt env (one_shots:one_shots_s)
  = (env { occ_encl = OccVanilla, occ_one_shots = one_shots }, one_shots_s)
-- | Are we currently analysing the right-hand side of a binding?
isRhsEnv :: OccEnv -> Bool
isRhsEnv env = case occ_encl env of
                 OccRhs     -> True
                 OccVanilla -> False
oneShotGroup :: OccEnv -> [CoreBndr]
             -> ( OccEnv
                , [CoreBndr] )
-- The result binders have one-shot-ness set that they might not have had originally.
-- This happens in (build (\c n -> e)).  Here the occurrence analyser
-- linearity context knows that c,n are one-shot, and it records that fact in
-- the binder. This is useful to guide subsequent float-in/float-out transformations
oneShotGroup env@(OccEnv { occ_one_shots = ctxt }) bndrs
  = go ctxt bndrs []
  where
    -- Binders exhausted: remember the leftover one-shot context.
    go ctxt [] rev_bndrs
      = ( env { occ_one_shots = ctxt, occ_encl = OccVanilla }
        , reverse rev_bndrs )
    -- Context exhausted: remaining binders pass through unchanged.
    go [] bndrs rev_bndrs
      = ( env { occ_one_shots = [], occ_encl = OccVanilla }
        , reverse rev_bndrs ++ bndrs )
    -- Only value binders (isId) consume an element of the context;
    -- type/coercion binders do not.
    go ctxt@(one_shot : ctxt') (bndr : bndrs) rev_bndrs
      | isId bndr = go ctxt' bndrs (bndr' : rev_bndrs)
      | otherwise = go ctxt  bndrs (bndr  : rev_bndrs)
      where
        bndr' = updOneShotInfo bndr one_shot
        -- Use updOneShotInfo, not setOneShotInfo, as pre-existing
        -- one-shot info might be better than what we can infer, e.g.
        -- due to explicit use of the magic 'oneShot' function.
        -- See Note [The oneShot function]
markJoinOneShots :: Maybe JoinArity -> [Var] -> [Var]
-- Mark the lambdas of a non-recursive join point as one-shot.
-- This is good to prevent gratuitous float-out etc
markJoinOneShots mb_join_arity bndrs
  = case mb_join_arity of
      Nothing -> bndrs            -- Not a join point: nothing to do
      Just n  -> go n bndrs       -- Mark the first n value binders
  where
    go 0 bndrs  = bndrs
    go _ []     = [] -- This can legitimately happen.
                     -- e.g.  let j = case ... in j True
                     -- This will become an arity-1 join point after the
                     -- simplifier has eta-expanded it; but it may not have
                     -- enough lambdas /yet/. (Lint checks that JoinIds do
                     -- have enough lambdas.)
    go n (b:bs) = b' : go (n-1) bs
      where
        -- Only Ids can carry one-shot info; tyvars pass through.
        b' | isId b    = setOneShotLambda b
           | otherwise = b
-- | Entering the function of an application: push one OneShotLam per
-- value argument onto the one-shot context.
addAppCtxt :: OccEnv -> [Arg CoreBndr] -> OccEnv
addAppCtxt env@(OccEnv { occ_one_shots = ctxt }) args
  = env { occ_one_shots = replicate (valArgCount args) OneShotLam ++ ctxt }
transClosureFV :: UniqFM VarSet -> UniqFM VarSet
-- If (f,g), (g,h) are in the input, then (f,h) is in the output
-- as well as (f,g), (g,h)
transClosureFV env
  | no_change = env
  | otherwise = transClosureFV (listToUFM new_fv_list)
    -- Iterate to a fixpoint: stop as soon as a whole pass adds nothing.
  where
    (no_change, new_fv_list) = mapAccumL bump True (nonDetUFMToList env)
      -- It's OK to use nonDetUFMToList here because we'll forget the
      -- ordering by creating a new set with listToUFM
    -- Extend one entry's free-var set; the accumulator records whether
    -- any entry changed this pass.
    bump no_change (b,fvs)
      | no_change_here = (no_change, (b,fvs))
      | otherwise      = (False, (b,new_fvs))
      where
        (new_fvs, no_change_here) = extendFvs env fvs
-------------
-- | Like 'extendFvs', but discard the fixpoint flag.
extendFvs_ :: UniqFM VarSet -> VarSet -> VarSet
extendFvs_ env = fst . extendFvs env
extendFvs :: UniqFM VarSet -> VarSet -> (VarSet, Bool)
-- (extendFVs env s) returns
--     (s `union` env(s), env(s) `subset` s)
-- The Bool is True when s was already closed under env.
extendFvs env s
  | isNullUFM env
  = (s, True)
  | otherwise
  = (s `unionVarSet` extras, extras `subVarSet` s)
  where
    extras :: VarSet    -- env(s)
    extras = nonDetFoldUFM unionVarSet emptyVarSet $
      -- It's OK to use nonDetFoldUFM here because unionVarSet commutes
             intersectUFM_C (\x _ -> x) env (getUniqSet s)
{-
************************************************************************
* *
Binder swap
* *
************************************************************************
Note [Binder swap]
~~~~~~~~~~~~~~~~~~
We do these two transformations right here:
(1) case x of b { pi -> ri }
==>
case x of b { pi -> let x=b in ri }
(2) case (x |> co) of b { pi -> ri }
==>
case (x |> co) of b { pi -> let x = b |> sym co in ri }
Why (2)? See Note [Case of cast]
In both cases, in a particular alternative (pi -> ri), we only
add the binding if
(a) x occurs free in (pi -> ri)
(ie it occurs in ri, but is not bound in pi)
(b) the pi does not bind b (or the free vars of co)
We need (a) and (b) for the inserted binding to be correct.
For the alternatives where we inject the binding, we can transfer
all x's OccInfo to b. And that is the point.
Notice that
* The deliberate shadowing of 'x'.
* That (a) rapidly becomes false, so no bindings are injected.
The reason for doing these transformations here is because it allows
us to adjust the OccInfo for 'x' and 'b' as we go.
* Suppose the only occurrences of 'x' are the scrutinee and in the
ri; then this transformation makes it occur just once, and hence
get inlined right away.
* If we do this in the Simplifier, we don't know whether 'x' is used
in ri, so we are forced to pessimistically zap b's OccInfo even
though it is typically dead (ie neither it nor x appear in the
ri). There's nothing actually wrong with zapping it, except that
it's kind of nice to know which variables are dead. My nose
tells me to keep this information as robustly as possible.
The Maybe (Id,CoreExpr) passed to occAnalAlt is the extra let-binding
{x=b}; it's Nothing if the binder-swap doesn't happen.
There is a danger though. Consider
let v = x +# y
in case (f v) of w -> ...v...v...
And suppose that (f v) expands to just v. Then we'd like to
use 'w' instead of 'v' in the alternative. But it may be too
late; we may have substituted the (cheap) x+#y for v in the
same simplifier pass that reduced (f v) to v.
I think this is just too bad. CSE will recover some of it.
Note [Case of cast]
~~~~~~~~~~~~~~~~~~~
Consider case (x `cast` co) of b { I# ->
... (case (x `cast` co) of {...}) ...
We'd like to eliminate the inner case. That is the motivation for
equation (2) in Note [Binder swap]. When we get to the inner case, we
inline x, cancel the casts, and away we go.
Note [Binder swap on GlobalId scrutinees]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When the scrutinee is a GlobalId we must take care in two ways
i) In order to *know* whether 'x' occurs free in the RHS, we need its
occurrence info. BUT, we don't gather occurrence info for
GlobalIds. That's what the (small) occ_gbl_scrut env in
OccEnv is for: it says "gather occurrence info for these".
ii) We must call localiseId on 'x' first, in case it's a GlobalId, or
has an External Name. See, for example, SimplEnv Note [Global Ids in
the substitution].
Note [Zap case binders in proxy bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
From the original
case x of cb(dead) { p -> ...x... }
we will get
case x of cb(live) { p -> let x = cb in ...x... }
Core Lint never expects to find an *occurrence* of an Id marked
as Dead, so we must zap the OccInfo on cb before making the
binding x = cb. See Trac #5028.
Historical note [no-case-of-case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We *used* to suppress the binder-swap in case expressions when
-fno-case-of-case is on. Old remarks:
"This happens in the first simplifier pass,
and enhances full laziness. Here's the bad case:
f = \ y -> ...(case x of I# v -> ...(case x of ...) ... )
If we eliminate the inner case, we trap it inside the I# v -> arm,
which might prevent some full laziness happening. I've seen this
in action in spectral/cichelli/Prog.hs:
[(m,n) | m <- [1..max], n <- [1..max]]
Hence the check for NoCaseOfCase."
However, now the full-laziness pass itself reverses the binder-swap, so this
check is no longer necessary.
Historical note [Suppressing the case binder-swap]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This old note describes a problem that is also fixed by doing the
binder-swap in OccAnal:
There is another situation when it might make sense to suppress the
case-expression binder-swap. If we have
case x of w1 { DEFAULT -> case x of w2 { A -> e1; B -> e2 }
...other cases .... }
We'll perform the binder-swap for the outer case, giving
case x of w1 { DEFAULT -> case w1 of w2 { A -> e1; B -> e2 }
...other cases .... }
But there is no point in doing it for the inner case, because w1 can't
be inlined anyway. Furthermore, doing the case-swapping involves
zapping w2's occurrence info (see paragraphs that follow), and that
forces us to bind w2 when doing case merging. So we get
case x of w1 { A -> let w2 = w1 in e1
B -> let w2 = w1 in e2
...other cases .... }
This is plain silly in the common case where w2 is dead.
Even so, I can't see a good way to implement this idea. I tried
not doing the binder-swap if the scrutinee was already evaluated
but that failed big-time:
data T = MkT !Int
case v of w { MkT x ->
case x of x1 { I# y1 ->
case x of x2 { I# y2 -> ...
Notice that because MkT is strict, x is marked "evaluated". But to
eliminate the last case, we must either make sure that x (as well as
x1) has unfolding MkT y1. The straightforward thing to do is to do
the binder-swap. So this whole note is a no-op.
It's fixed by doing the binder-swap in OccAnal because we can do the
binder-swap unconditionally and still get occurrence analysis
information right.
-}
mkAltEnv :: OccEnv -> CoreExpr -> Id -> (OccEnv, Maybe (Id, CoreExpr))
-- Does three things: a) makes the occ_one_shots = OccVanilla
--                    b) extends the GlobalScruts if possible
--                    c) returns a proxy mapping, binding the scrutinee
--                       to the case binder, if possible
mkAltEnv env@(OccEnv { occ_gbl_scrut = pe }) scrut case_bndr
  = case stripTicksTopE (const True) scrut of
      Var v           -> add_scrut v case_bndr'
      Cast (Var v) co -> add_scrut v (Cast case_bndr' (mkSymCo co))
                          -- See Note [Case of cast]
      _               -> (env { occ_encl = OccVanilla }, Nothing)
  where
    add_scrut v rhs = ( env { occ_encl = OccVanilla, occ_gbl_scrut = pe `extendVarSet` v }
                      , Just (localise v, rhs) )
    case_bndr' = Var (zapIdOccInfo case_bndr) -- See Note [Zap case binders in proxy bindings]
    localise scrut_var = mkLocalIdOrCoVar (localiseName (idName scrut_var)) (idType scrut_var)
        -- Localise the scrut_var before shadowing it; we're making a
        -- new binding for it, and it might have an External Name, or
        -- even be a GlobalId; Note [Binder swap on GlobalId scrutinees]
        -- Also we don't want any INLINE or NOINLINE pragmas!
{-
************************************************************************
* *
\subsection[OccurAnal-types]{OccEnv}
* *
************************************************************************
Note [UsageDetails and zapping]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
On many occasions, we must modify all gathered occurrence data at once. For
instance, all occurrences underneath a (non-one-shot) lambda set the
'occ_in_lam' flag to become 'True'. We could use 'mapVarEnv' to do this, but
that takes O(n) time and we will do this often---in particular, there are many
places where tail calls are not allowed, and each of these causes all variables
to get marked with 'NoTailCallInfo'.
Instead of relying on `mapVarEnv`, then, we carry three 'IdEnv's around along
with the 'OccInfoEnv'. Each of these extra environments is a "zapped set"
recording which variables have been zapped in some way. Zapping all occurrence
info then simply means setting the corresponding zapped set to the whole
'OccInfoEnv', a fast O(1) operation.
-}
type OccInfoEnv = IdEnv OccInfo -- A finite map from ids to their usage
                -- INVARIANT: never IAmDead
                -- (Deadness is signalled by not being in the map at all)

type ZappedSet = OccInfoEnv -- Values are ignored

-- | Usage information for every variable in a scope.
-- See Note [UsageDetails and zapping] for the role of the zapped sets.
data UsageDetails
  = UD { ud_env       :: !OccInfoEnv
       , ud_z_many    :: ZappedSet   -- apply 'markMany' to these
       , ud_z_in_lam  :: ZappedSet   -- apply 'markInsideLam' to these
       , ud_z_no_tail :: ZappedSet } -- apply 'markNonTailCalled' to these
  -- INVARIANT: All three zapped sets are subsets of the OccInfoEnv
instance Outputable UsageDetails where
  -- Flatten first, so the printed occ info reflects the pending zaps
  ppr ud = ppr (ud_env (flattenUsageDetails ud))
-------------------
-- UsageDetails API

-- | Combine usage from places that both execute ('+++': occurrences
-- add up) or from alternative case branches (combineAltsUsageDetails:
-- only one branch runs, so occurrences are or-ed).
(+++), combineAltsUsageDetails
  :: UsageDetails -> UsageDetails -> UsageDetails
(+++) = combineUsageDetailsWith addOccInfo
combineAltsUsageDetails = combineUsageDetailsWith orOccInfo
-- | Fold a list of usage details together with ('+++').
-- NOTE(review): this is the lazy 'foldl'; a strict 'foldl'' would avoid
-- building a thunk chain on long lists -- TODO confirm and switch once
-- the import list (not visible here) can gain Data.List (foldl').
combineUsageDetailsList :: [UsageDetails] -> UsageDetails
combineUsageDetailsList = foldl (+++) emptyDetails
-- | Record a single occurrence of an Id.  Local Ids get full occurrence
-- info; GlobalIds are tracked only when they appear in occ_gbl_scrut
-- (see Note [Binder swap on GlobalId scrutinees]); all others are ignored.
mkOneOcc :: OccEnv -> Id -> InterestingCxt -> JoinArity -> UsageDetails
mkOneOcc env id int_cxt arity
  | isLocalId id
  = singleton $ OneOcc { occ_in_lam  = False
                       , occ_one_br  = True
                       , occ_int_cxt = int_cxt
                       , occ_tail    = AlwaysTailCalled arity }
  | id `elemVarSet` occ_gbl_scrut env
  = singleton noOccInfo
  | otherwise
  = emptyDetails
  where
    singleton info = emptyDetails { ud_env = unitVarEnv id info }
-- | Add one occurrence of @id@, combining with any existing info.
-- The Id is removed from every zapped set because any pending zap is
-- applied right here (in 'plus_zapped') before combining.
addOneOcc :: UsageDetails -> Id -> OccInfo -> UsageDetails
addOneOcc ud id info
  = ud { ud_env = extendVarEnv_C plus_zapped (ud_env ud) id info }
    `alterZappedSets` (`delVarEnv` id)
  where
    plus_zapped old new = doZapping ud id old `addOccInfo` new
-- | Add a 'noOccInfo' occurrence for every Id in the set.
addManyOccsSet :: UsageDetails -> VarSet -> UsageDetails
addManyOccsSet usage id_set = nonDetFoldUniqSet addManyOccs usage id_set
  -- It's OK to use nonDetFoldUFM here because addManyOccs commutes

-- Add several occurrences, assumed not to be tail calls
addManyOccs :: Var -> UsageDetails -> UsageDetails
addManyOccs v u | isId v    = addOneOcc u v noOccInfo
                | otherwise = u
        -- Give a non-committal binder info (i.e noOccInfo) because
        --   a) Many copies of the specialised thing can appear
        --   b) We don't want to substitute a BIG expression inside a RULE
        --      even if that's the only occurrence of the thing
        --      (Same goes for INLINE.)
-- | Remove one binder from the usage details (and all zapped sets).
delDetails :: UsageDetails -> Id -> UsageDetails
delDetails ud bndr
  = alterUsageDetails ud (\env -> delVarEnv env bndr)

-- | Remove several binders from the usage details (and all zapped sets).
delDetailsList :: UsageDetails -> [Id] -> UsageDetails
delDetailsList ud bndrs
  = alterUsageDetails ud (\env -> delVarEnvList env bndrs)
-- | Usage details recording no occurrences at all.
emptyDetails :: UsageDetails
emptyDetails = UD { ud_env       = emptyVarEnv
                  , ud_z_many    = emptyVarEnv
                  , ud_z_in_lam  = emptyVarEnv
                  , ud_z_no_tail = emptyVarEnv }

-- | True when no occurrences are recorded.  (The zapped sets are
-- subsets of ud_env, so checking ud_env alone suffices.)
isEmptyDetails :: UsageDetails -> Bool
isEmptyDetails = isEmptyVarEnv . ud_env
-- | O(1) marking of /all/ recorded occurrences, implemented by copying
-- the whole OccInfoEnv into the corresponding zapped set.
-- See Note [UsageDetails and zapping].
markAllMany, markAllInsideLam, markAllNonTailCalled, zapDetails
  :: UsageDetails -> UsageDetails
markAllMany          ud = ud { ud_z_many    = ud_env ud }
markAllInsideLam     ud = ud { ud_z_in_lam  = ud_env ud }
markAllNonTailCalled ud = ud { ud_z_no_tail = ud_env ud }
zapDetails = markAllMany . markAllNonTailCalled -- effectively sets to noOccInfo
-- | Look up an Id's occurrence info, applying any pending zaps.
-- Absence from the environment means the Id is dead.
lookupDetails :: UsageDetails -> Id -> OccInfo
lookupDetails ud id
  = maybe IAmDead (doZapping ud id) (lookupVarEnv (ud_env ud) id)
-- | Does the scope described by the usage details need this Id?
-- Exported Ids are always considered used.
usedIn :: Id -> UsageDetails -> Bool
v `usedIn` ud = isExportedId v || v `elemVarEnv` ud_env ud

udFreeVars :: VarSet -> UsageDetails -> VarSet
-- Find the subset of bndrs that are mentioned in uds
udFreeVars bndrs ud = restrictUniqSetToUFM bndrs (ud_env ud)
-------------------
-- Auxiliary functions for UsageDetails implementation

-- | Combine two usage details pointwise with the given OccInfo
-- combiner; the zapped sets are simply unioned.
combineUsageDetailsWith :: (OccInfo -> OccInfo -> OccInfo)
                        -> UsageDetails -> UsageDetails -> UsageDetails
combineUsageDetailsWith plus_occ_info ud1 ud2
  | isEmptyDetails ud1 = ud2   -- Cheap short-cuts when one side
  | isEmptyDetails ud2 = ud1   -- records nothing
  | otherwise
  = UD { ud_env       = plusVarEnv_C plus_occ_info (ud_env ud1) (ud_env ud2)
       , ud_z_many    = plusVarEnv (ud_z_many    ud1) (ud_z_many    ud2)
       , ud_z_in_lam  = plusVarEnv (ud_z_in_lam  ud1) (ud_z_in_lam  ud2)
       , ud_z_no_tail = plusVarEnv (ud_z_no_tail ud1) (ud_z_no_tail ud2) }
-- | Apply any pending zaps for this variable to its occurrence info.
doZapping :: UsageDetails -> Var -> OccInfo -> OccInfo
doZapping ud var occ
  = doZappingByUnique ud (varUnique var) occ

-- | Worker for 'doZapping', keyed by Unique so 'flattenUsageDetails'
-- can reuse it via mapUFM_Directly.
doZappingByUnique :: UsageDetails -> Unique -> OccInfo -> OccInfo
doZappingByUnique ud uniq
  = (if | in_subset ud_z_many    -> markMany     -- many-occs wins over in-lam
        | in_subset ud_z_in_lam  -> markInsideLam
        | otherwise              -> id) .
    (if | in_subset ud_z_no_tail -> markNonTailCalled
        | otherwise              -> id)
  where
    in_subset field = uniq `elemVarEnvByKey` field ud
-- | Apply @f@ to each of the three zapped sets.
alterZappedSets :: UsageDetails -> (ZappedSet -> ZappedSet) -> UsageDetails
alterZappedSets ud f
  = ud { ud_z_many    = f (ud_z_many    ud)
       , ud_z_in_lam  = f (ud_z_in_lam  ud)
       , ud_z_no_tail = f (ud_z_no_tail ud) }

-- | Apply @f@ to the main environment /and/ all three zapped sets,
-- preserving the subset invariant.
alterUsageDetails :: UsageDetails -> (OccInfoEnv -> OccInfoEnv) -> UsageDetails
alterUsageDetails ud f
  = ud { ud_env = f (ud_env ud) }
    `alterZappedSets` f

-- | Apply all pending zaps eagerly and clear the zapped sets.
flattenUsageDetails :: UsageDetails -> UsageDetails
flattenUsageDetails ud
  = ud { ud_env = mapUFM_Directly (doZappingByUnique ud) (ud_env ud) }
    `alterZappedSets` const emptyVarEnv
-------------------
-- See Note [Adjusting right-hand sides]

-- | Adjust the usage gathered from a binding's RHS: mark everything as
-- inside-lambda unless the lambdas are all one-shot, and drop tail-call
-- info unless the binding is an exact join point.
adjustRhsUsage :: Maybe JoinArity -> RecFlag
               -> [CoreBndr] -- Outer lambdas, AFTER occ anal
               -> UsageDetails -> UsageDetails
adjustRhsUsage mb_join_arity rec_flag bndrs usage
  = maybe_mark_lam (maybe_drop_tails usage)
  where
    maybe_mark_lam ud   | one_shot   = ud
                        | otherwise  = markAllInsideLam ud
    maybe_drop_tails ud | exact_join = ud
                        | otherwise  = markAllNonTailCalled ud
    -- For a join point only the lambdas beyond the join arity count.
    one_shot = case mb_join_arity of
                 Just join_arity
                   | isRec rec_flag -> False
                   | otherwise      -> all isOneShotBndr (drop join_arity bndrs)
                 Nothing            -> all isOneShotBndr bndrs
    exact_join = case mb_join_arity of
                   Just join_arity -> bndrs `lengthIs` join_arity
                   _               -> False
type IdWithOccInfo = Id

tagLamBinders :: UsageDetails          -- Of scope
              -> [Id]                  -- Binders
              -> (UsageDetails,        -- Details with binders removed
                  [IdWithOccInfo])     -- Tagged binders
-- Used for lambda and case binders
-- It copes with the fact that lambda bindings can have a
-- stable unfolding, used for join points
tagLamBinders usage binders = usage' `seq` (usage', bndrs')
  where
    (usage', bndrs') = mapAccumR tag_lam usage binders
    -- Tag one binder and remove it from the running usage details.
    tag_lam usage bndr = (usage2, bndr')
      where
        occ    = lookupDetails usage bndr
        bndr'  = setBinderOcc (markNonTailCalled occ) bndr
                 -- Don't try to make an argument into a join point
        usage1 = usage `delDetails` bndr
        usage2 | isId bndr = addManyOccsSet usage1 (idUnfoldingVars bndr)
                 -- This is effectively the RHS of a
                 -- non-join-point binding, so it's okay to use
                 -- addManyOccsSet, which assumes no tail calls
               | otherwise = usage1
-- | Tag a non-recursive binder with its occurrence info, deciding its
-- join-point-hood, and remove it from the scope's usage details.
tagNonRecBinder :: TopLevelFlag           -- At top level?
                -> UsageDetails           -- Of scope
                -> CoreBndr               -- Binder
                -> (UsageDetails,         -- Details with binder removed
                    IdWithOccInfo)        -- Tagged binder
tagNonRecBinder lvl usage binder
 = let
     occ          = lookupDetails usage binder
     will_be_join = decideJoinPointHood lvl usage [binder]
     occ'    | will_be_join = -- must already be marked AlwaysTailCalled
                              ASSERT(isAlwaysTailCalled occ) occ
             | otherwise    = markNonTailCalled occ
     binder' = setBinderOcc occ' binder
     usage'  = usage `delDetails` binder
   in
   usage' `seq` (usage', binder')
-- | Tag the binders of a recursive group with their occurrence info.
tagRecBinders :: TopLevelFlag           -- At top level?
              -> UsageDetails           -- Of body of let ONLY
              -> [(CoreBndr,            -- Binder
                   UsageDetails,        -- RHS usage details
                   [CoreBndr])]         -- Lambdas in new RHS
              -> (UsageDetails,         -- Adjusted details for whole scope,
                                        -- with binders removed
                  [IdWithOccInfo])      -- Tagged binders
-- Substantially more complicated than non-recursive case. Need to adjust RHS
-- details *before* tagging binders (because the tags depend on the RHSes).
tagRecBinders lvl body_uds triples
 = let
     (bndrs, rhs_udss, _) = unzip3 triples

     -- 1. Determine join-point-hood of whole group, as determined by
     --    the *unadjusted* usage details
     unadj_uds     = body_uds +++ combineUsageDetailsList rhs_udss
     will_be_joins = decideJoinPointHood lvl unadj_uds bndrs

     -- 2. Adjust usage details of each RHS, taking into account the
     --    join-point-hood decision
     rhs_udss' = map adjust triples
     adjust (bndr, rhs_uds, rhs_bndrs)
       = adjustRhsUsage mb_join_arity Recursive rhs_bndrs rhs_uds
       where
         -- Can't use willBeJoinId_maybe here because we haven't tagged the
         -- binder yet (the tag depends on these adjustments!)
         mb_join_arity
           | will_be_joins
           , let occ = lookupDetails unadj_uds bndr
           , AlwaysTailCalled arity <- tailCallInfo occ
           = Just arity
           | otherwise
           = ASSERT(not will_be_joins) -- Should be AlwaysTailCalled if
             Nothing                   -- we are making join points!

     -- 3. Compute final usage details from adjusted RHS details
     adj_uds = body_uds +++ combineUsageDetailsList rhs_udss'

     -- 4. Tag each binder with its adjusted details
     bndrs' = [ setBinderOcc (lookupDetails adj_uds bndr) bndr
              | bndr <- bndrs ]

     -- 5. Drop the binders from the adjusted details and return
     usage' = adj_uds `delDetailsList` bndrs
   in
   (usage', bndrs')
-- | Attach occurrence info to a binder.  Type variables carry none;
-- exported Ids keep conservative info (but lose any stale loop-breaker
-- annotation).
setBinderOcc :: OccInfo -> CoreBndr -> CoreBndr
setBinderOcc occ_info bndr
  | isTyVar bndr      = bndr
  | isExportedId bndr = if isManyOccs (idOccInfo bndr)
                          then bndr
                          else setIdOccInfo bndr noOccInfo
            -- Don't use local usage info for visible-elsewhere things
            -- BUT *do* erase any IAmALoopBreaker annotation, because we're
            -- about to re-generate it and it shouldn't be "sticky"
  | otherwise = setIdOccInfo bndr occ_info
-- | Decide whether some bindings should be made into join points or not.
-- Returns `False` if they can't be join points. Note that it's an
-- all-or-nothing decision, as if multiple binders are given, they're
-- assumed to be mutually recursive.
--
-- It must, however, be a final decision. If we say "True" for 'f',
-- and then subsequently decide /not/ make 'f' into a join point, then
-- the decision about another binding 'g' might be invalidated if (say)
-- 'f' tail-calls 'g'.
--
-- See Note [Invariants on join points] in CoreSyn.
decideJoinPointHood :: TopLevelFlag -> UsageDetails
                    -> [CoreBndr]
                    -> Bool
decideJoinPointHood TopLevel _ _
  = False
decideJoinPointHood NotTopLevel usage bndrs
  -- NOTE(review): 'head' assumes bndrs is non-empty; callers appear to
  -- pass whole binding groups -- TODO confirm.
  | isJoinId (head bndrs)
  = WARN(not all_ok, text "OccurAnal failed to rediscover join point(s):" <+>
                     ppr bndrs)
    all_ok
  | otherwise
  = all_ok
  where
    -- See Note [Invariants on join points]; invariants cited by number below.
    -- Invariant 2 is always satisfiable by the simplifier by eta expansion.
    all_ok = -- Invariant 3: Either all are join points or none are
             all ok bndrs

    ok bndr
      | -- Invariant 1: Only tail calls, all same join arity
        AlwaysTailCalled arity <- tailCallInfo (lookupDetails usage bndr)
      , -- Invariant 1 as applied to LHSes of rules
        all (ok_rule arity) (idCoreRules bndr)
        -- Invariant 2a: stable unfoldings
        -- See Note [Join points and INLINE pragmas]
      , ok_unfolding arity (realIdUnfolding bndr)
        -- Invariant 4: Satisfies polymorphism rule
      , isValidJoinPointType arity (idType bndr)
      = True
      | otherwise
      = False

    ok_rule _ BuiltinRule{} = False -- only possible with plugin shenanigans
    ok_rule join_arity (Rule { ru_args = args })
      = args `lengthIs` join_arity
        -- Invariant 1 as applied to LHSes of rules

    -- ok_unfolding returns False if we should /not/ convert a non-join-id
    -- into a join-id, even though it is AlwaysTailCalled
    ok_unfolding join_arity (CoreUnfolding { uf_src = src, uf_tmpl = rhs })
      = not (isStableSource src && join_arity > joinRhsArity rhs)
    ok_unfolding _ (DFunUnfolding {})
      = False
    ok_unfolding _ _
      = True
-- | The join arity this binder will have if it becomes a join point:
-- prefer the tail-call arity recorded in its occurrence info, otherwise
-- fall back on an existing JoinId marking.
willBeJoinId_maybe :: CoreBndr -> Maybe JoinArity
willBeJoinId_maybe bndr
  | AlwaysTailCalled arity <- tailCallInfo (idOccInfo bndr)
  = Just arity
  | otherwise
  = isJoinId_maybe bndr
{- Note [Join points and INLINE pragmas]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f x = let g = \x. not -- Arity 1
{-# INLINE g #-}
in case x of
A -> g True True
B -> g True False
C -> blah2
Here 'g' is always tail-called applied to 2 args, but the stable
unfolding captured by the INLINE pragma has arity 1. If we try to
convert g to be a join point, its unfolding will still have arity 1
(since it is stable, and we don't meddle with stable unfoldings), and
Lint will complain (see Note [Invariants on join points], (2a), in
CoreSyn. Trac #13413.
Moreover, since g is going to be inlined anyway, there is no benefit
from making it a join point.
If it is recursive, and uselessly marked INLINE, this will stop us
making it a join point, which is annoying. But occasionally
(notably in class methods; see Note [Instances and loop breakers] in
TcInstDcls) we mark recursive things as INLINE but the recursion
unravels; so ignoring INLINE pragmas on recursive things isn't good
either.
See Invariant 2a of Note [Invariants on join points] in CoreSyn
************************************************************************
* *
\subsection{Operations over OccInfo}
* *
************************************************************************
-}
markMany, markInsideLam, markNonTailCalled :: OccInfo -> OccInfo

-- Dead stays dead; anything else becomes ManyOccs, keeping only
-- the tail-call info.
markMany IAmDead = IAmDead
markMany occ     = ManyOccs { occ_tail = occ_tail occ }

-- Only OneOcc carries an occ_in_lam flag; other forms are unchanged.
markInsideLam occ@(OneOcc {}) = occ { occ_in_lam = True }
markInsideLam occ             = occ

markNonTailCalled IAmDead = IAmDead
markNonTailCalled occ     = occ { occ_tail = NoTailCallInfo }
addOccInfo, orOccInfo :: OccInfo -> OccInfo -> OccInfo

-- addOccInfo combines occurrences from two places that both execute:
-- the result is always ManyOccs.
addOccInfo a1 a2  = ASSERT( not (isDeadOcc a1 || isDeadOcc a2) )
                    ManyOccs { occ_tail = tailCallInfo a1 `andTailCallInfo`
                                          tailCallInfo a2 }
                                -- Both branches are at least One
                                -- (Argument is never IAmDead)

-- (orOccInfo orig new) is used
-- when combining occurrence info from branches of a case

orOccInfo (OneOcc { occ_in_lam = in_lam1, occ_int_cxt = int_cxt1
                  , occ_tail = tail1 })
          (OneOcc { occ_in_lam = in_lam2, occ_int_cxt = int_cxt2
                  , occ_tail = tail2 })
  = OneOcc { occ_in_lam  = in_lam1 || in_lam2
           , occ_one_br  = False -- False, because it occurs in both branches
           , occ_int_cxt = int_cxt1 && int_cxt2
           , occ_tail    = tail1 `andTailCallInfo` tail2 }

orOccInfo a1 a2 = ASSERT( not (isDeadOcc a1 || isDeadOcc a2) )
                  ManyOccs { occ_tail = tailCallInfo a1 `andTailCallInfo`
                                        tailCallInfo a2 }
-- | Combine tail-call info from two occurrences: AlwaysTailCalled
-- survives only when both sides agree on the arity.
andTailCallInfo :: TailCallInfo -> TailCallInfo -> TailCallInfo
andTailCallInfo (AlwaysTailCalled arity1) (AlwaysTailCalled arity2)
  | arity1 == arity2 = AlwaysTailCalled arity1
andTailCallInfo _ _  = NoTailCallInfo
| shlevy/ghc | compiler/simplCore/OccurAnal.hs | bsd-3-clause | 116,076 | 0 | 17 | 32,730 | 12,441 | 6,776 | 5,665 | 942 | 11 |
module PaneInteractionServer
( interactions
) where
import Control.Monad (unless)
import Network
import Data.List (span)
import System.IO
import Parser
import Base
import Nettle.OpenFlow
import System.Time
import System.Log.Logger.TH (deriveLoggers)
import qualified System.Log.Logger as Logger
$(deriveLoggers "Logger" [Logger.INFO])
-- | Listen for clients on the given TCP port.  Each accepted client is
-- assigned a fresh Integer id and its own duplicate of the 'toClient'
-- channel; everything the client types is placed on 'fromClient'.
-- NOTE(review): the listening socket is never closed; presumably this
-- runs for the server's lifetime -- TODO confirm.
interactions :: Word16
             -> Chan (Speaker, Integer, String)  -- ^ messages bound for clients
             -> Chan (Speaker, Integer, String)  -- ^ messages sent by clients
             -> IO ()
interactions port toClient fromClient = do
  sock <- listenOn (PortNumber $ fromIntegral port)
  clientIdCounter <- newMVar 0
  -- no-op reader of the original copy of the toClient channel
  -- (drains it so written items do not accumulate unread)
  forkIO $ forever $ do
    readChan toClient
  -- accept new clients
  forkIO $ forever $ do
    (h, _, _) <- accept sock
    -- Allocate the next client id under the MVar lock
    clientId <- takeMVar clientIdCounter
    putMVar clientIdCounter (clientId + 1)
    hSetBuffering h LineBuffering
    toClient <- dupChan toClient -- per-client copy; shadows the argument
    forkIO (handleUser h clientId fromClient toClient)
  return ()
-- Per-client loop: prompt for a login name, announce the login on the
-- toClient bus, echo bus messages addressed to this client, and forward
-- every input line to the fromClient bus until the client types "quit".
handleUser conn clientId fromClient toClient = do
  -- logo (hPutStrLn conn)
  hPutStr conn "Login: "
  hFlush conn
  msg <- hGetLine conn
  -- The speaker name is everything before the first '.', then any
  -- trailing '\r'/'\n' is stripped (hGetLine may leave a CR behind).
  let (tmp1, _) = span (/='.') msg
  let (tmp2, _) = span (/='\r') tmp1
  let (spk, _) = span (/='\n') tmp2
  writeChan toClient (spk, clientId, "logged in")
  -- monitor the toClient bus for messages to this client
  toThreadId <- forkIO $ forever $ do
    (spk', id', msg) <- readChan toClient
    -- Only deliver messages addressed to this speaker AND this client id
    when (spk == spk' && clientId == id') $ do
      hPutStrLn conn msg
      hPutStr conn (spk ++ "> ")
      hFlush conn
    return ()
  -- read commands from user and place on fromClient bus
  forever $ do
    tmp1 <- hGetLine conn
    let (tmp2, _) = span (/='\r') tmp1
    let (msg, _) = span (/='\n') tmp2
    infoM $ spk ++ ": " ++ show msg
    case msg of
      "quit" -> do
        -- Close the handle, stop the mirror thread, then kill ourselves
        hClose conn
        killThread toThreadId
        myThreadId >>= killThread
      otherwise -> writeChan fromClient (spk, clientId, msg)
| brownsys/pane | src/PaneInteractionServer.hs | bsd-3-clause | 1,997 | 0 | 16 | 498 | 663 | 329 | 334 | -1 | -1 |
module RE.Interpret (interpret) where
import Control.Monad.Free
import Control.Monad.State
import qualified Data.Map as M
import Data.Maybe
import RE.Insn
import RE.Program
-- | Continue execution at the instruction stream registered for the
-- given label, keeping the same label table.
-- NOTE(review): 'fromJust' makes this partial -- it crashes if the label
-- is missing from the table; presumably the compiler emits only valid
-- jump targets -- TODO confirm.
doJump :: Program [InsnF String ()] -> String -> Program [InsnF String ()]
doJump (Program _ tab) idx = Program (fromJust $ M.lookup idx tab) tab
-- | Run a compiled regular-expression program against an input string,
-- returning True iff execution reaches a 'Match' instruction.
--
-- Fixes relative to the previous version: the empty-program clause no
-- longer binds an unused name, the 'Character' branch no longer shadows
-- the outer instruction binding 'h', and the manual
-- @if c == h then ... else False@ is replaced by the idiomatic '&&'.
interpret :: String -> Program [InsnF String ()] -> Bool
interpret _ (Program [] _) = False  -- ran out of instructions: no match
interpret s prgm@(Program (insn:next) tab) = case insn of
  Label _ _         -> interpret s (Program next tab)
  Character c _     -> case s of
    []    -> False  -- input exhausted but a character is required
    (x:t) -> c == x && interpret t (Program next tab)
  Jump idx _        -> interpret s (doJump prgm idx)
  -- Try the first branch; lazily fall back to the second.
  Split idx1 idx2 _ -> interpret s (doJump prgm idx1)
                       || interpret s (doJump prgm idx2)
  Match             -> True
| forestbelton/revm | src/RE/Interpret.hs | bsd-3-clause | 961 | 0 | 13 | 307 | 369 | 186 | 183 | 23 | 7 |
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE ForeignFunctionInterface #-}
-- |
-- Module : Crypto.Encrypt.Stream.ChaCha20
-- Copyright : (c) Austin Seipp 2011-2014
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Fast streaming encryption. The underlying primitive is
-- @crypto_stream_chacha20@, a particular cipher specified in,
-- \"ChaCha, a variant of Salsa20\":
-- <http://cr.yp.to/chacha/chacha-20080128.pdf>
--
-- This module is intended to be imported @qualified@ to avoid name
-- clashes with other cryptographic primitives, e.g.
--
-- > import qualified Crypto.Encrypt.Stream.ChaCha20 as ChaCha20
--
module Crypto.Encrypt.Stream.ChaCha20
( -- * Security model
-- $securitymodel
-- * Types
ChaCha20 -- :: *
-- * Key creation
, randomKey -- :: IO (SecretKey ChaCha20)
-- * Encrypting messages
-- ** Example usage
-- $example
, stream -- :: Nonce ChaCha20 -> Int -> SecretKey ChaCha20 -> ByteString
, encrypt -- :: Nonce ChaCha20 -> ByteString -> SecretKey ChaCha20 -> ByteString
, decrypt -- :: Nonce ChaCha20 -> ByteString -> SecretKey ChaCha20 -> ByteString
) where
import Data.Word
import Foreign.C.Types
import Foreign.Ptr
import System.IO.Unsafe (unsafePerformIO)
import Data.ByteString as S
import Data.ByteString.Internal as SI
import Data.ByteString.Unsafe as SU
import Crypto.Key
import Crypto.Nonce
import System.Crypto.Random
-- $securitymodel
--
-- The @'stream'@ function, viewed as a function of the nonce for a
-- uniform random key, is designed to meet the standard notion of
-- unpredictability (\"PRF\"). For a formal definition see, e.g.,
-- Section 2.3 of Bellare, Kilian, and Rogaway, \"The security of the
-- cipher block chaining message authentication code,\" Journal of
-- Computer and System Sciences 61 (2000), 362–399;
-- <http://www-cse.ucsd.edu/~mihir/papers/cbc.html>.
--
-- This means that an attacker cannot distinguish this function from a
-- uniform random function. Consequently, if a series of messages is
-- encrypted by @'encrypt'@ with a different nonce for each message,
-- the ciphertexts are indistinguishable from uniform random strings
-- of the same length.
--
-- Note that the length is not hidden. Note also that it is the
-- caller's responsibility to ensure the uniqueness of nonces—for
-- example, by using nonce 1 for the first message, nonce 2 for the
-- second message, etc. Nonces are long enough that randomly generated
-- nonces have negligible risk of collision.
--
-- NaCl does not make any promises regarding the resistance of
-- @'stream'@ to \"related-key attacks.\" It is the caller's
-- responsibility to use proper key-derivation functions.
-- $setup
-- >>> :set -XOverloadedStrings
-- | A phantom type for representing types related to secret-key
-- streaming encryption.
-- Empty declaration: ChaCha20 exists only at the type level to tag
-- 'SecretKey' and 'Nonce' values.
data ChaCha20
instance Nonces ChaCha20 where
  -- ChaCha20 uses an 8-byte nonce (see chacha20NONCEBYTES below).
  nonceSize _ = chacha20NONCEBYTES
-- | Generate a random key for performing encryption.
--
-- Example usage:
--
-- >>> key <- randomKey
-- Draws chacha20KEYBYTES (32) bytes from the system CSPRNG and wraps
-- them in the 'SecretKey' newtype.
randomKey :: IO (SecretKey ChaCha20)
randomKey = SecretKey `fmap` randombytes chacha20KEYBYTES
-- | Given a @'Nonce'@ @n@, size @s@ and @'Key'@ @k@, @'stream' n s k@
-- generates a cryptographic stream of length @s@.
--
-- Example usage:
--
-- >>> nonce <- randomNonce :: IO (Nonce ChaCha20)
-- >>> key <- randomKey
-- >>> let ks = stream nonce 256 key
-- Produces @sz@ keystream bytes by calling the C primitive
-- @crypto_stream_chacha20@.  'unsafePerformIO' is safe here because the
-- output depends only on the nonce, size and key (the C call is a pure
-- function of its inputs, writing into a freshly created buffer).
stream :: Nonce ChaCha20
       -- ^ Nonce
       -> Int
       -- ^ Size
       -> SecretKey ChaCha20
       -- ^ Key
       -> ByteString
       -- ^ Resulting crypto stream
stream (Nonce n) sz (SecretKey sk)
  = unsafePerformIO . SI.create sz $ \out ->
    SU.unsafeUseAsCString n $ \pn ->
      SU.unsafeUseAsCString sk $ \psk -> do
        -- Return code ignored; NOTE(review): the C call presumably cannot
        -- fail with valid buffers -- confirm against libsodium docs.
        _ <- c_crypto_stream_chacha20 out (fromIntegral sz) pn psk
        return ()
-- | Given a @'Nonce'@ @n@, plaintext @p@ and @'Key'@ @k@, @encrypt n
-- p k@ encrypts the message @p@ using @'Key'@ @k@ and returns the
-- result.
--
-- @'encrypt'@ guarantees the resulting ciphertext is the plaintext
-- bitwise XOR'd with the result of @'stream'@. As a result,
-- @'encrypt'@ can also be used to decrypt messages.
-- XORs the message with the ChaCha20 keystream via
-- @crypto_stream_chacha20_xor@; the output has the same length as the
-- input.  Because XOR is an involution, this same function decrypts.
encrypt :: Nonce ChaCha20
        -- ^ Nonce
        -> ByteString
        -- ^ Input plaintext
        -> SecretKey ChaCha20
        -- ^ Key
        -> ByteString
        -- ^ Ciphertext
encrypt (Nonce n) msg (SecretKey sk)
  = let l = S.length msg
    in unsafePerformIO . SI.create l $ \out ->
         SU.unsafeUseAsCString msg $ \cstr ->
           SU.unsafeUseAsCString n $ \pn ->
             SU.unsafeUseAsCString sk $ \psk -> do
               -- Return code ignored, as in 'stream'.
               _ <- c_crypto_stream_chacha20_xor out cstr (fromIntegral l) pn psk
               return ()
{-# INLINE encrypt #-}
-- | Simple alias for @'encrypt'@.
-- Decryption is literally 'encrypt': XOR with the same keystream
-- restores the plaintext.
decrypt :: Nonce ChaCha20
        -- ^ Nonce
        -> ByteString
        -- ^ Input ciphertext
        -> SecretKey ChaCha20
        -- ^ Key
        -> ByteString
        -- ^ Plaintext
decrypt = encrypt
{-# INLINE decrypt #-}
-- $example
-- >>> nonce <- randomNonce :: IO (Nonce ChaCha20)
-- >>> key <- randomKey
-- >>> let cipherText = encrypt nonce "Hello" key
-- >>> let recoveredText = decrypt nonce cipherText key
-- >>> recoveredText == "Hello"
-- True
-- Key size in bytes (256-bit key).
chacha20KEYBYTES :: Int
chacha20KEYBYTES = 32
-- Nonce size in bytes (64-bit nonce, per the original ChaCha spec).
chacha20NONCEBYTES :: Int
chacha20NONCEBYTES = 8
-- C binding: fills the output buffer with keystream bytes.
-- Arguments: out, length, nonce, key.
foreign import ccall unsafe "crypto_stream_chacha20"
  c_crypto_stream_chacha20 :: Ptr Word8 -> CULLong -> Ptr CChar ->
                              Ptr CChar -> IO Int
-- C binding: XORs the input with the keystream into the output buffer.
-- Arguments: out, in, length, nonce, key.
foreign import ccall unsafe "crypto_stream_chacha20_xor"
  c_crypto_stream_chacha20_xor :: Ptr Word8 -> Ptr CChar ->
                                  CULLong -> Ptr CChar -> Ptr CChar -> IO Int
| thoughtpolice/hs-nacl | src/Crypto/Encrypt/Stream/ChaCha20.hs | bsd-3-clause | 5,873 | 0 | 20 | 1,402 | 659 | 392 | 267 | -1 | -1 |
module Syntax where
import Data.Set (union)
import qualified Data.Set as S
import Data.Foldable (foldl')
type SymName = String
-- | Binary operators of the source language: arithmetic (Add..Div)
-- and comparisons (Lt..Eq).
data BinOp
  = Add
  | Sub
  | Mul
  | Div
  | Lt
  | Lte
  | Gt
  | Gte
  | Eq
  deriving (Eq, Ord, Show)
-- | Surface syntax of each binary operator paired with its AST tag,
-- used by the parser to translate operator tokens.
str2binOp :: [(String, BinOp)]
str2binOp =
  zip ["+", "-", "*", "/", "<", "<=", ">", ">=", "="]
      [Add, Sub, Mul, Div, Lt,  Lte,  Gt,  Gte,  Eq]
-- | Scheme boolean literal syntax mapped to Haskell booleans.
str2bool :: [(String, Bool)]
str2bool = [("#t", True), ("#f", False)]
-- | Identifiers users may not redefine: special forms plus internal
-- names ('envVarName', 'dataPrefix') and runtime allocation primitives.
reservedWords :: [String]
reservedWords =
  ["define"
  ,"lambda"
  ,"if"
  ,"cond"
  ,"else"
  ,"and"
  ,"or"
  ,"not"
  ,envVarName
  ,"malloc"
  ,"memalign"
  ,"let"
  ,"set!"
  ,"isTag"
  ,dataPrefix
  ]
-- | Scheme-level primitive names mapped to the runtime functions that
-- implement them.
primFuncs :: [(String, String)]
primFuncs =
  [("boolean?", "isBoolean")
  ,("char?", "isChar")
  ,("number?", "isNumber")
  ,("cons", "cons")
  ,("null?", "isNull")
  ,("car", "car")
  ,("cdr", "cdr")
  ,("set-cdr!", "cdrSet")
  ,("set-car!", "carSet")
  ,("make-vector", "makeVector")
  ,("vector?", "isVector")
  -- Fixed typo: was "vecor-length", which made the standard Scheme
  -- @vector-length@ primitive unresolvable.
  ,("vector-length", "vectorLength")
  ,("vector-ref", "vectorRef")
  ,("vector-set!", "vectorSet")
  ]
-- Compiler-internal naming conventions: closure environment variable,
-- lambda name suffix, quoted-data and vector sigils, and the names of
-- the generated entry/initialisation functions.
envVarName, suffLambda, dataPrefix, vectorPrefix :: String
entryFuncName, initGlobalsFuncName :: String
suffLambda = "-lambda"
envVarName = "__env"
dataPrefix = "'"
vectorPrefix = "#"
entryFuncName = "entryFunc"
initGlobalsFuncName = "initGlobals"
-- | Abstract syntax of the source language.  'PrimCallExp' is a direct
-- call to a runtime primitive (see 'primFuncs'); 'CallExp' is a call to
-- an arbitrary evaluated function expression.
data Expr
  = NumberExp Integer
  | EmptyExp
  | CharExp Char
  | BoolExp Bool
  | ArrayExp [Expr]
  | StringExp String
  | VarExp SymName
  | SetExp SymName Expr
  | DefExp SymName Expr
  | IfExp Expr Expr Expr
  | FuncExp [SymName] [Expr]
  | CallExp Expr [Expr]
  | PrimCallExp SymName [Expr]
  | BinOpExp BinOp Expr Expr
  deriving (Eq, Ord, Show)
-- | Collect the variables used in @exprs@ that are not in the bound-name
-- list @vars@.  Duplicates are removed via the intermediate 'S.Set'.
--
-- NOTE(review): 'FuncExp' and 'DefExp' fall through to the catch-all and
-- contribute nothing; presumably nested lambdas are handled separately
-- during closure conversion -- confirm with callers.
findFreeVars :: [SymName] -> [Expr] -> [SymName]
findFreeVars vars exprs = S.toList
                        . foldl' union S.empty
                        . map (go vars)
                        $ exprs
 where
  go bound (VarExp var)
    | var `elem` bound = S.empty
    | otherwise        = S.singleton var
  -- Bug fix: even when the assigned variable is bound, the right-hand
  -- side can still mention free variables; the original dropped them.
  go bound (SetExp var e)
    | var `elem` bound = go bound e
    | otherwise        = S.singleton var `union` go bound e
  go bound (BinOpExp _ e1 e2) = go bound e1 `union` go bound e2
  go bound (IfExp e1 e2 e3)   = go bound e1 `union` go bound e2 `union` go bound e3
  go bound (CallExp e1 es)    = foldl' union (go bound e1) (map (go bound) es)
  go _ _ = S.empty
| talw/crisp-compiler | src/Syntax.hs | bsd-3-clause | 2,467 | 0 | 12 | 660 | 896 | 536 | 360 | 102 | 8 |
module System.IO.Streams.Logging.Deferred
( DeferredLogger
, makeDeferredLogger
, stopDeferredLogger
) where
------------------------------------------------------------------------------
import Blaze.ByteString.Builder (Builder)
import qualified Blaze.ByteString.Builder as B
import Control.Concurrent (ThreadId)
import Control.Concurrent.MVar (MVar, newEmptyMVar,
newMVar, readMVar)
import System.IO.Streams (OutputStream)
import qualified System.IO.Streams as Streams
------------------------------------------------------------------------------
import System.IO.Streams.Logging.Internal.Types (Logger (..),
LoggerState (..))
-- | State for a logger that hands writes off to a background thread.
data DeferredLogger = DeferredLogger
    { _dataWaiting   :: !(MVar ())       -- ^ Signalled when new log data is queued.
    , _loggingThread :: !(MVar ThreadId) -- ^ The background writer thread.
    , _wrappedLogger :: !Logger          -- ^ Underlying logger that performs output.
    }
-- | Wrap a logger so output happens on a background thread.
-- NOTE(review): this is an unfinished stub -- both the returned logger
-- and 'wrapReset' are 'undefined' and will crash if forced.
makeDeferredLogger :: Logger
                   -> IO Logger
makeDeferredLogger wrappedLogger = do
    dataWaitingMVar   <- newEmptyMVar
    loggingThreadMVar <- newEmptyMVar
    (LoggerState output reset onerror) <- readMVar $ unLogger wrappedLogger
    newLogger <- newMVar $! LoggerState output (wrapReset reset) onerror
    undefined
  where
    wrapReset = undefined
-- | Shut down the background logging thread.
-- NOTE(review): unimplemented stub ('undefined').
stopDeferredLogger :: Logger -> IO ()
stopDeferredLogger _ = undefined
| snapframework/logging-streams | src/System/IO/Streams/Logging/Deferred.hs | bsd-3-clause | 1,525 | 0 | 12 | 477 | 275 | 159 | 116 | 34 | 1 |
-- | Simulates the @isSpaceChar@ Java method. <http://docs.oracle.com/javase/6/docs/api/java/lang/Character.html#isSpaceChar%28int%29>
module Language.Java.Character.IsSpaceChar
(
IsSpaceChar(..)
) where
import Data.Char
import Data.Word
import Data.Set.Diet(Diet)
import qualified Data.Set.Diet as S
-- | Instances simulate Java characters and provide a decision on simulating @isSpaceChar@.
-- 'isNotSpaceChar' has a default implementation, so instances need only
-- supply 'isSpaceChar'.
class Enum c => IsSpaceChar c where
  isSpaceChar ::
    c
    -> Bool
  isNotSpaceChar ::
    c
    -> Bool
  isNotSpaceChar =
    not . isSpaceChar
-- All instances test membership in the shared interval set below;
-- 'Char' is first converted to its code point.
instance IsSpaceChar Char where
  isSpaceChar c =
    ord c `S.member` isSpaceCharSet
instance IsSpaceChar Int where
  isSpaceChar c =
    c `S.member` isSpaceCharSet
instance IsSpaceChar Integer where
  isSpaceChar c =
    c `S.member` isSpaceCharSet
instance IsSpaceChar Word8 where
  isSpaceChar c =
    c `S.member` isSpaceCharSet
instance IsSpaceChar Word16 where
  isSpaceChar c =
    c `S.member` isSpaceCharSet
instance IsSpaceChar Word32 where
  isSpaceChar c =
    c `S.member` isSpaceCharSet
instance IsSpaceChar Word64 where
  isSpaceChar c =
    c `S.member` isSpaceCharSet
-- Code points matching Java's isSpaceChar, stored as a discrete
-- interval (Diet) set for compact range membership tests.
isSpaceCharSet ::
  (Num a, Enum a, Ord a) =>
  Diet a
isSpaceCharSet =
  let r = [
            [32]
          , [160]
          , [5760]
          , [6158]
          , [8192..8203]
          , [8232..8233]
          , [8239]
          , [8287]
          , [12288]
          ]
  in S.fromList . concat $ r | tonymorris/java-character | src/Language/Java/Character/IsSpaceChar.hs | bsd-3-clause | 1,447 | 0 | 10 | 361 | 376 | 214 | 162 | 52 | 1 |
-- © 2002 Peter Thiemann
-- |Implements RFC 2045 MIME coding.
module WASH.Utility.Base64
(encode, encode', decode, decode'
,alphabet_list
)
where
import Data.Array
import Data.Char
--
-- |Yields encoded input cropped to lines of less than 76 characters. Directly
-- usable as email body.
encode :: String -> String
encode = encode_base64
-- |yields continuous stream of bytes (no line breaks).
encode' :: String -> String
encode' = encode_base64'
-- |Directly applicable to email body; strips non-Base64 characters first.
decode :: String -> String
decode = decode_base64
-- |Only applicable to stream of Base64 characters.
decode' :: String -> String
decode' = decode_base64'
-- |Applicable to list of lines.  (Unexported; kept for completeness.)
decode_lines :: [String] -> String
decode_lines = decode_base64_lines
-- --------------------------------------------------------------------
-- |Base64 alphabet in encoding order.
alphabet_list :: String
alphabet_list =
  "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
-- Index -> character pairs for encoding.
encode_base64_alphabet_index =
  zip [0 .. (63::Int)] alphabet_list
-- Character -> index pairs for decoding.
decode_base64_alphabet_index =
  zip alphabet_list [0 .. (63::Int)]
-- Constant-time lookup tables built from the association lists above.
encode_base64_alphabet =
  array (0 :: Int, 63 :: Int) encode_base64_alphabet_index
-- Bounds (' ','z') cover every character of the Base64 alphabet.
decode_base64_alphabet =
  array (' ','z') decode_base64_alphabet_index
-- Predicate table: True for Base64 alphabet characters and '='.
base64_character =
  array (chr 0, chr 255) [(c, c `elem` alphabet_list || c == '=') | c <- [chr 0 .. chr 255]]
-- Encode and wrap at 76 characters per RFC 2045.
encode_base64 = linebreak 76 . encode_base64'
-- Insert "\r\n" after every m characters; always terminates with "\r\n"
-- (note the [] equation matches before the 0 equation, so a line that
-- ends exactly at the limit gets a single break).
linebreak m xs = lb m xs
  where
  lb n [] = "\r\n"
  lb 0 xs = '\r':'\n': lb m xs
  lb n (x:xs) = x: lb (n-1) xs
-- Core encoder: consume input 3 bytes at a time, emitting 4 alphabet
-- characters.  The 1- and 2-byte tails are zero-padded and marked with
-- "==" / "=" respectively.
encode_base64' [] = []
encode_base64' [ch] =
  encode_base64_alphabet!b1 :
  encode_base64_alphabet!b2 :
  "=="
  where (b1, b2, _, _) = encode_base64_group (ch, chr 0, chr 0)
encode_base64' [ch1, ch2] =
  encode_base64_alphabet!b1 :
  encode_base64_alphabet!b2 :
  encode_base64_alphabet!b3 :
  "="
  where (b1, b2, b3, _) = encode_base64_group (ch1, ch2, chr 0)
encode_base64' (ch1: ch2: ch3: rest) =
  encode_base64_alphabet!b1 :
  encode_base64_alphabet!b2 :
  encode_base64_alphabet!b3 :
  encode_base64_alphabet!b4 :
  encode_base64' rest
  where (b1, b2, b3, b4) = encode_base64_group (ch1, ch2, ch3)
-- Repack three 8-bit bytes into four 6-bit groups:
-- 111111 112222 222233 333333
encode_base64_group :: (Char, Char, Char) -> (Int, Int, Int, Int)
encode_base64_group (ch1, ch2, ch3) =
  ( hi1
  , lo1 * 16 + hi2
  , lo2 * 4 + hi3
  , lo3
  )
  where
    -- Split each byte into the part that stays in this output group
    -- and the part carried into the next one.
    (hi1, lo1) = ord ch1 `divMod` 4
    (hi2, lo2) = ord ch2 `divMod` 16
    (hi3, lo3) = ord ch3 `divMod` 64
-- Inverse of 'encode_base64_group': repack four 6-bit groups into
-- three 8-bit bytes.
decode_base64_group :: (Int, Int, Int, Int) -> (Char, Char, Char)
decode_base64_group (b1, b2, b3, b4) = (ch1, ch2, ch3)
  where
    (h2, l2) = b2 `divMod` 16
    (h3, l3) = b3 `divMod` 4
    ch1 = chr (b1 * 4 + h2)
    ch2 = chr (l2 * 16 + h3)
    ch3 = chr (l3 * 64 + b4)
-- Core decoder: consume 4 alphabet characters at a time, emitting 3
-- bytes.  The padded forms ("xx==" and "xxx=") must appear as the final
-- quadruple and yield 1 or 2 bytes respectively.
decode_base64' [] = []
decode_base64' [cin1, cin2, '=', '='] = [cout1]
  where (cout1, _, _) =
          decode_base64_group (decode_base64_alphabet!cin1
                              ,decode_base64_alphabet!cin2
                              ,0
                              ,0)
decode_base64' [cin1, cin2, cin3, '='] = [cout1, cout2]
  where (cout1, cout2, _) =
          decode_base64_group (decode_base64_alphabet!cin1
                              ,decode_base64_alphabet!cin2
                              ,decode_base64_alphabet!cin3
                              ,0)
decode_base64' (cin1: cin2: cin3: cin4: rest) =
  cout1: cout2: cout3: decode_base64' rest
  where (cout1, cout2, cout3) =
          decode_base64_group (decode_base64_alphabet!cin1
                              ,decode_base64_alphabet!cin2
                              ,decode_base64_alphabet!cin3
                              ,decode_base64_alphabet!cin4)
-- Drop everything that is not a Base64 character (e.g. CRLFs from an
-- email body) before decoding.
decode_base64 = decode_base64' . filter (base64_character!)
-- Decode a pre-split list of lines by concatenating them first.
decode_base64_lines = decode_base64' . concat
| nh2/WashNGo | WASH/Utility/Base64.hs | bsd-3-clause | 3,461 | 17 | 13 | 708 | 993 | 582 | 411 | 86 | 3 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Test.IO.Zodiac.TSRP.Data.Symmetric where
import Disorder.Core.IO (testIO)
import Disorder.Core.Property (failWith)
import Disorder.Core.Run (ExpectedTestSpeed(..), disorderCheckEnvAll)
import P
import System.IO (IO)
import Zodiac.TSRP.Data.Symmetric
import Test.Zodiac.TSRP.Arbitrary ()
import Test.QuickCheck
-- really need a SafeEq typeclass for this kind of thing
-- FIXME: rewrite once https://github.com/ambiata/tinfoil/pull/47 is merged
-- Round-trip property: render then parse must succeed and produce a
-- header equal (under the constant-time-ish 'symmetricAuthHeaderEq')
-- to the original.
prop_tripping_SymmetricAuthHeader :: SymmetricAuthHeader -> Property
prop_tripping_SymmetricAuthHeader sah =
  let sah' = parseSymmetricAuthHeader $ renderSymmetricAuthHeader sah in
  case sah' of
    Nothing' -> failWith "parsing SymmetricAuthHeader unexpectedly failed"
    Just' sah'' -> testIO $ do
      r <- sah'' `symmetricAuthHeaderEq` sah
      pure $ r === True
-- Empty TH splice so the properties above are visible to the
-- $disorderCheckEnvAll reification below.
return []
tests :: IO Bool
tests = $disorderCheckEnvAll TestRunMore
| ambiata/zodiac | zodiac-tsrp/test/Test/IO/Zodiac/TSRP/Data/Symmetric.hs | bsd-3-clause | 1,079 | 0 | 14 | 217 | 200 | 114 | 86 | 23 | 2 |
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__
{-# LANGUAGE Rank2Types #-}
#endif
#if __GLASGOW_HASKELL__ >= 703
{-# LANGUAGE Trustworthy #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Data.Graph
-- Copyright : (c) The University of Glasgow 2002
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- A version of the graph algorithms described in:
--
-- /Structuring Depth-First Search Algorithms in Haskell/,
-- by David King and John Launchbury.
--
-----------------------------------------------------------------------------
module Data.Graph(
-- * External interface
-- At present the only one with a "nice" external interface
stronglyConnComp, stronglyConnCompR, SCC(..), flattenSCC, flattenSCCs,
-- * Graphs
Graph, Table, Bounds, Edge, Vertex,
-- ** Building graphs
graphFromEdges, graphFromEdges', buildG, transposeG,
-- reverseE,
-- ** Graph properties
vertices, edges,
outdegree, indegree,
-- * Algorithms
dfs, dff,
topSort,
components,
scc,
bcc,
-- tree, back, cross, forward,
reachable, path,
module Data.Tree
) where
#if __GLASGOW_HASKELL__
# define USE_ST_MONAD 1
#endif
-- Extensions
#if USE_ST_MONAD
import Control.Monad.ST
import Data.Array.ST (STArray, newArray, readArray, writeArray)
#else
import Data.IntSet (IntSet)
import qualified Data.IntSet as Set
#endif
import Data.Tree (Tree(Node), Forest)
-- std interfaces
import Control.DeepSeq (NFData(rnf))
import Data.Maybe
import Data.Array
import Data.List
-------------------------------------------------------------------------
-- -
-- External interface
-- -
-------------------------------------------------------------------------
-- | Strongly connected component.
data SCC vertex = AcyclicSCC vertex -- ^ A single vertex that is not
                                    -- in any cycle.
                | CyclicSCC [vertex] -- ^ A maximal set of mutually
                                     -- reachable vertices.
instance NFData a => NFData (SCC a) where
    rnf (AcyclicSCC v) = rnf v
    rnf (CyclicSCC vs) = rnf vs
-- | The vertices of a list of strongly connected components.
flattenSCCs :: [SCC a] -> [a]
flattenSCCs = concatMap flattenSCC
-- | The vertices of a strongly connected component.
flattenSCC :: SCC vertex -> [vertex]
flattenSCC (AcyclicSCC v) = [v]
flattenSCC (CyclicSCC vs) = vs
-- | The strongly connected components of a directed graph, topologically
-- sorted.
stronglyConnComp
        :: Ord key
        => [(node, key, [key])]
                -- ^ The graph: a list of nodes uniquely identified by keys,
                -- with a list of keys of nodes this node has edges to.
                -- The out-list may contain keys that don't correspond to
                -- nodes of the graph; such edges are ignored.
        -> [SCC node]
-- Delegates to the triple-preserving variant and projects out the node.
stronglyConnComp edges0
  = map get_node (stronglyConnCompR edges0)
  where
    get_node (AcyclicSCC (n, _, _)) = AcyclicSCC n
    get_node (CyclicSCC triples) = CyclicSCC [n | (n,_,_) <- triples]
-- | The strongly connected components of a directed graph, topologically
-- sorted. The function is the same as 'stronglyConnComp', except that
-- all the information about each node retained.
-- This interface is used when you expect to apply 'SCC' to
-- (some of) the result of 'SCC', so you don't want to lose the
-- dependency information.
stronglyConnCompR
        :: Ord key
        => [(node, key, [key])]
                -- ^ The graph: a list of nodes uniquely identified by keys,
                -- with a list of keys of nodes this node has edges to.
                -- The out-list may contain keys that don't correspond to
                -- nodes of the graph; such edges are ignored.
        -> [SCC (node, key, [key])]     -- ^ Topologically sorted
stronglyConnCompR [] = [] -- added to avoid creating empty array in graphFromEdges -- SOF
stronglyConnCompR edges0
  = map decode forest
  where
    (graph, vertex_fn,_) = graphFromEdges edges0
    forest = scc graph
    -- A singleton tree is only a cycle if the vertex points at itself.
    decode (Node v []) | mentions_itself v = CyclicSCC [vertex_fn v]
                       | otherwise = AcyclicSCC (vertex_fn v)
    decode other = CyclicSCC (dec other [])
      where
        dec (Node v ts) vs = vertex_fn v : foldr dec vs ts
    mentions_itself v = v `elem` (graph ! v)
-------------------------------------------------------------------------
-- -
-- Graphs
-- -
-------------------------------------------------------------------------
-- | Abstract representation of vertices.
type Vertex = Int
-- | Table indexed by a contiguous set of vertices.
type Table a = Array Vertex a
-- | Adjacency list representation of a graph, mapping each vertex to its
-- list of successors.
type Graph = Table [Vertex]
-- | The bounds of a 'Table'.
type Bounds = (Vertex, Vertex)
-- | An edge from the first vertex to the second.
type Edge = (Vertex, Vertex)
-- | All vertices of a graph.
vertices :: Graph -> [Vertex]
vertices = indices
-- | All edges of a graph.
edges :: Graph -> [Edge]
edges g = [ (v, w) | v <- vertices g, w <- g!v ]
-- Map a vertex-aware function over every entry of a table.
mapT :: (Vertex -> a -> b) -> Table a -> Table b
mapT f t = array (bounds t) [ (,) v (f v (t!v)) | v <- indices t ]
-- | Build a graph from a list of edges.
buildG :: Bounds -> [Edge] -> Graph
buildG bounds0 edges0 = accumArray (flip (:)) [] bounds0 edges0
-- | The graph obtained by reversing all edges.
transposeG :: Graph -> Graph
transposeG g = buildG (bounds g) (reverseE g)
reverseE :: Graph -> [Edge]
reverseE g = [ (w, v) | (v, w) <- edges g ]
-- | A table of the count of edges from each node.
outdegree :: Graph -> Table Int
outdegree = mapT numEdges
  where numEdges _ ws = length ws
-- | A table of the count of edges into each node.
indegree :: Graph -> Table Int
indegree = outdegree . transposeG
-- | Identical to 'graphFromEdges', except that the return value
-- does not include the function which maps keys to vertices. This
-- version of 'graphFromEdges' is for backwards compatibility.
graphFromEdges'
        :: Ord key
        => [(node, key, [key])]
        -> (Graph, Vertex -> (node, key, [key]))
-- Discards the key->vertex lookup from the full version.
graphFromEdges' x = (a,b) where
    (a,b,_) = graphFromEdges x
-- | Build a graph from a list of nodes uniquely identified by keys,
-- with a list of keys of nodes this node should have edges to.
-- The out-list may contain keys that don't correspond to
-- nodes of the graph; they are ignored.
graphFromEdges
        :: Ord key
        => [(node, key, [key])]
        -> (Graph, Vertex -> (node, key, [key]), key -> Maybe Vertex)
graphFromEdges edges0
  = (graph, \v -> vertex_map ! v, key_vertex)
  where
    max_v = length edges0 - 1
    bounds0 = (0,max_v) :: (Vertex, Vertex)
    -- Sorting by key lets key_vertex use binary search below.
    sorted_edges = sortBy lt edges0
    edges1 = zipWith (,) [0..] sorted_edges
    graph = array bounds0 [(,) v (mapMaybe key_vertex ks) | (,) v (_, _, ks) <- edges1]
    key_map = array bounds0 [(,) v k | (,) v (_, k, _ ) <- edges1]
    vertex_map = array bounds0 edges1
    (_,k1,_) `lt` (_,k2,_) = k1 `compare` k2
    -- key_vertex :: key -> Maybe Vertex
    -- returns Nothing for non-interesting vertices
    -- Binary search over key_map (sorted by construction).
    key_vertex k = findVertex 0 max_v
      where
        findVertex a b | a > b
          = Nothing
        findVertex a b = case compare k (key_map ! mid) of
            LT -> findVertex a (mid-1)
            EQ -> Just mid
            GT -> findVertex (mid+1) b
          where
            mid = (a + b) `div` 2
-------------------------------------------------------------------------
-- -
-- Depth first search
-- -
-------------------------------------------------------------------------
-- | A spanning forest of the graph, obtained from a depth-first search of
-- the graph starting from each vertex in an unspecified order.
dff :: Graph -> Forest Vertex
dff g = dfs g (vertices g)
-- | A spanning forest of the part of the graph reachable from the listed
-- vertices, obtained from a depth-first search of the graph starting at
-- each of the listed vertices in order.
dfs :: Graph -> [Vertex] -> Forest Vertex
dfs g vs = prune (bounds g) (map (generate g) vs)
-- Build the (possibly infinite) unfolding of the graph from a vertex;
-- sharing/cycles are cut later by 'prune'.
generate :: Graph -> Vertex -> Tree Vertex
generate g v = Node v (map (generate g) (g!v))
-- Discard subtrees rooted at already-visited vertices, using the SetM
-- visited-set monad defined below.
prune :: Bounds -> Forest Vertex -> Forest Vertex
prune bnds ts = run bnds (chop ts)
chop :: Forest Vertex -> SetM s (Forest Vertex)
chop [] = return []
chop (Node v ts : us)
  = do
  visited <- contains v
  if visited then
    chop us
  else do
    include v
    as <- chop ts
    bs <- chop us
    return (Node v as : bs)
-- A monad holding a set of vertices visited so far.
#if USE_ST_MONAD
-- Use the ST monad if available, for constant-time primitives.
newtype SetM s a = SetM { runSetM :: STArray s Vertex Bool -> ST s a }

-- Explicit Functor/Applicative instances: required superclasses of
-- Monad since GHC 7.10 (the Applicative-Monad Proposal); the original
-- Monad-only instance no longer compiles on modern GHC.
instance Functor (SetM s) where
    fmap f (SetM v) = SetM $ \ s -> fmap f (v s)

instance Applicative (SetM s) where
    pure x = SetM $ const (return x)
    SetM f <*> SetM v = SetM $ \ s -> f s >>= \ g -> fmap g (v s)

instance Monad (SetM s) where
    return = pure
    SetM v >>= f = SetM $ \ s -> do { x <- v s; runSetM (f x) s }

run :: Bounds -> (forall s. SetM s a) -> a
run bnds act = runST (newArray bnds False >>= runSetM act)

-- Has the vertex been visited already?
contains :: Vertex -> SetM s Bool
contains v = SetM $ \ m -> readArray m v

-- Mark the vertex as visited.
include :: Vertex -> SetM s ()
include v = SetM $ \ m -> writeArray m v True
#else /* !USE_ST_MONAD */
-- Portable implementation using IntSet, threaded state-style.
newtype SetM s a = SetM { runSetM :: IntSet -> (a, IntSet) }

instance Functor (SetM s) where
    fmap f (SetM v) = SetM $ \ s -> case v s of (x, s') -> (f x, s')

instance Applicative (SetM s) where
    pure x = SetM $ \ s -> (x, s)
    SetM f <*> SetM v = SetM $ \ s -> case f s of
        (g, s') -> case v s' of (x, s'') -> (g x, s'')

instance Monad (SetM s) where
    return = pure
    SetM v >>= f = SetM $ \ s -> case v s of (x, s') -> runSetM (f x) s'

run :: Bounds -> SetM s a -> a
run _ act = fst (runSetM act Set.empty)

contains :: Vertex -> SetM s Bool
contains v = SetM $ \ m -> (Set.member v m, m)

include :: Vertex -> SetM s ()
include v = SetM $ \ m -> ((), Set.insert v m)
#endif /* !USE_ST_MONAD */
-------------------------------------------------------------------------
-- -
-- Algorithms
-- -
-------------------------------------------------------------------------
------------------------------------------------------------
-- Algorithm 1: depth first search numbering
------------------------------------------------------------
-- Difference-list preorder traversal of a tree.
preorder' :: Tree a -> [a] -> [a]
preorder' (Node a ts) = (a :) . preorderF' ts
preorderF' :: Forest a -> [a] -> [a]
preorderF' ts = foldr (.) id $ map preorder' ts
preorderF :: Forest a -> [a]
preorderF ts = preorderF' ts []
-- Number the given vertices 1..n in list order.
tabulate :: Bounds -> [Vertex] -> Table Int
tabulate bnds vs = array bnds (zipWith (,) vs [1..])
-- Table of preorder (discovery) numbers for a DFS forest.
preArr :: Bounds -> Forest Vertex -> Table Int
preArr bnds = tabulate bnds . preorderF
------------------------------------------------------------
-- Algorithm 2: topological sorting
------------------------------------------------------------
-- Difference-list postorder traversal of a tree.
postorder :: Tree a -> [a] -> [a]
postorder (Node a ts) = postorderF ts . (a :)
postorderF :: Forest a -> [a] -> [a]
postorderF ts = foldr (.) id $ map postorder ts
-- Postorder of the whole graph's DFS forest.
postOrd :: Graph -> [Vertex]
postOrd g = postorderF (dff g) []
-- | A topological sort of the graph.
-- The order is partially specified by the condition that a vertex /i/
-- precedes /j/ whenever /j/ is reachable from /i/ but not vice versa.
topSort :: Graph -> [Vertex]
topSort = reverse . postOrd
------------------------------------------------------------
-- Algorithm 3: connected components
------------------------------------------------------------
-- | The connected components of a graph.
-- Two vertices are connected if there is a path between them, traversing
-- edges in either direction.
components :: Graph -> Forest Vertex
components = dff . undirected
-- Symmetric closure: add the reverse of every edge.
undirected :: Graph -> Graph
undirected g = buildG (bounds g) (edges g ++ reverseE g)
-- Algorithm 4: strongly connected components
-- | The strongly connected components of a graph.
-- Kosaraju's algorithm: DFS in reverse postorder of the transpose.
scc :: Graph -> Forest Vertex
scc g = dfs g (reverse (postOrd (transposeG g)))
------------------------------------------------------------
-- Algorithm 5: Classifying edges
------------------------------------------------------------
{-
XXX unused code
tree :: Bounds -> Forest Vertex -> Graph
tree bnds ts = buildG bnds (concat (map flat ts))
where flat (Node v ts') = [ (v, w) | Node w _us <- ts' ]
++ concat (map flat ts')
back :: Graph -> Table Int -> Graph
back g post = mapT select g
where select v ws = [ w | w <- ws, post!v < post!w ]
cross :: Graph -> Table Int -> Table Int -> Graph
cross g pre post = mapT select g
where select v ws = [ w | w <- ws, post!v > post!w, pre!v > pre!w ]
forward :: Graph -> Graph -> Table Int -> Graph
forward g tree' pre = mapT select g
where select v ws = [ w | w <- ws, pre!v < pre!w ] \\ tree' ! v
-}
------------------------------------------------------------
-- Algorithm 6: Finding reachable vertices
------------------------------------------------------------
-- | A list of vertices reachable from a given vertex.
reachable :: Graph -> Vertex -> [Vertex]
reachable g v = preorderF (dfs g [v])
-- | Is the second vertex reachable from the first?
path :: Graph -> Vertex -> Vertex -> Bool
path g v w = w `elem` (reachable g v)
------------------------------------------------------------
-- Algorithm 7: Biconnected components
------------------------------------------------------------
-- | The biconnected components of a graph.
-- An undirected graph is biconnected if the deletion of any vertex
-- leaves it connected.
bcc :: Graph -> Forest [Vertex]
bcc g = (concat . map bicomps . map (do_label g dnum)) forest
  where forest = dff g
        dnum = preArr (bounds g) forest
-- Annotate each DFS-tree vertex with its discovery number and low-point
-- (smallest discovery number reachable via tree edges plus one back edge).
do_label :: Graph -> Table Int -> Tree Vertex -> Tree (Vertex,Int,Int)
do_label g dnum (Node v ts) = Node (v,dnum!v,lv) us
  where us = map (do_label g dnum) ts
        lv = minimum ([dnum!v] ++ [dnum!w | w <- g!v]
                      ++ [lu | Node (_,_,lu) _ <- us])
-- Split the children of an articulation point into separate components.
bicomps :: Tree (Vertex,Int,Int) -> Forest [Vertex]
bicomps (Node (v,_,_) ts)
  = [ Node (v:vs) us | (_,Node vs us) <- map collect ts]
-- Gather the vertices belonging to the current component; a child whose
-- low-point is >= the parent's discovery number starts a new component.
collect :: Tree (Vertex,Int,Int) -> (Int, Tree [Vertex])
collect (Node (v,dv,lv) ts) = (lv, Node (v:vs) cs)
  where collected = map collect ts
        vs = concat [ ws | (lw, Node ws _) <- collected, lw<dv]
        cs = concat [ if lw<dv then us else [Node (v:ws) us]
                    | (lw, Node ws us) <- collected ]
| ekmett/containers | Data/Graph.hs | bsd-3-clause | 15,628 | 2 | 14 | 4,535 | 3,325 | 1,832 | 1,493 | 179 | 4 |
-- | Compatibility shim: re-exports "System.Mem.Weak" under the legacy
-- module name @Weak@.
module Weak (module System.Mem.Weak) where
import System.Mem.Weak
| OS2World/DEV-UTIL-HUGS | oldlib/Weak.hs | bsd-3-clause | 66 | 0 | 5 | 7 | 19 | 13 | 6 | 2 | 0 |
{-# LANGUAGE PatternGuards #-}
module Prolog2 (
ask,
Fact,
Rule(..),
Term(..),
TwoD(..),
Result
) where
import PrologTools
import Data.Maybe
-- Resolve query @q@ under bindings @ret@ against the rule list @rs@,
-- returning every consistent binding set.  @sc@ is a 2-D scope cursor:
-- 'down' enters a sub-derivation, 'next' moves to a sibling, giving each
-- rule attempt a distinct variable scope.  An @Is@ goal short-circuits
-- to the built-in unification handler.
ask :: (TwoD sc, Eq sc, Eq s) =>
  sc -> Result sc s -> Fact sc s -> [Rule sc s] -> [Result sc s]
ask sc ret q rs
  | is@(Is : _) <- q sc = maybeToList $ applyIs is ret
  | otherwise = concat $ zipWith ar (iterate next $ down sc) rs
  where
  ar sc' r = askrule sc' ret q r rs
-- | [Is, t, u] <- q sc =
--  maybeToList $ [([t], Just $ apply u ret)] `merge` ret
-- | (Is : _) <- q sc' = error "debug: Is"
-- Try one rule: unify the query with the rule head (r0), then solve each
-- body goal in its own sibling scope, finally discarding results that
-- satisfy any negated goal.  @Is@ in a rule head is not supported.
askrule :: (TwoD sc, Eq sc, Eq s) =>
  sc -> Result sc s -> Fact sc s -> Rule sc s -> [Rule sc s] -> [Result sc s]
askrule sc ret q (Rule fact _ facts notFacts) rs
--  | [Is, t, u] <- fact (next sc) = maybeToList $ [([t], Just u)] `merge` ret
  | [Is, _, _] <- fact (next sc) = error "askrule: not implemented"
  | otherwise = filter (`checkAll` nots) ret'
  where
  -- Thread each body goal's solutions into the next goal (conjunction).
  ret' = foldl (\rets (sc', f) -> rets >>= \r' -> ask sc' r' f rs) r0 $
    zip (iterate next sc0) $ map (const . ($ sc0)) facts
  sc0 = down sc
  -- Seed bindings from unifying the query with the rule head.
  r0 = case q sc `unification` fact sc0 of
    Nothing -> []
    Just r0' -> maybeToList $ ret `merge` r0'
  nots = concatMap (flip (notAsk sc0) rs . const . ($sc0)) notFacts
| YoshikuniJujo/lojysamban | src/Prolog2.hs | bsd-3-clause | 1,223 | 18 | 14 | 295 | 566 | 294 | 272 | 28 | 2 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
-- Here we use Template Haskell to generate a test suite for the Aeson wrappers
-- from the DSL description of same.
module Data.API.Tools.JSONTests
( jsonTestsTool
, prop_decodesTo
, prop_resultsMatchRoundtrip
) where
import Data.API.JSON
import Data.API.Tools.Combinators
import Data.API.Tools.Datatypes
import Data.API.TH
import Data.API.Types
import qualified Data.Aeson as JS
import Language.Haskell.TH
import Test.QuickCheck
-- | Tool to generate a list of tests of type @[('String', 'Property')]@
-- with the given name. This depends on 'jsonTool' and 'quickCheckTool'.
-- Generates, via Template Haskell, a binding of the given name holding
-- one round-trip property per API node.
jsonTestsTool :: Name -> APITool
jsonTestsTool nm = simpleTool $ \ api -> simpleSigD nm [t| [(String, Property)] |] (props api)
  where
    props api = listE $ map generateProp [ an | ThNode an <- api ]
-- | For an APINode, generate a (String, Property) pair giving the
-- type name and an appropriate instance of the
-- prop_resultsMatchRoundtrip property
generateProp :: APINode -> ExpQ
generateProp an = [e| ($ty, property (prop_resultsMatchRoundtrip :: $(nodeT an) -> Bool)) |]
  where
    -- The label shown in test output is the API type's name.
    ty = stringE $ _TypeName $ anName an
-- | QuickCheck property that a 'Value' decodes to an expected Haskell
-- value, using 'fromJSONWithErrs'
prop_decodesTo :: forall a . (Eq a, FromJSONWithErrs a)
               => JS.Value -> a -> Bool
prop_decodesTo v x = case fromJSONWithErrs v :: Either [(JSONError, Position)] a of
                       Right y | x == y -> True
                       _ -> False
-- | QuickCheck property that Haskell values can be encoded with
-- 'toJSON' and decoded with 'fromJSONWithErrs' to get the original
-- value
prop_resultsMatchRoundtrip :: forall a . (Eq a, JS.ToJSON a, FromJSONWithErrs a )
                           => a -> Bool
prop_resultsMatchRoundtrip x = prop_decodesTo (JS.toJSON x) x
| adinapoli/api-tools | src/Data/API/Tools/JSONTests.hs | bsd-3-clause | 2,048 | 0 | 12 | 523 | 352 | 203 | 149 | 29 | 2 |
-- Ensure that we can derive Functor, Foldable, and Traversable using only
-- an import of Data.Singletons.TH
module FunctorLikeDeriving where
import Data.Kind
import Data.Singletons.Base.TH
-- Promote/singletonize two datatypes, exercising derived Functor,
-- Foldable and Traversable instances (including the empty datatype).
$(singletons [d|
  data T x a
    = MkT1 x a (Maybe a) (Maybe (Maybe a))
    | MkT2 (Maybe x)
    deriving (Functor, Foldable, Traversable)
  data Empty (a :: Type)
    deriving (Functor, Foldable, Traversable)
  |])
| goldfirere/singletons | singletons-base/tests/compile-and-dump/Singletons/FunctorLikeDeriving.hs | bsd-3-clause | 411 | 0 | 7 | 82 | 32 | 21 | 11 | -1 | -1 |
-- Copyright (c) 2014-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is distributed under the terms of a BSD license,
-- found in the LICENSE file.
{-# LANGUAGE RebindableSyntax, OverloadedStrings #-}
module BatchTests (tests) where
import TestTypes
import TestUtils
import MockTAO
import Control.Applicative
import Test.HUnit
import Haxl.Core
import Prelude()
import Haxl.Prelude
import Data.IORef
-- -----------------------------------------------------------------------------
--
-- Test batching over multiple arguments in liftA2
--
-- Each batchingN below pairs an expectation with a Haxl computation;
-- expectResult checks the final value (the mock data source makes each
-- friendsOf list contribute 6, hence the 12s).  The dotted operators
-- (.>, .==, .&&, .||, .++) come from Haxl.Prelude via RebindableSyntax.
batching1 = expectResult 12 batching1_
batching1_ = do
  a <- id1
  b <- id2
  length <$> liftA2 (++) (friendsOf a) (friendsOf b)
--
-- Test batching in mapM (which is really traverse)
--
batching2 = expectResult 12 batching2_
batching2_ = do
  a <- id1
  b <- id2
  fs <- mapM friendsOf [a,b]
  return (sum (map length fs))
--
-- Test batching when we have a monadic bind in each branch
--
batching3 = expectResult 12 batching3_
batching3_ = do
  let a = id1 >>= friendsOf
      b = id2 >>= friendsOf
  length <$> a .++ b
--
-- Test batching over both arguments of (+)
--
batching4 = expectResult 12 batching4_
batching4_ = do
  let a = length <$> (id1 >>= friendsOf)
      b = length <$> (id2 >>= friendsOf)
  a + b
--
-- Test batching over both arguments of (+)
--
batching5 = expectResult 2 batching5_
batching5_ :: Haxl Int
batching5_ = if a .> b then 1 else 2
  where
  a = length <$> (id1 >>= friendsOf)
  b = length <$> (id2 >>= friendsOf)
--
-- Test batching when we perform all batching tests together with sequence
--
batching6 = expectResult [12,12,12,12,2] batching6_
batching6_ = sequence [batching1_,batching2_,batching3_,batching4_,batching5_]
--
-- Ensure if/then/else and bool operators break batching
--
batching7 = expectResult 12 batching7_
batching7_ :: Haxl Int
batching7_ = if a .> 0 then a+b else 0
  where
  a = length <$> (id1 >>= friendsOf)
  b = length <$> (id2 >>= friendsOf)
-- We expect 3 rounds here due to boolean operators
batching8 = expectResult 12 batching8_
batching8_ :: Haxl Int
batching8_ = if (c .== 0) .|| (a .> 0 .&& b .> 0) then a+b else 0
  where
  a = length <$> (id1 >>= friendsOf)
  b = length <$> (id2 >>= friendsOf)
  c = length <$> (id3 >>= friendsOf)
-- (>>) should batch, so we expect one round
batching9 = expectResult 6 batching9_
batching9_ :: Haxl Int
batching9_ = (id1 >>= friendsOf) >> (length <$> (id2 >>= friendsOf))
--
-- Test data caching, numFetches
--
-- simple (one cache hit)
-- expectFetches asserts on the number of *distinct* data fetches performed;
-- the duplicated `nf id3` below should be served from the cache.
caching1 = expectFetches 3 caching1_
caching1_ = nf id1 + nf id2 + nf id3 + nf id3
 where
  nf id = length <$> (id >>= friendsOf)
-- simple, in rounds (no cache hits)
caching2 = expectFetches 3 caching2_
caching2_ = if nf id1 .> 0 then nf id2 + nf id3 else 0
 where
  nf id = length <$> (id >>= friendsOf)
-- rounds (one cache hit)
caching3 = expectFetches 3 caching3_
caching3_ = if nf id1 .> 0 then nf id1 + nf id2 + nf id3 else 0
 where
  nf id = length <$> (id >>= friendsOf)
--
-- Basic sanity check on data-cache re-use
--
-- Run a computation, then rebuild a fresh environment that shares the first
-- environment's caches; the second run must need no new fetch rounds.
cacheReuse future = do
  env <- makeTestEnv future
  expectResultWithEnv 12 batching7_ env
  -- make a new env
  tao <- MockTAO.initGlobalState future
  let st = stateSet tao stateEmpty
  env2 <- initEnvWithData st testinput (caches env)
  -- ensure no more data fetching rounds needed
  expectResultWithEnv 12 batching7_ env2
-- With caching disabled (caching = 0) the cache hit in caching3_ disappears,
-- so 4 fetches are observed instead of 3.
noCaching future = do
  env <- makeTestEnv future
  let env' = env{ flags = (flags env){caching = 0} }
  result <- runHaxl env' caching3_
  assertEqual "result" result 18
  stats <- readIORef (statsRef env)
  assertEqual "fetches" 4 (numFetches stats)
-- withDefault should swallow the failure for the unknown id 101 and
-- substitute the default (empty) list.
exceptionTest1 = expectResult []
  $ withDefault [] $ friendsOf 101
exceptionTest2 = expectResult [7..12] $ liftA2 (++)
  (withDefault [] (friendsOf 101))
  (withDefault [] (friendsOf 2))
-- The same computation must throw the *same* exception whether the fetch
-- blocks (first run) or is served from cache (second run).
deterministicExceptions future = do
  env <- makeTestEnv future
  let haxl =
        sequence [ do _ <- friendsOf =<< id1; throw (NotFound "xxx")
                 , throw (NotFound "yyy")
                 ]
  -- the first time, friendsOf should block, but we should still get the
  -- "xxx" exception.
  r <- runHaxl env $ try haxl
  assertBool "exceptionTest3" $
    case r of
      Left (NotFound "xxx") -> True
      _ -> False
  -- the second time, friendsOf will be cached, and we should get the "xxx"
  -- exception as before.
  r <- runHaxl env $ try haxl
  assertBool "exceptionTest3" $
    case r of
      Left (NotFound "xxx") -> True
      _ -> False
-- Exercises pOr: truth table, left bias for exceptions, and its documented
-- non-determinism when one side is blocked on a fetch.
pOrTests future = do
  env <- makeTestEnv future
  -- Test semantics
  r <- runHaxl env $ do
    a <- return False `pOr` return False
    b <- return False `pOr` return True
    c <- return True `pOr` return False
    d <- return True `pOr` return True
    return (not a && b && c && d)
  assertBool "pOr0" r
  -- pOr is left-biased with respect to exceptions:
  r <- runHaxl env $ try $ return True `pOr` throw (NotFound "foo")
  assertBool "pOr1" $
    case (r :: Either NotFound Bool) of
      Right True -> True
      _ -> False
  r <- runHaxl env $ try $ throw (NotFound "foo") `pOr` return True
  assertBool "pOr2" $
    case (r :: Either NotFound Bool) of
      Left (NotFound "foo") -> True
      _ -> False
  -- pOr is non-deterministic (see also Note [tricky pOr/pAnd])
  let nondet = (do _ <- friendsOf 1; throw (NotFound "foo")) `pOr` return True
  r <- runHaxl env $ try nondet
  assertBool "pOr3" $
    case (r :: Either NotFound Bool) of
      Right True -> True
      _ -> False
  -- next we populate the cache
  _ <- runHaxl env $ friendsOf 1
  -- and now exactly the same pOr again will throw this time:
  r <- runHaxl env $ try nondet
  assertBool "pOr4" $
    case (r :: Either NotFound Bool) of
      Left (NotFound "foo") -> True
      _ -> False
  -- One more test: Blocked/False => Blocked
  r <- runHaxl env $ try $
    (do _ <- friendsOf 2; throw (NotFound "foo")) `pOr` return False
  assertBool "pOr5" $
    case (r :: Either NotFound Bool) of
      Left (NotFound _) -> True
      _ -> False
-- Mirror image of pOrTests for pAnd: truth table, left bias for exceptions,
-- and non-determinism when one side is blocked on a fetch.
pAndTests future = do
  env <- makeTestEnv future
  -- Test semantics
  r <- runHaxl env $ do
    a <- return False `pAnd` return False
    b <- return False `pAnd` return True
    c <- return True `pAnd` return False
    d <- return True `pAnd` return True
    return (not a && not b && not c && d)
  assertBool "pAnd0" r
  -- pAnd is left-biased with respect to exceptions:
  r <- runHaxl env $ try $ return False `pAnd` throw (NotFound "foo")
  assertBool "pAnd1" $
    case (r :: Either NotFound Bool) of
      Right False -> True
      _ -> False
  r <- runHaxl env $ try $ throw (NotFound "foo") `pAnd` return False
  assertBool "pAnd2" $
    case (r :: Either NotFound Bool) of
      Left (NotFound "foo") -> True
      _ -> False
  -- pAnd is non-deterministic (see also Note [tricky pOr/pAnd])
  let nondet =
        (do _ <- friendsOf 1; throw (NotFound "foo")) `pAnd` return False
  r <- runHaxl env $ try nondet
  assertBool "pAnd3" $
    case (r :: Either NotFound Bool) of
      Right False -> True
      _ -> False
  -- next we populate the cache
  _ <- runHaxl env $ friendsOf 1
  -- and now exactly the same pAnd again will throw this time:
  r <- runHaxl env $ try nondet
  assertBool "pAnd4" $
    case (r :: Either NotFound Bool) of
      Left (NotFound "foo") -> True
      _ -> False
  -- One more test: Blocked/True => Blocked
  r <- runHaxl env $ try $
    (do _ <- friendsOf 2; throw (NotFound "foo")) `pAnd` return True
  assertBool "pAnd5" $
    case (r :: Either NotFound Bool) of
      Left (NotFound _) -> True
      _ -> False
-- | Assemble the full HUnit test list. The Bool selects which scheduler
-- mode the mock environment runs in (passed through to makeTestEnv).
tests :: Bool -> Test
tests future = TestList
  [ TestLabel "batching1" $ TestCase (batching1 future)
  , TestLabel "batching2" $ TestCase (batching2 future)
  , TestLabel "batching3" $ TestCase (batching3 future)
  , TestLabel "batching4" $ TestCase (batching4 future)
  , TestLabel "batching5" $ TestCase (batching5 future)
  , TestLabel "batching6" $ TestCase (batching6 future)
  , TestLabel "batching7" $ TestCase (batching7 future)
  , TestLabel "batching8" $ TestCase (batching8 future)
  , TestLabel "batching9" $ TestCase (batching9 future)
  , TestLabel "caching1" $ TestCase (caching1 future)
  , TestLabel "caching2" $ TestCase (caching2 future)
  , TestLabel "caching3" $ TestCase (caching3 future)
  , TestLabel "CacheReuse" $ TestCase (cacheReuse future)
  , TestLabel "NoCaching" $ TestCase (noCaching future)
  , TestLabel "exceptionTest1" $ TestCase (exceptionTest1 future)
  , TestLabel "exceptionTest2" $ TestCase (exceptionTest2 future)
  , TestLabel "deterministicExceptions" $
      TestCase (deterministicExceptions future)
  , TestLabel "pOrTest" $ TestCase (pOrTests future)
  , TestLabel "pAndTest" $ TestCase (pAndTests future)
  ]
| jiayuanmark/Haxl | tests/BatchTests.hs | bsd-3-clause | 8,782 | 0 | 16 | 2,025 | 2,861 | 1,415 | 1,446 | 193 | 6 |
-- | Basic table building for prettier futhark-test output.
module Futhark.Util.Table
( buildTable,
mkEntry,
Entry,
)
where
import Data.List (intercalate, transpose)
import Futhark.Util (maxinum)
import Futhark.Util.Console (color)
import System.Console.ANSI
-- | Column layout for one table: the width of each column (in characters)
-- plus the extra right-padding applied to every cell.
data RowTemplate = RowTemplate [Int] Int deriving (Show)
-- | A table entry. Consists of the content as well a list of
-- SGR commands to color/stylelize the entry.
type Entry = (String, [SGR])
-- | Make a plain table entry: the given text with no SGR styling applied.
mkEntry :: String -> (String, [SGR])
mkEntry text = (text, [])
-- Compute one column width per table column: the length of the longest
-- entry text in that column (styling commands do not count toward width).
buildRowTemplate :: [[Entry]] -> Int -> RowTemplate
buildRowTemplate rows = RowTemplate columnWidths
  where
    columnWidths = map (maxinum . map (length . fst)) (transpose rows)
-- Render one table row: each cell is "| <colored text><padding>", the row is
-- closed with a final vertical bar and a newline.
buildRow :: RowTemplate -> [Entry] -> String
buildRow (RowTemplate widths pad) entries = rendered ++ "\n"
  where
    bar = "\x2502"
    rendered = concatMap renderCell (zip entries widths) ++ bar
    renderCell ((txt, sgr), width) =
      let padding = width - length txt + pad
       in bar ++ " " ++ color sgr txt ++ replicate padding ' '
-- Render a horizontal separator line: one run of box-drawing dashes per
-- column, joined by @sep@, with the final @sep@ swapped for the right corner
-- and the left corner prepended. An empty template yields an empty string.
buildSep :: Char -> Char -> Char -> RowTemplate -> String
buildSep lCorner rCorner sep (RowTemplate widths pad) =
  addCorners (concatMap segment widths)
  where
    segment width = replicate (width + pad + 1) '\x2500' ++ [sep]
    addCorners "" = ""
    addCorners s  = [lCorner] ++ init s ++ [rCorner]
-- | Builds a table from a list of entries and a padding amount that
-- determines padding from the right side of the widest entry in each column.
-- Rows are separated by interior floor lines; the whole table is closed by
-- top and bottom borders built from box-drawing corner characters.
buildTable :: [[Entry]] -> Int -> String
buildTable rows pad = buildTop template ++ body ++ buildBottom template
  where
    template = buildRowTemplate rows pad
    body = intercalate (buildFloor template) (map (buildRow template) rows)
    buildTop rt = buildSep '\x250C' '\x2510' '\x252C' rt ++ "\n"
    buildFloor rt = buildSep '\x251C' '\x2524' '\x253C' rt ++ "\n"
    buildBottom = buildSep '\x2514' '\x2518' '\x2534'
| diku-dk/futhark | src/Futhark/Util/Table.hs | isc | 1,993 | 0 | 14 | 405 | 594 | 319 | 275 | 36 | 2 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
module Analysis.ParallelIndependent where
import Abstract.Category
import Abstract.Category.Adhesive
import Abstract.Category.FindMorphism
import Abstract.Category.Finitary
import Abstract.Category.Limit
import Abstract.Rewriting.DPO
import Abstract.Rewriting.DPO.DiagramAlgorithms
-- | Algorithm used to determine independence between two rules
-- Cond1 -> 3 pullbacks and two iso tests
-- Cond2 -> 2 pullbacks and one iso test
-- Cond3 -> classical delete-use
data Algorithm = Cond1 | Cond2 | Cond3
-- | Whether the two transformations are checked for parallel independence
-- (as given) or sequential independence (first production inverted).
data IndependenceType = Parallel | Sequentially deriving (Eq, Show)
-- | Checks if two transformations are independent (just delete-use),
-- works with delete-use or pullback checking.
--
-- Sequential independence is reduced to the parallel case by inverting the
-- first production (NACs dropped). The result is the negation of "some
-- jointly surjective pair of matches conflicts" under the chosen algorithm.
isIndependent :: (E'PairCofinitary morph, DPO morph, Complete morph) =>
  IndependenceType -> Algorithm -> MorphismsConfig morph -> Production morph -> Production morph -> Bool
isIndependent ind algorithm conf p1' p2 = not $ conflict algorithm
  where
    p1 = case ind of
           Parallel -> p1'
           Sequentially -> invertProductionWithoutNacs p1'
    satisfyingPairs = findJointSurjectiveApplicableMatches conf p1 p2
    conflict Cond1 = any (cond1 p1 p2) satisfyingPairs
    conflict Cond2 = any (cond2 p1 p2) satisfyingPairs
    -- Cond3: classical delete-use, checked in both directions.
    conflict Cond3 = any (\(m1,m2) -> isDeleteUse conf p1 (m1,m2) || isDeleteUse conf p2 (m2,m1)) satisfyingPairs
-- | Checks independence between transformations via 2 pullbacks
--
-- Builds the pullback of the matches (L1xL2 over G) and of the composed
-- morphisms through the left-hand sides (K1xK2 over G), then tests whether
-- an isomorphism commutes over the resulting cospan; independence holds
-- when no such isomorphism exists is *not* the case — i.e. a conflict is
-- reported when the cospan commuters list is empty. See cond1 for the
-- 3-pullback variant.
cond2 :: forall morph. (MAdhesive morph, FindMorphism morph, Complete morph) => Production morph -> Production morph -> (morph,morph) -> Bool
cond2 p1 p2 (m1,m2) = Prelude.null (findCospanCommuters (iso @morph) k1k2ToG l1l2ToG)
  where
    (_,pb1) = calculatePullback m1 m2
    a1 = m1 <&> leftMorphism p1
    a2 = m2 <&> leftMorphism p2
    (_,pb2) = calculatePullback a1 a2
    k1k2ToG = a1 <&> pb2
    l1l2ToG = m1 <&> pb1
-- | Checks independence between transformations via 3 pullbacks
--
-- A conflict exists unless both induced pullback projections onto the
-- left-hand sides are isomorphisms (the two iso tests of Cond1).
cond1 :: (MAdhesive morph, FindMorphism morph, Complete morph) => Production morph -> Production morph -> (morph,morph) -> Bool
cond1 p1 p2 (m1,m2) = not (isIsomorphism a && isIsomorphism b)
  where
    (pb2,pb1) = calculatePullback m1 m2
    (a,_) = calculatePullbackAlongM (leftMorphism p1) pb1
    (b,_) = calculatePullbackAlongM (leftMorphism p2) pb2
| rodrigo-machado/verigraph | src/library/Analysis/ParallelIndependent.hs | gpl-3.0 | 2,408 | 0 | 12 | 479 | 630 | 338 | 292 | 35 | 4 |
-- Compiler-test fixture (uu-helium): a lambda with refutable patterns — the
-- literal pattern 1 and the non-empty-list pattern (x:xs). Kept byte-for-byte;
-- the unusual shape is the point of the test.
module Lambda1 where
main :: Int
main = (\1 (x:xs) -> x) 1 [3]
| roberth/uu-helium | test/correct/Lambda1.hs | gpl-3.0 | 64 | 0 | 9 | 15 | 39 | 23 | 16 | 3 | 1 |
module Sound.Tidal.Utils where
{-
Utils.hs - A library of handy Haskell utility functions
Copyright (C) 2020, Alex McLean and contributors
This library is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this library. If not, see <http://www.gnu.org/licenses/>.
-}
import Data.List (delete)
import System.IO (hPutStrLn, stderr)
-- | Print a message on the standard error stream.
writeError :: String -> IO ()
writeError msg = hPutStrLn stderr msg
-- | Apply the same function to both components of a pair.
mapBoth :: (a -> a) -> (a,a) -> (a,a)
mapBoth g (x, y) = (g x, g y)
-- | Apply a function to all four components of a nested pair of pairs
-- (e.g. the start/stop times of a part and its whole).
mapPartTimes :: (a -> a) -> ((a,a),(a,a)) -> ((a,a),(a,a))
mapPartTimes g ((a, b), (c, d)) = ((g a, g b), (g c, g d))
-- | Apply a function to the first component of a pair only.
mapFst :: (a -> b) -> (a, c) -> (b, c)
mapFst g (left, right) = (g left, right)
-- | Apply a function to the second component of a pair only.
mapSnd :: (a -> b) -> (c, a) -> (c, b)
mapSnd g (left, right) = (left, g right)
-- | The difference between the second and first component of a pair.
delta :: Num a => (a, a) -> a
delta (start, end) = end - start
-- | The midpoint of two values
mid :: Fractional a => (a,a) -> a
mid (lo, hi) = lo + (hi - lo) / 2
-- | Remove the elements the two lists have in common (matching each
-- occurrence at most once), returning what is left of each list.
removeCommon :: Eq a => [a] -> [a] -> ([a],[a])
removeCommon [] ys = ([], ys)
removeCommon xs [] = (xs, [])
removeCommon (x:xs) ys
  | x `elem` ys = removeCommon xs (delete x ys)
  | otherwise   = let (restL, restR) = removeCommon xs ys
                  in (x : restL, restR)
-- | Total version of 'read': Just the parsed value when the whole input
-- (modulo trailing whitespace accepted by 'lex') parses unambiguously,
-- Nothing otherwise.
readMaybe :: (Read a) => String -> Maybe a
readMaybe str =
  case [val | (val, rest) <- reads str, ("", "") <- lex rest] of
    [unique] -> Just unique
    _        -> Nothing
{- | like `!!` selects @n@th element from xs, but wraps over at the end of @xs@
>>> map ((!!!) [1,3,5]) [0,1,2,3,4,5]
[1,3,5,1,3,5]
-}
(!!!) :: [a] -> Int -> a
xs !!! n = xs !! (n `mod` length xs)
{- | Safer version of !! --}
nth :: Int -> [a] -> Maybe a
nth i list = case (i, list) of
  (_, [])     -> Nothing
  (0, y : _)  -> Just y
  (k, _ : ys) -> nth (k - 1) ys
-- | Running sums of a list; the empty list yields the empty list.
-- (scanl1 (+) is exactly the original explicit recursion: the first element
-- seeds the scan, and [] maps to [].)
accumulate :: Num t => [t] -> [t]
accumulate = scanl1 (+)
{- | enumerate a list of things, pairing each element with its 0-based index
>>> enumerate ["foo","bar","baz"]
[(0,"foo"), (1,"bar"), (2,"baz")]
-}
-- Fix: the previous haddock example claimed indices start at 1, but the
-- implementation zips with [0..]; the example now matches the code.
enumerate :: [a] -> [(Int, a)]
enumerate = zip [0..]
{- | split given list of @a@ by given single a, e.g.
>>> wordsBy (== ':') "bd:3"
["bd", "3"]
-}
wordsBy :: (a -> Bool) -> [a] -> [[a]]
wordsBy p input = case dropWhile p input of
  [] -> []
  (c : cs) -> (c : word) : wordsBy p (drop 1 remainder)
    where (word, remainder) = break p cs
-- | Left-biased choice between two Maybe values: keep the first when it is
-- Just, otherwise fall back to the second.
matchMaybe :: Maybe a -> Maybe a -> Maybe a
matchMaybe (Just v) _        = Just v
matchMaybe Nothing  fallback = fallback
-- Available in Data.Either, but only since 4.10
fromRight :: b -> Either a b -> b
fromRight fallback e = case e of
  Right b -> b
  Left _  -> fallback
| bgold-cosmos/Tidal | src/Sound/Tidal/Utils.hs | gpl-3.0 | 2,924 | 0 | 11 | 745 | 1,059 | 581 | 478 | 49 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Shelduck.LogParser where
import Control.Lens
import Data.Aeson
import Data.ByteString.Lazy
import Data.HashMap.Strict
import Data.Maybe
import qualified Data.Text as T
import Data.Text.Encoding
import Prelude hiding (lookup)
-- | A single log line: either a successfully parsed JSON object, or the raw
-- text when parsing failed.
data LogLine = Data (HashMap T.Text Value) | UnParseable T.Text
-- | The event a log line describes, recognized by which key the JSON object
-- carries (see 'verb').
data LogLineAction = PostRequestMade
                   | PostResponseReceived
                   | RoundTrip
                   | CorrectTopicReceived
                   | IncorrectTopicReceived
                   | UnknownAction T.Text
                   | Retry
                   | NoAction
                   | HttpExceptionDuringTest
-- Human-readable descriptions used when printing parsed log events.
instance Show LogLineAction where
  show PostRequestMade = "post request made"
  show PostResponseReceived = "post response received"
  show RoundTrip = "finished waiting for webhook"
  show CorrectTopicReceived = "correct topic received"
  show IncorrectTopicReceived = "incorrect topic received"
  show Retry = "retry performed"
  show (UnknownAction b) = mconcat ["unknown action: ", show b]
  show NoAction = "log line contained no action"
  show HttpExceptionDuringTest = "http exception detected"
-- | Classify a log line by which JSON key it carries; the first matching key
-- (in the order below) wins, unparseable lines map to 'UnknownAction'.
verb :: LogLine -> LogLineAction
verb (UnParseable raw) = UnknownAction raw
verb (Data fields) = resolve candidates
  where candidates = [ PostRequestMade          <$ lookup "params" fields
                     , PostResponseReceived     <$ lookup "status" fields
                     , RoundTrip                <$ lookup "duration" fields
                     , CorrectTopicReceived     <$ lookup "good_topic" fields
                     , IncorrectTopicReceived   <$ lookup "bad_topic" fields
                     , Retry                    <$ lookup "retry" fields
                     , HttpExceptionDuringTest  <$ lookup "http_exception" fields
                     ]
-- | First recognized action in the candidate list, or 'NoAction' when every
-- candidate is Nothing.
resolve :: [Maybe LogLineAction] -> LogLineAction
resolve = fromMaybe NoAction . listToMaybe . catMaybes
-- | Parse one raw log line: UTF-8 encode, attempt a JSON decode, and wrap the
-- result ('Data' on success, 'UnParseable' with the original text otherwise).
toLogLine :: T.Text -> LogLine
toLogLine raw = maybe (UnParseable raw) Data decoded
  where decoded = decode ((fromStrict . encodeUtf8) raw)
| bobjflong/scalpel | src/Shelduck/LogParser.hs | apache-2.0 | 2,190 | 0 | 10 | 716 | 474 | 247 | 227 | 46 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module Ros.Sensor_msgs.CompressedImage where
import qualified Prelude as P
import Prelude ((.), (+), (*))
import qualified Data.Typeable as T
import Control.Applicative
import Ros.Internal.RosBinary
import Ros.Internal.Msg.MsgInfo
import qualified GHC.Generics as G
import qualified Data.Default.Generics as D
import Ros.Internal.Msg.HeaderSupport
import qualified Data.Vector.Storable as V
import qualified Data.Word as Word
import qualified Ros.Std_msgs.Header as Header
import Lens.Family.TH (makeLenses)
import Lens.Family (view, set)
-- NOTE(review): this module appears to be machine-generated from the ROS
-- sensor_msgs/CompressedImage message definition (see the embedded MD5);
-- keep edits to regeneration rather than hand changes.
data CompressedImage = CompressedImage { _header :: Header.Header
                                       , _format :: P.String
                                       , __data :: V.Vector Word.Word8
                                       } deriving (P.Show, P.Eq, P.Ord, T.Typeable, G.Generic)
$(makeLenses ''CompressedImage)
-- Serialization mirrors field declaration order: header, format, data.
instance RosBinary CompressedImage where
  put obj' = put (_header obj') *> put (_format obj') *> put (__data obj')
  get = CompressedImage <$> get <*> get <*> get
  putMsg = putStampedMsg
instance HasHeader CompressedImage where
  getSequence = view (header . Header.seq)
  getFrame = view (header . Header.frame_id)
  getStamp = view (header . Header.stamp)
  setSequence = set (header . Header.seq)
instance MsgInfo CompressedImage where
  sourceMD5 _ = "8f7a12909da2c9d3332d540a0977563f"
  msgTypeName _ = "sensor_msgs/CompressedImage"
instance D.Default CompressedImage
| acowley/roshask | msgs/Sensor_msgs/Ros/Sensor_msgs/CompressedImage.hs | bsd-3-clause | 1,588 | 1 | 10 | 322 | 402 | 236 | 166 | 37 | 0 |
-- |
-- Module : Crypto.Hash.Tiger
-- License : BSD-style
-- Maintainer : Vincent Hanquez <[email protected]>
-- Stability : experimental
-- Portability : unknown
--
-- Module containing the binding functions to work with the
-- Tiger cryptographic hash.
--
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeFamilies #-}
module Crypto.Hash.Tiger ( Tiger (..) ) where
import Crypto.Hash.Types
import Foreign.Ptr (Ptr)
import Data.Data
import Data.Word (Word8, Word32)
-- | Tiger cryptographic hash algorithm
-- | Tiger cryptographic hash algorithm
data Tiger = Tiger
    deriving (Show,Data)
-- Sizes (block 64 bytes, digest 24 bytes, context 96 bytes) are mirrored at
-- both the type level and the value level; the C context struct must match.
instance HashAlgorithm Tiger where
    type HashBlockSize           Tiger = 64
    type HashDigestSize          Tiger = 24
    type HashInternalContextSize Tiger = 96
    hashBlockSize  _          = 64
    hashDigestSize _          = 24
    hashInternalContextSize _ = 96
    hashInternalInit          = c_tiger_init
    hashInternalUpdate        = c_tiger_update
    hashInternalFinalize      = c_tiger_finalize
-- FFI bindings into the bundled C implementation; `update` is a safe call
-- since it may run long on big inputs, init/finalize are unsafe.
foreign import ccall unsafe "cryptonite_tiger_init"
    c_tiger_init :: Ptr (Context a)-> IO ()
foreign import ccall "cryptonite_tiger_update"
    c_tiger_update :: Ptr (Context a) -> Ptr Word8 -> Word32 -> IO ()
foreign import ccall unsafe "cryptonite_tiger_finalize"
    c_tiger_finalize :: Ptr (Context a) -> Ptr (Digest a) -> IO ()
| vincenthz/cryptonite | Crypto/Hash/Tiger.hs | bsd-3-clause | 1,430 | 0 | 10 | 341 | 275 | 155 | 120 | 27 | 0 |
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Control/Monad/Trans/Resource.hs" #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE Safe #-}
-- | Allocate resources which are guaranteed to be released.
--
-- For more information, see <https://www.fpcomplete.com/user/snoyberg/library-documentation/resourcet>.
--
-- One point to note: all register cleanup actions live in the @IO@ monad, not
-- the main monad. This allows both more efficient code, and for monads to be
-- transformed.
module Control.Monad.Trans.Resource
( -- * Data types
ResourceT
, ResIO
, ReleaseKey
-- * Unwrap
, runResourceT
-- * Special actions
, resourceForkWith
, resourceForkIO
-- * Monad transformation
, transResourceT
, joinResourceT
-- * Registering/releasing
, allocate
, register
, release
, unprotect
, resourceMask
-- * Type class/associated types
, MonadResource (..)
, MonadResourceBase
-- ** Low-level
, InvalidAccess (..)
-- * Re-exports
, MonadBaseControl
-- * Internal state
-- $internalState
, InternalState
, getInternalState
, runInternalState
, withInternalState
, createInternalState
, closeInternalState
-- * Backwards compatibility
, ExceptionT (..)
, runExceptionT
, runExceptionT_
, runException
, runException_
, MonadThrow (..)
, monadThrow
) where
import qualified Data.IntMap as IntMap
import Control.Exception (SomeException, throw)
import Control.Monad.Trans.Control
( MonadBaseControl (..), liftBaseDiscard, control )
import qualified Data.IORef as I
import Control.Monad.Base (MonadBase, liftBase)
import Control.Applicative (Applicative (..))
import Control.Monad.IO.Class (MonadIO (..))
import Control.Monad (liftM)
import qualified Control.Exception as E
import Data.Monoid (Monoid)
import qualified Control.Exception.Lifted as L
import Control.Monad.Trans.Resource.Internal
import Control.Concurrent (ThreadId, forkIO)
import Data.Functor.Identity (Identity, runIdentity)
import Control.Monad.Catch (MonadThrow, throwM)
import Control.Monad.Catch.Pure (CatchT, runCatchT)
import Data.Acquire.Internal (ReleaseType (..))
-- | Register some action that will be called precisely once, either when
-- 'runResourceT' is called, or when the 'ReleaseKey' is passed to 'release'.
--
-- Since 0.3.0
register :: MonadResource m => IO () -> m ReleaseKey
register cleanup = liftResourceT (registerRIO cleanup)
-- | Call a release action early, and deregister it from the list of cleanup
-- actions to be performed.
--
-- Since 0.3.0
release :: MonadIO m => ReleaseKey -> m ()
release (ReleaseKey stateRef key) =
    liftIO (release' stateRef key runCleanup)
  where
    -- Run the cleanup action if one is still registered; a no-op otherwise.
    runCleanup = maybe (return ()) id
-- | Unprotect resource from cleanup actions, this allowes you to send
-- resource into another resourcet process and reregister it there.
-- It returns an release action that should be run in order to clean
-- resource or Nothing in case if resource is already freed.
--
-- Since 0.4.5
unprotect :: MonadIO m => ReleaseKey -> m (Maybe (IO ()))
unprotect (ReleaseKey stateRef key) = liftIO (release' stateRef key return)
-- | Perform some allocation, and automatically register a cleanup action.
--
-- This is almost identical to calling the allocation and then
-- @register@ing the release action, but this properly handles masking of
-- asynchronous exceptions.
--
-- Since 0.3.0
allocate :: MonadResource m
         => IO a -- ^ allocate
         -> (a -> IO ()) -- ^ free resource
         -> m (ReleaseKey, a)
allocate acquire free = liftResourceT (allocateRIO acquire free)
-- | Perform asynchronous exception masking.
--
-- This is more general than @Control.Exception.mask@, yet more efficient
-- than @Control.Exception.Lifted.mask@.
--
-- Since 0.3.0
resourceMask :: MonadResource m => ((forall a. ResourceT IO a -> ResourceT IO a) -> ResourceT IO b) -> m b
resourceMask r = liftResourceT (resourceMaskRIO r)
-- Acquire and register under mask_, so no async exception can strike between
-- obtaining the resource and recording its release action.
allocateRIO :: IO a -> (a -> IO ()) -> ResourceT IO (ReleaseKey, a)
allocateRIO acquire rel = ResourceT $ \istate -> liftIO $ E.mask_ $ do
    a <- acquire
    key <- register' istate $ rel a
    return (key, a)
registerRIO :: IO () -> ResourceT IO ReleaseKey
registerRIO rel = ResourceT $ \istate -> liftIO $ register' istate rel
-- Lift Control.Exception.mask into ResourceT IO: the restore callback is
-- rewrapped so the caller's inner ResourceT actions run unmasked against the
-- same internal state.
resourceMaskRIO :: ((forall a. ResourceT IO a -> ResourceT IO a) -> ResourceT IO b) -> ResourceT IO b
resourceMaskRIO f = ResourceT $ \istate -> liftIO $ E.mask $ \restore ->
    let ResourceT f' = f (go restore)
     in f' istate
  where
    go :: (forall a. IO a -> IO a) -> (forall a. ResourceT IO a -> ResourceT IO a)
    go r (ResourceT g) = ResourceT (\i -> r (g i))
release' :: I.IORef ReleaseMap
         -> Int
         -> (Maybe (IO ()) -> IO a)
         -> IO a
-- Atomically remove the action for @key@ from the map, then hand it (or
-- Nothing) to the continuation; mask_ keeps removal and execution together.
release' istate key act = E.mask_ $ do
    maction <- I.atomicModifyIORef istate lookupAction
    act maction
  where
    lookupAction rm@(ReleaseMap next rf m) =
        case IntMap.lookup key m of
            Nothing -> (rm, Nothing)
            Just action ->
                ( ReleaseMap next rf $ IntMap.delete key m
                , Just (action ReleaseEarly)
                )
    -- We tried to call release, but since the state is already closed, we
    -- can assume that the release action was already called. Previously,
    -- this threw an exception, though given that @release@ can be called
    -- from outside the context of a @ResourceT@ starting with version
    -- 0.4.4, it's no longer a library misuse or a library bug.
    lookupAction ReleaseMapClosed = (ReleaseMapClosed, Nothing)
-- | Unwrap a 'ResourceT' transformer, and call all registered release actions.
--
-- Note that there is some reference counting involved due to 'resourceForkIO'.
-- If multiple threads are sharing the same collection of resources, only the
-- last call to @runResourceT@ will deallocate the resources.
--
-- Since 0.3.0
runResourceT :: MonadBaseControl IO m => ResourceT m a -> m a
-- Cleanup runs exactly once: with ReleaseException if the body throws,
-- otherwise with ReleaseNormal after the body completes; mask prevents an
-- async exception from separating the body from its cleanup.
runResourceT (ResourceT r) = control $ \run -> do
    istate <- createInternalState
    E.mask $ \restore -> do
        res <- restore (run (r istate)) `E.onException`
            stateCleanup ReleaseException istate
        stateCleanup ReleaseNormal istate
        return res
-- Internal bracket with *distinct* normal vs. exceptional cleanups; note the
-- exceptional cleanup runs only via onException, the normal one only on
-- successful completion.
bracket_ :: MonadBaseControl IO m
         => IO () -- ^ allocate
         -> IO () -- ^ normal cleanup
         -> IO () -- ^ exceptional cleanup
         -> m a
         -> m a
bracket_ alloc cleanupNormal cleanupExc inside =
    control $ \run -> E.mask $ \restore -> do
        alloc
        res <- restore (run inside) `E.onException` cleanupExc
        cleanupNormal
        return res
-- Lifted Control.Exception.finally for MonadBaseControl IO stacks.
finally :: MonadBaseControl IO m => m a -> IO () -> m a
finally action cleanup =
    control $ \run -> E.finally (run action) cleanup
-- | This function mirrors @join@ at the transformer level: it will collapse
-- two levels of @ResourceT@ into a single @ResourceT@.
--
-- Since 0.4.6
joinResourceT :: ResourceT (ResourceT m) a
              -> ResourceT m a
-- Both layers are run against the same internal state.
joinResourceT (ResourceT f) = ResourceT $ \r -> unResourceT (f r) r
-- | For backwards compatibility.
type ExceptionT = CatchT
-- | For backwards compatibility.
runExceptionT :: ExceptionT m a -> m (Either SomeException a)
runExceptionT = runCatchT
-- | Same as 'runExceptionT', but immediately 'E.throw' any exception returned.
--
-- Since 0.3.0
runExceptionT_ :: Monad m => ExceptionT m a -> m a
runExceptionT_ = liftM (either E.throw id) . runExceptionT
-- | Run an @ExceptionT Identity@ stack.
--
-- Since 0.4.2
runException :: ExceptionT Identity a -> Either SomeException a
runException = runIdentity . runExceptionT
-- | Run an @ExceptionT Identity@ stack, but immediately 'E.throw' any exception returned.
--
-- Since 0.4.2
runException_ :: ExceptionT Identity a -> a
runException_ = runIdentity . runExceptionT_
-- | Introduce a reference-counting scheme to allow a resource context to be
-- shared by multiple threads. Once the last thread exits, all remaining
-- resources will be released.
--
-- The first parameter is a function which will be used to create the
-- thread, such as @forkIO@ or @async@.
--
-- Note that abuse of this function will greatly delay the deallocation of
-- registered resources. This function should be used with care. A general
-- guideline:
--
-- If you are allocating a resource that should be shared by multiple threads,
-- and will be held for a long time, you should allocate it at the beginning of
-- a new @ResourceT@ block and then call @resourceForkWith@ from there.
--
-- @since 1.1.9
resourceForkWith :: MonadBaseControl IO m => (IO () -> IO a) -> ResourceT m () -> ResourceT m a
resourceForkWith g (ResourceT f) = ResourceT $ \r -> L.mask $ \restore ->
    -- We need to make sure the counter is incremented before this call
    -- returns. Otherwise, the parent thread may call runResourceT before
    -- the child thread increments, and all resources will be freed
    -- before the child gets called.
    -- (Outer bracket_: the parent bumps the count via stateAlloc; the child
    -- thread's inner bracket_ performs the matching cleanup when it exits.)
    bracket_
        (stateAlloc r)
        (return ())
        (return ())
        (liftBaseDiscard g $ bracket_
            (return ())
            (stateCleanup ReleaseNormal r)
            (stateCleanup ReleaseException r)
            (restore $ f r))
-- | Launch a new reference counted resource context using @forkIO@.
--
-- This is defined as @resourceForkWith forkIO@.
--
-- @since 0.3.0
resourceForkIO :: MonadBaseControl IO m => ResourceT m () -> ResourceT m ThreadId
resourceForkIO = resourceForkWith forkIO
-- | A @Monad@ which can be used as a base for a @ResourceT@.
--
-- A @ResourceT@ has some restrictions on its base monad:
--
-- * @runResourceT@ requires an instance of @MonadBaseControl IO@.
-- * @MonadResource@ requires an instance of @MonadThrow@, @MonadIO@, and @Applicative@.
--
-- While any instance of @MonadBaseControl IO@ should be an instance of the
-- other classes, this is not guaranteed by the type system (e.g., you may have
-- a transformer in your stack with does not implement @MonadThrow@). Ideally,
-- we would like to simply create an alias for the five type classes listed,
-- but this is not possible with GHC currently.
--
-- Instead, this typeclass acts as a proxy for the other five. Its only purpose
-- is to make your type signatures shorter.
--
-- Note that earlier versions of @conduit@ had a typeclass @ResourceIO@. This
-- fulfills much the same role.
--
-- Since 0.3.2
type MonadResourceBase m = (MonadBaseControl IO m, MonadThrow m, MonadBase IO m, MonadIO m, Applicative m)
-- $internalState
--
-- A @ResourceT@ internally is a modified @ReaderT@ monad transformer holding
-- onto a mutable reference to all of the release actions still remaining to be
-- performed. If you are building up a custom application monad, it may be more
-- efficient to embed this @ReaderT@ functionality directly in your own monad
-- instead of wrapping around @ResourceT@ itself. This section provides you the
-- means of doing so.
-- | Create a new internal state. This state must be closed with
-- @closeInternalState@. It is your responsibility to ensure exception safety.
-- Caveat emptor!
--
-- Since 0.4.9
createInternalState :: MonadBase IO m => m InternalState
-- NOTE(review): maxBound / (minBound + 1) seed the ReleaseMap's key counter
-- and reference count respectively — presumably keys count down from
-- maxBound; confirm against ReleaseMap's definition in .Internal.
createInternalState = liftBase
                    $ I.newIORef
                    $ ReleaseMap maxBound (minBound + 1) IntMap.empty
-- | Close an internal state created by @createInternalState@.
--
-- Since 0.4.9
closeInternalState :: MonadBase IO m => InternalState -> m ()
closeInternalState = liftBase . stateCleanup ReleaseNormal
-- | Get the internal state of the current @ResourceT@.
--
-- Since 0.4.6
getInternalState :: Monad m => ResourceT m InternalState
getInternalState = ResourceT return
-- | The internal state held by a @ResourceT@ transformer.
--
-- Since 0.4.6
type InternalState = I.IORef ReleaseMap
-- | Unwrap a @ResourceT@ using the given @InternalState@.
--
-- Since 0.4.6
runInternalState :: ResourceT m a -> InternalState -> m a
runInternalState = unResourceT
-- | Run an action in the underlying monad, providing it the @InternalState@.
--
-- Since 0.4.6
withInternalState :: (InternalState -> m a) -> ResourceT m a
withInternalState = ResourceT
-- | Backwards compatibility: alias for 'throwM' from Control.Monad.Catch.
monadThrow :: (E.Exception e, MonadThrow m) => e -> m a
monadThrow = throwM
| phischu/fragnix | tests/packages/scotty/Control.Monad.Trans.Resource.hs | bsd-3-clause | 12,558 | 0 | 21 | 2,669 | 2,264 | 1,249 | 1,015 | 167 | 3 |
{-# LANGUAGE DeriveGeneric #-}
module Tinc.Config (
getAdditionalDependencies
, configFile
) where
import GHC.Generics
import Hpack.Config
import Hpack.Yaml
import System.Directory
import Tinc.Fail
-- | Shape of the optional tinc.yaml file: a single @dependencies@ section,
-- decoded via hpack's generic FromValue machinery.
data Config = Config {
  configDependencies :: Dependencies
} deriving (Eq, Show, Generic)
instance FromValue Config
-- | Name of the optional per-project configuration file, looked up in the
-- current working directory.
configFile :: FilePath
configFile = "tinc.yaml"
-- | Read the extra dependencies from tinc.yaml, or return the empty set when
-- the file does not exist.
getAdditionalDependencies :: IO Dependencies
getAdditionalDependencies = do
  present <- doesFileExist configFile
  if present
    then readConfig
    else return mempty
-- Decode tinc.yaml, then decode the YAML value into a Config; any failure in
-- either step aborts via `die`, otherwise the dependencies are extracted.
readConfig :: IO Dependencies
readConfig = do
  parsed <- decodeYaml configFile
  case parsed >>= decodeValue . snd of
    Left err -> die err
    Right ok -> return (configDependencies (fst ok))
| sol/tinc | src/Tinc/Config.hs | mit | 732 | 0 | 10 | 156 | 172 | 94 | 78 | 23 | 2 |
module System.Mesos.Raw.ExecutorId where
import System.Mesos.Internal
-- | Raw pointer to a C++-side @ExecutorID@.
type ExecutorIDPtr = Ptr ExecutorID
-- C bridge functions from ext/types.h: construct, read back, and free
-- the C++ representation of an ExecutorID.
foreign import ccall unsafe "ext/types.h toExecutorID" c_toExecutorID :: ToID ExecutorIDPtr
foreign import ccall unsafe "ext/types.h fromExecutorID" c_fromExecutorID :: FromID ExecutorIDPtr
foreign import ccall unsafe "ext/types.h destroyExecutorID" c_destroyExecutorID :: ExecutorIDPtr -> IO ()
instance CPPValue ExecutorID where
  -- marshal: copy the id string into C memory and build the C++ object
  marshal x = do
    (strp, l) <- cstring $ executorIDId' x
    liftIO $ c_toExecutorID strp $ fromIntegral l
  -- unmarshal: read the id string back out of the C++ object
  unmarshal p = fmap ExecutorID $ do
    ptrPtr <- alloc
    len <- liftIO $ c_fromExecutorID p ptrPtr
    peekCString' (ptrPtr, len)
  destroy = c_destroyExecutorID
| Atidot/hs-mesos | src/System/Mesos/Raw/ExecutorId.hs | mit | 730 | 2 | 12 | 130 | 190 | 96 | 94 | 15 | 0 |
-- |
-- Module : $Header$
-- Copyright : (c) 2013-2015 Galois, Inc.
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
{-# LANGUAGE Safe #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE DeriveGeneric #-}
module Cryptol.Utils.PP where
import Cryptol.ModuleSystem.Name
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import qualified Data.Monoid as M
import Data.String (IsString(..))
import qualified Text.PrettyPrint as PJ
import GHC.Generics (Generic)
import Control.DeepSeq
-- | How to display names.
newtype NameEnv = NameEnv (Map QName NameInfo)
  deriving (Show)
-- | Display information for one name: what to print, and whether the
-- name should be rendered infix.
data NameInfo = NameInfo { niDisp :: QName
                         , niInfix :: Bool
                         } deriving (Show)
-- | Build a 'NameEnv' from an association list.
mkNameEnv :: [(QName,NameInfo)] -> NameEnv
mkNameEnv = NameEnv . Map.fromList
-- | Compose two naming environments.
-- Each display name of the left environment is re-resolved through the
-- right one; names absent from the right environment stay unchanged.
extend :: NameEnv -> NameEnv -> NameEnv
extend (NameEnv l) (NameEnv r) = NameEnv (lkp `fmap` l)
  where
  lkp ni = Map.findWithDefault ni (niDisp ni) r
-- | Look up display information for a name; defaults to the name
-- itself, rendered non-infix.
getNameInfo :: QName -> NameEnv -> NameInfo
getNameInfo n (NameEnv e) = Map.findWithDefault (NameInfo n False) n e
instance M.Monoid NameEnv where
  mempty = NameEnv Map.empty
  -- left-biased union: entries from the left argument win
  mappend (NameEnv a) (NameEnv b) = NameEnv (Map.union a b)
-- | A pretty-printer document, parameterized by the naming environment
-- used to render names.
newtype Doc = Doc (NameEnv -> PJ.Doc) deriving (Generic)
instance NFData Doc
-- | Render a 'Doc' with the given naming environment.
runDoc :: NameEnv -> Doc -> PJ.Doc
runDoc names (Doc f) = f names
instance Show Doc where
  show d = show (runDoc M.mempty d)
instance IsString Doc where
  fromString = text
-- | Render a 'Doc' with the empty naming environment.
render :: Doc -> String
render d = PJ.render (runDoc M.mempty d)
-- | Things that can be pretty-printed at a given precedence level.
class PP a where
  ppPrec :: Int -> a -> Doc
-- | Pretty-print at the top (loosest) precedence.
pp :: PP a => a -> Doc
pp = ppPrec 0
-- | Pretty-print to a 'String'.
pretty :: PP a => a -> String
pretty = show . pp
-- | Conditionally wrap a document in parentheses.
optParens :: Bool -> Doc -> Doc
optParens b body | b = parens body
                 | otherwise = body
-- | Information about associativity.
data Assoc = LeftAssoc | RightAssoc | NonAssoc
  deriving (Show,Eq,Generic)
instance NFData Assoc
-- | Information about an infix expression of some sort.
data Infix op thing = Infix
  { ieOp :: op -- ^ operator
  , ieLeft :: thing -- ^ left argument
  , ieRight :: thing -- ^ right argument
  , iePrec :: Int -- ^ operator precedence
  , ieAssoc :: Assoc -- ^ operator associativity
  }
-- | Separate documents with commas, allowing line breaks.
commaSep :: [Doc] -> Doc
commaSep = fsep . punctuate comma
-- | Pretty print an infix expression of some sort.
-- Sub-expressions are parenthesized when their precedence is lower than
-- the operator's, or equal but on the wrong side for its associativity.
ppInfix :: (PP thing, PP op)
        => Int -- ^ Non-infix leaves are printed with this precedence
        -> (thing -> Maybe (Infix op thing))
        -- ^ pattern to check if sub-thing is also infix
        -> Infix op thing -- ^ Pretty print this infix expression
        -> Doc
ppInfix lp isInfix expr =
  sep [ ppSub (wrapSub LeftAssoc ) (ieLeft expr) <+> pp (ieOp expr)
      , ppSub (wrapSub RightAssoc) (ieRight expr) ]
  where
  -- does a sub-expression of precedence p need parens on side dir?
  wrapSub dir p = p < iePrec expr || p == iePrec expr && ieAssoc expr /= dir
  ppSub w e
    | Just e1 <- isInfix e = optParens (w (iePrec e1)) (ppInfix lp isInfix e1)
  ppSub _ e = ppPrec lp e
-- | Display a numeric value as an ordinal (e.g., 2nd)
ordinal :: (Integral a, Show a, Eq a) => a -> Doc
ordinal x = text (show x) <> text (ordSuffix x)
-- | The suffix to use when displaying a number as an ordinal
-- (\"st\" for 1, \"nd\" for 2, ...).  The sign of the number is
-- ignored, and numbers ending in 11--19 always take \"th\".
ordSuffix :: (Integral a, Eq a) => a -> String
ordSuffix n0
  | teen = "th"
  | otherwise =
      case lastDigit of
        1 -> "st"
        2 -> "nd"
        3 -> "rd"
        _ -> "th"
  where
  n = abs n0
  lastDigit = n `mod` 10
  -- 11th, 12th, 13th (and 111th, ...) are irregular
  teen = let m = n `mod` 100 in m >= 11 && m <= 19
-- Wrapped Combinators ---------------------------------------------------------
-- The combinators below lift the corresponding Text.PrettyPrint (PJ)
-- primitives over the 'NameEnv' parameter carried by 'Doc'.
-- | Lift a plain pretty-printer document (ignores the name environment).
liftPJ :: PJ.Doc -> Doc
liftPJ d = Doc (const d)
-- | Lift a unary combinator.
liftPJ1 :: (PJ.Doc -> PJ.Doc) -> Doc -> Doc
liftPJ1 f (Doc d) = Doc (\env -> f (d env))
-- | Lift a binary combinator; both sides see the same environment.
liftPJ2 :: (PJ.Doc -> PJ.Doc -> PJ.Doc) -> (Doc -> Doc -> Doc)
liftPJ2 f (Doc a) (Doc b) = Doc (\e -> f (a e) (b e))
-- | Lift a list combinator.
liftSep :: ([PJ.Doc] -> PJ.Doc) -> ([Doc] -> Doc)
liftSep f ds = Doc (\e -> f [ d e | Doc d <- ds ])
(<>) :: Doc -> Doc -> Doc
(<>) = liftPJ2 (PJ.<>)
(<+>) :: Doc -> Doc -> Doc
(<+>) = liftPJ2 (PJ.<+>)
($$) :: Doc -> Doc -> Doc
($$) = liftPJ2 (PJ.$$)
sep :: [Doc] -> Doc
sep = liftSep PJ.sep
fsep :: [Doc] -> Doc
fsep = liftSep PJ.fsep
hsep :: [Doc] -> Doc
hsep = liftSep PJ.hsep
hcat :: [Doc] -> Doc
hcat = liftSep PJ.hcat
vcat :: [Doc] -> Doc
vcat = liftSep PJ.vcat
hang :: Doc -> Int -> Doc -> Doc
hang (Doc p) i (Doc q) = Doc (\e -> PJ.hang (p e) i (q e))
nest :: Int -> Doc -> Doc
nest n = liftPJ1 (PJ.nest n)
parens :: Doc -> Doc
parens = liftPJ1 PJ.parens
braces :: Doc -> Doc
braces = liftPJ1 PJ.braces
brackets :: Doc -> Doc
brackets = liftPJ1 PJ.brackets
quotes :: Doc -> Doc
quotes = liftPJ1 PJ.quotes
-- | Intersperse a punctuation document; no trailing punctuation.
punctuate :: Doc -> [Doc] -> [Doc]
punctuate p = go
  where
  go (d:ds) | null ds = [d]
            | otherwise = d <> p : go ds
  go [] = []
text :: String -> Doc
text s = liftPJ (PJ.text s)
char :: Char -> Doc
char c = liftPJ (PJ.char c)
integer :: Integer -> Doc
integer i = liftPJ (PJ.integer i)
int :: Int -> Doc
int i = liftPJ (PJ.int i)
comma :: Doc
comma = liftPJ PJ.comma
empty :: Doc
empty = liftPJ PJ.empty
colon :: Doc
colon = liftPJ PJ.colon
-- Names -----------------------------------------------------------------------
-- | Build a document that can inspect the naming environment it is
-- rendered with.
withNameEnv :: (NameEnv -> Doc) -> Doc
withNameEnv f = Doc (\e -> runDoc e (f e))
-- | Render a document with a fixed naming environment, ignoring the one
-- supplied at render time.
fixNameEnv :: NameEnv -> Doc -> Doc
fixNameEnv env (Doc f) = Doc (\_ -> f env)
instance PP ModName where
  ppPrec _ (ModName ns) = hcat (punctuate (text "::") (map (text . unpack) ns))
instance PP QName where
  -- consult the naming environment for the display name and fixity
  ppPrec _ qn = withNameEnv $ \ names ->
    let NameInfo (QName mb n) isInfix = getNameInfo qn names
        mbNs = maybe empty (\ mn -> pp mn <> text "::") mb
    in optParens isInfix (mbNs <> pp n)
instance PP Name where
  ppPrec _ (Name x) = text (unpack x)
  -- XXX: This may clash with user-specified names.
  ppPrec _ (NewName p x) = text "__" <> passName p <> int x
-- | Pretty-print the qualified name as-is; don't consult the environment.
ppQName :: QName -> Doc
ppQName (QName mb n) = maybe empty (\ mn -> pp mn <> text "::") mb <> pp n
-- | Short tag used when printing compiler-generated names.
passName :: Pass -> Doc
passName pass =
  case pass of
    NoPat -> text "p"
    MonoValues -> text "mv"
| beni55/cryptol | src/Cryptol/Utils/PP.hs | bsd-3-clause | 6,389 | 0 | 16 | 1,677 | 2,410 | 1,267 | 1,143 | 157 | 4 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
module Data.IP.Range where
import Control.Monad
import Data.Bits
import Data.Data (Data)
import Data.IP.Addr
import Data.IP.Mask
import Data.String
import Data.Typeable (Typeable)
import Text.Appar.String
import GHC.Generics
----------------------------------------------------------------
{-|
A unified data for 'AddrRange' 'IPv4' and 'AddrRange' 'IPv6'.
To create this, use 'read' @\"192.0.2.0/24\"@ :: 'IPRange'.
Also, @\"192.0.2.0/24\"@ can be used as literal with OverloadedStrings.
>>> (read "192.0.2.1/24" :: IPRange) == IPv4Range (read "192.0.2.0/24" :: AddrRange IPv4)
True
>>> (read "2001:db8:00:00:00:00:00:01/48" :: IPRange) == IPv6Range (read "2001:db8:00:00:00:00:00:01/48" :: AddrRange IPv6)
True
-}
data IPRange = IPv4Range { ipv4range :: AddrRange IPv4 }
             | IPv6Range { ipv6range :: AddrRange IPv6 }
             deriving (Eq, Ord, Data, Generic, Typeable)
----------------------------------------------------------------
--
-- Range
--
{-|
The Addr range consists of an address, a contiguous mask,
and mask length. The contiguous mask and the mask length
are essentially the same information, but both are stored
so they need not be recomputed.
To create this, use 'makeAddrRange' or 'read' @\"192.0.2.0/24\"@ :: 'AddrRange' 'IPv4'.
Also, @\"192.0.2.0/24\"@ can be used as literal with OverloadedStrings.
>>> read "192.0.2.1/24" :: AddrRange IPv4
192.0.2.0/24
>>> read "2001:db8:00:00:00:00:00:01/48" :: AddrRange IPv6
2001:db8::/48
-}
data AddrRange a = AddrRange {
    -- |The 'addr' function returns an address from 'AddrRange'.
    addr :: !a
    -- |The 'mask' function returns a contiguous 'IP' mask from 'AddrRange'.
  , mask :: !a
    -- |The 'mlen' function returns a mask length from 'AddrRange'.
  , mlen :: {-# UNPACK #-} !Int
  }
  deriving (Eq, Ord, Data, Generic, Typeable)
----------------------------------------------------------------
--
-- Show
--
instance Show a => Show (AddrRange a) where
    show x = show (addr x) ++ "/" ++ show (mlen x)
instance Show IPRange where
    show (IPv4Range ip) = show ip
    show (IPv6Range ip) = show ip
----------------------------------------------------------------
--
-- Read
--
instance Read IPRange where
    readsPrec _ = parseIPRange
-- | 'reads'-style parser: tries IPv4 first, then IPv6; yields at most
-- one parse.
parseIPRange :: String -> [(IPRange,String)]
parseIPRange cs = case runParser ip4range cs of
    (Just ip,rest) -> [(IPv4Range ip,rest)]
    (Nothing,_) -> case runParser ip6range cs of
        (Just ip,rest) -> [(IPv6Range ip,rest)]
        (Nothing,_) -> []
instance Read (AddrRange IPv4) where
    readsPrec _ = parseIPv4Range
instance Read (AddrRange IPv6) where
    readsPrec _ = parseIPv6Range
-- | 'reads'-style parser for an IPv4 address range.
parseIPv4Range :: String -> [(AddrRange IPv4,String)]
parseIPv4Range cs = case runParser ip4range cs of
    (Nothing,_) -> []
    (Just a4,rest) -> [(a4,rest)]
-- | 'reads'-style parser for an IPv6 address range.
parseIPv6Range :: String -> [(AddrRange IPv6,String)]
parseIPv6Range cs = case runParser ip6range cs of
    (Nothing,_) -> []
    (Just a6,rest) -> [(a6,rest)]
-- | Parse an IPv4 range; a missing "/len" defaults to /32, and the
-- address is masked down to the range's network address.
ip4range :: Parser (AddrRange IPv4)
ip4range = do
    ip <- ip4
    len <- option 32 $ char '/' >> dig
    check len
    let msk = maskIPv4 len
        adr = ip `maskedIPv4` msk
    return $ AddrRange adr msk len
  where
    -- NOTE(review): unlike the IPv6 message below, this one does not
    -- include the offending length -- confirm whether that is intended.
    check len = when (len < 0 || 32 < len) (fail "IPv4 mask length")
-- | Apply a mask to an IPv4 address (bitwise AND).
maskedIPv4 :: IPv4 -> IPv4 -> IPv4
IP4 a `maskedIPv4` IP4 m = IP4 (a .&. m)
-- | Parse an IPv6 range; a missing "/len" defaults to /128, and the
-- address is masked down to the range's network address.
ip6range :: Parser (AddrRange IPv6)
ip6range = do
    ip <- ip6
    len <- option 128 $ char '/' >> dig
    check len
    let msk = maskIPv6 len
        adr = ip `maskedIPv6` msk
    return $ AddrRange adr msk len
  where
    check len = when (len < 0 || 128 < len) (fail ("IPv6 mask length: " ++ show len))
-- | Apply a mask to an IPv6 address, word by word.
maskedIPv6 :: IPv6 -> IPv6 -> IPv6
IP6 (a1,a2,a3,a4) `maskedIPv6` IP6 (m1,m2,m3,m4) = IP6 (a1.&.m1,a2.&.m2,a3.&.m3,a4.&.m4)
----------------------------------------------------------------
--
-- IsString
--
-- These instances let range literals be written directly with
-- OverloadedStrings; note that 'read' makes a malformed literal a
-- runtime error.
instance IsString IPRange where
    fromString = read
instance IsString (AddrRange IPv4) where
    fromString = read
instance IsString (AddrRange IPv6) where
    fromString = read
| kazu-yamamoto/iproute | Data/IP/Range.hs | bsd-3-clause | 4,173 | 0 | 13 | 839 | 1,075 | 577 | 498 | 78 | 3 |
{-#LANGUAGE OverloadedStrings#-}
import Shelly
import Prelude hiding (FilePath)
import System.Environment
import Data.String
-- Shelly wrappers around the external commands used by the doc build.
-- 'configure' forwards the program's own arguments to "cabal configure".
configure = command_ "cabal" ["configure"] . map fromString
haddock = command_ "cabal" ["haddock"] []
-- Stash generated docs in /tmp while switching branches.
moveToTemp = command_ "mv" ["-f","dist/doc","/tmp"] []
cleanTmp = command_ "rm" ["-rf", "/tmp/doc"] []
moveFromTemp = command_ "cp" ["-rf","/tmp/doc","dist/"] []
checkout a = command_ "git" ["checkout",a] []
add = command_ "git" ["add","dist/doc/*"] []
commit = command_ "git" ["commit","-am 'documentation updated'"] []
-- Build haddocks on the current branch, then commit them on gh-pages
-- and switch back to the development branch.
main = do
  as <- getArgs
  shelly $ verbosely $ do
    echo "Running haddock"
    silently $ configure as
    haddock
    cleanTmp
    moveToTemp
    silently $ checkout "gh-pages"
    moveFromTemp
    add
    commit
    echo "documentation updated. You can now push `gh-pages` branch to github"
    silently $ checkout "VT/development"
| TomMD/CV | makedocs.hs | bsd-3-clause | 968 | 0 | 11 | 239 | 273 | 138 | 135 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Yesod.Form.I18n.Russian where
import Yesod.Form.Types (FormMessage (..))
import Data.Monoid (mappend)
import Data.Text (Text)
-- | Russian translations of the Yesod form messages.
--
-- Messages that embed the offending user input append it to the fixed
-- Russian text with 'mappend'.  (The first clause previously used the
-- fully qualified @Data.Monoid.mappend@; it now uses the unqualified
-- 'mappend' like every other clause, since it is already imported.)
russianFormMessage :: FormMessage -> Text
russianFormMessage (MsgInvalidInteger t) = "Неверно записано целое число: " `mappend` t
russianFormMessage (MsgInvalidNumber t) = "Неверный формат числа: " `mappend` t
russianFormMessage (MsgInvalidEntry t) = "Неверный выбор: " `mappend` t
russianFormMessage MsgInvalidTimeFormat = "Неверно указано время, используйте формат ЧЧ:ММ[:СС]"
russianFormMessage MsgInvalidDay = "Неверно указана дата, используйте формат ГГГГ-ММ-ДД"
russianFormMessage (MsgInvalidUrl t) = "Неверно указан URL адрес: " `mappend` t
russianFormMessage (MsgInvalidEmail t) = "Неверно указана электронная почта: " `mappend` t
russianFormMessage (MsgInvalidHour t) = "Неверно указан час: " `mappend` t
russianFormMessage (MsgInvalidMinute t) = "Неверно указаны минуты: " `mappend` t
russianFormMessage (MsgInvalidSecond t) = "Неверно указаны секунды: " `mappend` t
russianFormMessage MsgCsrfWarning = "Для защиты от межсайтовой подделки запросов (CSRF), пожалуйста, подтвердите отправку данных формы."
russianFormMessage MsgValueRequired = "Обязательно к заполнению"
russianFormMessage (MsgInputNotFound t) = "Поле не найдено: " `mappend` t
russianFormMessage MsgSelectNone = "<Не выбрано>"
russianFormMessage (MsgInvalidBool t) = "Неверное логическое значение: " `mappend` t
russianFormMessage MsgBoolYes = "Да"
russianFormMessage MsgBoolNo = "Нет"
russianFormMessage MsgDelete = "Удалить?"
| psibi/yesod | yesod-form/Yesod/Form/I18n/Russian.hs | mit | 1,993 | 0 | 7 | 182 | 320 | 178 | 142 | 24 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE GADTs #-}
#if __GLASGOW_HASKELL__ >= 707
{-# LANGUAGE DeriveDataTypeable #-}
#endif
{-# OPTIONS_GHC -Wall #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Applicative.Trans.Free
-- Copyright : (C) 2012-2013 Edward Kmett
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : provisional
-- Portability : GADTs, Rank2Types
--
-- 'Applicative' functor transformers for free
----------------------------------------------------------------------------
module Control.Applicative.Trans.Free
(
-- | Compared to the free monad transformers, they are less expressive. However, they are also more
-- flexible to inspect and interpret, as the number of ways in which
-- the values can be nested is more limited.
--
-- See <http://paolocapriotti.com/assets/applicative.pdf Free Applicative Functors>,
-- by Paolo Capriotti and Ambrus Kaposi, for some applications.
ApT(..)
, ApF(..)
, liftApT
, liftApO
, runApT
, runApF
, runApT_
, hoistApT
, hoistApF
, transApT
, transApF
, joinApT
-- * Free Applicative
, Ap
, runAp
, runAp_
, retractAp
-- * Free Alternative
, Alt
, runAlt
) where
import Control.Applicative
import Control.Monad (liftM)
import Data.Functor.Apply
import Data.Functor.Identity
import Data.Typeable
#if !(MIN_VERSION_base(4,8,0))
import Data.Monoid (Monoid)
#endif
import qualified Data.Foldable as F
-- | The free 'Applicative' for a 'Functor' @f@.
data ApF f g a where
Pure :: a -> ApF f g a
Ap :: f a -> ApT f g (a -> b) -> ApF f g b
#if __GLASGOW_HASKELL__ >= 707
deriving Typeable
#endif
-- | The free 'Applicative' transformer for a 'Functor' @f@ over
-- 'Applicative' @g@.
newtype ApT f g a = ApT { getApT :: g (ApF f g a) }
#if __GLASGOW_HASKELL__ >= 707
deriving Typeable
#endif
instance Functor g => Functor (ApF f g) where
fmap f (Pure a) = Pure (f a)
fmap f (Ap x g) = x `Ap` fmap (f .) g
instance Functor g => Functor (ApT f g) where
fmap f (ApT g) = ApT (fmap f <$> g)
instance Applicative g => Applicative (ApF f g) where
pure = Pure
{-# INLINE pure #-}
Pure f <*> y = fmap f y -- fmap
y <*> Pure a = fmap ($ a) y -- interchange
Ap a f <*> b = a `Ap` (flip <$> f <*> ApT (pure b))
{-# INLINE (<*>) #-}
instance Applicative g => Applicative (ApT f g) where
pure = ApT . pure . pure
{-# INLINE pure #-}
ApT xs <*> ApT ys = ApT ((<*>) <$> xs <*> ys)
{-# INLINE (<*>) #-}
instance Applicative g => Apply (ApF f g) where
(<.>) = (<*>)
{-# INLINE (<.>) #-}
instance Applicative g => Apply (ApT f g) where
(<.>) = (<*>)
{-# INLINE (<.>) #-}
instance Alternative g => Alternative (ApT f g) where
empty = ApT empty
{-# INLINE empty #-}
ApT g <|> ApT h = ApT (g <|> h)
{-# INLINE (<|>) #-}
-- | A version of 'lift' that can be used with no constraint for @f@.
liftApT :: Applicative g => f a -> ApT f g a
liftApT x = ApT (pure (Ap x (pure id)))
-- | Lift an action of the \"outer\" 'Functor' @g a@ to @'ApT' f g a@.
liftApO :: Functor g => g a -> ApT f g a
liftApO g = ApT (Pure <$> g)
-- | Given natural transformations @f ~> h@ and @g . h ~> h@ this gives
-- a natural transformation @ApF f g ~> h@.
runApF :: (Applicative h, Functor g) => (forall a. f a -> h a) -> (forall a. g (h a) -> h a) -> ApF f g b -> h b
runApF _ _ (Pure x) = pure x
runApF f g (Ap x y) = f x <**> runApT f g y
-- | Given natural transformations @f ~> h@ and @g . h ~> h@ this gives
-- a natural transformation @ApT f g ~> h@.
runApT :: (Applicative h, Functor g) => (forall a. f a -> h a) -> (forall a. g (h a) -> h a) -> ApT f g b -> h b
runApT f g (ApT a) = g (runApF f g <$> a)
-- | Perform a monoidal analysis over @'ApT' f g b@ value.
--
-- Examples:
--
-- @
-- height :: ('Functor' g, 'F.Foldable' g) => 'ApT' f g a -> 'Int'
-- height = 'getSum' . runApT_ (\_ -> 'Sum' 1) 'F.maximum'
-- @
--
-- @
-- size :: ('Functor' g, 'F.Foldable' g) => 'ApT' f g a -> 'Int'
-- size = 'getSum' . runApT_ (\_ -> 'Sum' 1) 'F.fold'
-- @
runApT_ :: (Functor g, Monoid m) => (forall a. f a -> m) -> (g m -> m) -> ApT f g b -> m
runApT_ f g = getConst . runApT (Const . f) (Const . g . fmap getConst)
-- | Given a natural transformation from @f@ to @f'@ this gives a monoidal natural transformation from @ApF f g@ to @ApF f' g@.
hoistApF :: Functor g => (forall a. f a -> f' a) -> ApF f g b -> ApF f' g b
hoistApF _ (Pure x) = Pure x
hoistApF f (Ap x y) = f x `Ap` hoistApT f y
-- | Given a natural transformation from @f@ to @f'@ this gives a monoidal natural transformation from @ApT f g@ to @ApT f' g@.
hoistApT :: Functor g => (forall a. f a -> f' a) -> ApT f g b -> ApT f' g b
hoistApT f (ApT g) = ApT (hoistApF f <$> g)
-- | Given a natural transformation from @g@ to @g'@ this gives a monoidal natural transformation from @ApF f g@ to @ApF f g'@.
transApF :: Functor g => (forall a. g a -> g' a) -> ApF f g b -> ApF f g' b
transApF _ (Pure x) = Pure x
transApF f (Ap x y) = x `Ap` transApT f y
-- | Given a natural transformation from @g@ to @g'@ this gives a monoidal natural transformation from @ApT f g@ to @ApT f g'@.
transApT :: Functor g => (forall a. g a -> g' a) -> ApT f g b -> ApT f g' b
transApT f (ApT g) = ApT $ f (transApF f <$> g)
-- | Pull out and join @m@ layers of @'ApT' f m a@.
joinApT :: Monad m => ApT f m a -> m (Ap f a)
joinApT (ApT m) = m >>= joinApF
  where
    joinApF (Pure x) = return (pure x)
    joinApF (Ap x y) = (liftApT x <**>) `liftM` joinApT y
-- | The free 'Applicative' for a 'Functor' @f@.
-- Specializes 'ApT' with 'Identity' as the outer layer.
type Ap f = ApT f Identity
-- | Given a natural transformation from @f@ to @g@, this gives a canonical monoidal natural transformation from @'Ap' f@ to @g@.
--
-- prop> runAp t == retractApp . hoistApp t
runAp :: Applicative g => (forall x. f x -> g x) -> Ap f a -> g a
runAp f = runApT f runIdentity
-- | Perform a monoidal analysis over free applicative value.
--
-- Example:
--
-- @
-- count :: 'Ap' f a -> 'Int'
-- count = 'getSum' . runAp_ (\\_ -> 'Sum' 1)
-- @
runAp_ :: Monoid m => (forall x. f x -> m) -> Ap f a -> m
runAp_ f = runApT_ f runIdentity
-- | Interprets the free applicative functor over f using the semantics for
-- `pure` and `<*>` given by the Applicative instance for f.
--
-- prop> retractApp == runAp id
retractAp :: Applicative f => Ap f a -> f a
retractAp = runAp id
-- | The free 'Alternative' for a 'Functor' @f@.
-- Specializes 'ApT' with the list functor providing choice.
type Alt f = ApT f []
-- | Given a natural transformation from @f@ to @g@, this gives a canonical monoidal natural transformation from @'Alt' f@ to @g@.
runAlt :: (Alternative g, F.Foldable t) => (forall x. f x -> g x) -> ApT f t a -> g a
runAlt f (ApT xs) = F.foldr (\x acc -> h x <|> acc) empty xs
  where
    h (Pure x) = pure x
    h (Ap x g) = f x <**> runAlt f g
#if __GLASGOW_HASKELL__ < 707
-- Hand-written Typeable1 instances for GHC < 7.7, which cannot derive
-- Typeable for these types.  The local 'f' bindings are phantom
-- (never evaluated); they only fix the type argument for typeOf1.
instance (Typeable1 f, Typeable1 g) => Typeable1 (ApT f g) where
  typeOf1 t = mkTyConApp apTTyCon [typeOf1 (f t)] where
    f :: ApT f g a -> g (f a)
    f = undefined
instance (Typeable1 f, Typeable1 g) => Typeable1 (ApF f g) where
  typeOf1 t = mkTyConApp apFTyCon [typeOf1 (f t)] where
    f :: ApF f g a -> g (f a)
    f = undefined
apTTyCon, apFTyCon :: TyCon
#if __GLASGOW_HASKELL__ < 704
apTTyCon = mkTyCon "Control.Applicative.Trans.Free.ApT"
apFTyCon = mkTyCon "Control.Applicative.Trans.Free.ApF"
#else
apTTyCon = mkTyCon3 "free" "Control.Applicative.Trans.Free" "ApT"
apFTyCon = mkTyCon3 "free" "Control.Applicative.Trans.Free" "ApF"
#endif
-- NOINLINE keeps a single shared TyCon per type.
{-# NOINLINE apTTyCon #-}
{-# NOINLINE apFTyCon #-}
#endif
| dalaing/free | src/Control/Applicative/Trans/Free.hs | bsd-3-clause | 7,641 | 0 | 13 | 1,725 | 2,204 | 1,164 | 1,040 | 113 | 2 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ms-MY">
<title>Front-End Scanner | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/frontendscanner/src/main/javahelp/org/zaproxy/zap/extension/frontendscanner/resources/help_ms_MY/helpset_ms_MY.hs | apache-2.0 | 978 | 78 | 67 | 159 | 417 | 211 | 206 | -1 | -1 |
import Foo.Bar
import qualified Foo.Baz as Z
-- NOTE(review): this file appears to be expected output for a
-- refactoring-tool test ("Main.expected.hs"); 'bar1' is presumably
-- exported by Foo.Bar -- verify before changing anything here.
main = putStrLn "hello"
baz = Z.bar + bar1
| RefactoringTools/HaRe | test/testdata/cabal/foo/src/Main.expected.hs | bsd-3-clause | 92 | 0 | 6 | 20 | 33 | 19 | 14 | 4 | 1 |
module Demote.WhereIn1 where
--A definition can be demoted to the local 'where' binding of a friend declaration,
--if it is only used by this friend declaration.
--Demoting a definition narrows down the scope of the definition.
--In this example, demote the top level 'sq' to 'sumSquares'
--This example also aims to test the adding of a 'where' claus.
-- 'sumSquares' is the only user of the top-level 'sq'; the demote
-- refactoring under test moves 'sq' into a local 'where' here.
sumSquares x y = sq x + sq y
sq 0 = 0
sq z = z^pow
pow=2
-- 'anotherFun' uses its own local 'sq', not the top-level one.
anotherFun 0 y = sq y
  where sq x = x^2
| kmate/HaRe | test/testdata/Demote/WhereIn1.hs | bsd-3-clause | 469 | 0 | 7 | 107 | 83 | 44 | 39 | 7 | 1 |
{-# LANGUAGE PolyKinds, TypeFamilies, MagicHash, DataKinds, RankNTypes #-}
module T11473 where
import GHC.Exts
import GHC.Types
-- | Maps a (possibly unlifted) type to its boxed representation.
type family Boxed (a :: k) :: *
type instance Boxed Char# = Char
type instance Boxed Char = Char
-- | A class whose argument may live at any runtime representation
-- (note the @TYPE lev@ kind).
class BoxIt (a :: TYPE lev) where
  boxed :: a -> Boxed a
instance BoxIt Char# where boxed x = C# x
instance BoxIt Char where boxed = id
-- NOTE(review): this looks like a GHC testsuite fixture (T11473)
-- exercising levity-polymorphic class methods -- do not "fix" it.
hello :: forall (r :: RuntimeRep). forall (a :: TYPE r). BoxIt a => a -> Boxed a
hello x = boxed x
{-# NOINLINE hello #-}
main :: IO ()
main = do
  print $ boxed 'c'#
  print $ boxed 'c'
  print $ hello 'c'
  print $ hello 'c'#
| sdiehl/ghc | testsuite/tests/dependent/should_fail/T11473.hs | bsd-3-clause | 609 | 0 | 9 | 143 | 223 | 117 | 106 | 20 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP #-}
#ifdef __GLASGOW_HASKELL__
{-# LANGUAGE DeriveDataTypeable, StandaloneDeriving #-}
{-# LANGUAGE MagicHash #-}
#if !defined(__PARALLEL_HASKELL__)
{-# LANGUAGE UnboxedTuples #-}
#endif
#endif
-----------------------------------------------------------------------------
-- |
-- Module : System.Mem.StableName
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable
--
-- Stable names are a way of performing fast (O(1)), not-quite-exact
-- comparison between objects.
--
-- Stable names solve the following problem: suppose you want to build
-- a hash table with Haskell objects as keys, but you want to use
-- pointer equality for comparison; maybe because the keys are large
-- and hashing would be slow, or perhaps because the keys are infinite
-- in size. We can\'t build a hash table using the address of the
-- object as the key, because objects get moved around by the garbage
-- collector, meaning a re-hash would be necessary after every garbage
-- collection.
--
-------------------------------------------------------------------------------
module System.Mem.StableName (
-- * Stable Names
StableName,
makeStableName,
hashStableName,
) where
import Prelude
import Data.Typeable
#ifdef __HUGS__
import Hugs.Stable
#endif
#ifdef __GLASGOW_HASKELL__
import GHC.IO ( IO(..) )
import GHC.Base ( Int(..), StableName#, makeStableName#
, eqStableName#, stableNameToInt# )
-----------------------------------------------------------------------------
-- Stable Names
{-|
An abstract name for an object, that supports equality and hashing.
Stable names have the following property:
* If @sn1 :: StableName@ and @sn2 :: StableName@ and @sn1 == sn2@
then @sn1@ and @sn2@ were created by calls to @makeStableName@ on
the same object.
The reverse is not necessarily true: if two stable names are not
equal, then the objects they name may still be equal. Note in particular
that `mkStableName` may return a different `StableName` after an
object is evaluated.
Stable Names are similar to Stable Pointers ("Foreign.StablePtr"),
but differ in the following ways:
* There is no @freeStableName@ operation, unlike "Foreign.StablePtr"s.
Stable names are reclaimed by the runtime system when they are no
longer needed.
* There is no @deRefStableName@ operation. You can\'t get back from
a stable name to the original Haskell object. The reason for
this is that the existence of a stable name for an object does not
guarantee the existence of the object itself; it can still be garbage
collected.
-}
-- | Boxed wrapper around the primitive StableName#.
data StableName a = StableName (StableName# a)
-- | Makes a 'StableName' for an arbitrary object. The object passed as
-- the first argument is not evaluated by 'makeStableName'.
makeStableName :: a -> IO (StableName a)
#if defined(__PARALLEL_HASKELL__)
-- Stable names are not supported by the parallel RTS.
makeStableName a =
  error "makeStableName not implemented in parallel Haskell"
#else
makeStableName a = IO $ \ s ->
    case makeStableName# a s of (# s', sn #) -> (# s', StableName sn #)
#endif
-- | Convert a 'StableName' to an 'Int'. The 'Int' returned is not
-- necessarily unique; several 'StableName's may map to the same 'Int'
-- (in practice however, the chances of this are small, so the result
-- of 'hashStableName' makes a good hash key).
hashStableName :: StableName a -> Int
#if defined(__PARALLEL_HASKELL__)
hashStableName (StableName sn) =
  error "hashStableName not implemented in parallel Haskell"
#else
hashStableName (StableName sn) = I# (stableNameToInt# sn)
#endif
instance Eq (StableName a) where
#if defined(__PARALLEL_HASKELL__)
    (StableName sn1) == (StableName sn2) =
      error "eqStableName not implemented in parallel Haskell"
#else
    -- eqStableName# yields an unboxed Int#: 0# means "not equal"
    (StableName sn1) == (StableName sn2) =
       case eqStableName# sn1 sn2 of
         0# -> False
         _ -> True
#endif
#endif /* __GLASGOW_HASKELL__ */
#include "Typeable.h"
INSTANCE_TYPEABLE1(StableName,stableNameTc,"StableName")
| jtojnar/haste-compiler | libraries/ghc-7.8/base/System/Mem/StableName.hs | bsd-3-clause | 4,172 | 2 | 8 | 742 | 259 | 168 | 91 | -1 | -1 |
module Main where
-- | Classify a body-mass index (weight / height^2) with a rude remark.
bmiTell :: (RealFloat a) => a -> a -> String
bmiTell weight height = classify (weight / height ^ 2)
  where
    -- thresholds: <=18.5 underweight, <=25 normal, <=30 fat, else whale
    classify bmi
      | bmi <= 18.5 = "You're underweight, you emo, you!"
      | bmi <= 25.0 = "You're supposedly normal. Pffft, I bet you're ugly!"
      | bmi <= 30.0 = "You're fat! Lose some weight, fatty!"
      | otherwise   = "You're a whale, congratulations!"
| rglew/lyah | bmi.hs | mit | 462 | 0 | 8 | 151 | 111 | 57 | 54 | 11 | 1 |
{-# LANGUAGE RebindableSyntax, OverloadedStrings #-}
module Main (main) where
import Prelude hiding ((>>), return)
import Data.String
import Data.Void
import React
import React.DOM
import React.GHCJS
import React.Rebindable
import Cochon.Imports
import Cochon.Model
import Cochon.Reactify
import DisplayLang.DisplayTm
import Evidences.Tm
-- NOTE: this module uses RebindableSyntax with Prelude's (>>)/return
-- hidden, so the do-blocks below are sequenced by React.Rebindable's
-- operators, and (with OverloadedStrings) the bare string literals
-- presumably become text nodes -- confirm against React.Rebindable.
-- | Top-level design-mode page component.
page_ :: () -> ReactNode Void
page_ = classLeaf $ smartClass
    { React.name = "page"
    , transition = \(state, sig) -> (state, Nothing)
    , renderFn = \_ _ -> div_ [] $ do
        h1_ [] "Design Mode!"
        pairs_
        sigmas_
        pis_
        scopes_
        dataLayouts_
    }
-- relnames_ = div_ [] $ do
-- | Gallery of pair terms: each literal label is followed by the
-- rendered term it describes.
pairs_ :: ReactNode TermTransition
pairs_ = div_ [ class_ "demo" ] $ do
    h2_ [] "pairs"
    "DVOID"
    pair_ DVOID
    "DPAIR DUNIT (DPAIR DUNIT (DPAIR DUNIT DVOID))"
    pair_ $ DPAIR DUNIT (DPAIR DUNIT (DPAIR DUNIT DVOID))
    "DPAIR (DPAIR DUNIT DVOID) (DPAIR DUNIT DVOID)"
    pair_ $ DPAIR (DPAIR DUNIT DVOID) (DPAIR DUNIT DVOID)
-- | Gallery of sigma (dependent pair) types.
sigmas_ :: ReactNode TermTransition
sigmas_ = div_ [ class_ "demo" ] $ do
    h2_ [] "sigmas"
    "DUNIT"
    sigma_ DUNIT
    "DSIGMA (DL \"x\" ::. DUNIT)"
    sigma_ $ DSIGMA DUNIT (DL ("x" ::. DUNIT))
    "DSIGMA (DL (DK DUNIT))"
    sigma_ $ DSIGMA DUNIT (DL (DK DUNIT))
    "DSIGMA DUNIT DUNIT"
    sigma_ $ DSIGMA DUNIT DUNIT
-- | Gallery of pi (function) types.
pis_ :: ReactNode TermTransition
pis_ = div_ [ class_ "demo" ] $ do
    h2_ [] "pis"
    "DARR DUNIT DUNIT"
    pi_ $ DARR DUNIT DUNIT
    "DPI DUNIT (DLK DUNIT)"
    pi_ $ DPI DUNIT (DLK DUNIT)
    "DPI DUNIT (DL (\"unit\" ::. DUNIT))"
    pi_ $ DPI DUNIT (DL ("unit" ::. DUNIT))
-- | Gallery of binders/scopes.
scopes_ :: ReactNode TermTransition
scopes_ = div_ [ class_ "demo" ] $ do
    h2_ [] "scopes"
    "DLK DUNIT"
    dInTmRN_ $ DLK DUNIT
    "DL (\"x\" ::. DUNIT)"
    dInTmRN_ $ DL ("x" ::. DUNIT)
    "DL (\"x\" ::. (DL (\"y\" ::. (DL (\"z\" ::. DUNIT)))))"
    dInTmRN_ $ DL ("x" ::. (DL ("y" ::. (DL ("z" ::. DUNIT)))))
-- cans_ = div_ [] $ do
--     can_ Set
--     -- can_ (Pi )
--
-- dInTms_ = div_ [] $ do
--     dInTmRN_ SET
--     dInTmRN_ ARR SET SET
-- | Data-layout demo section.
dataLayouts_ :: ReactNode TermTransition
dataLayouts_ = div_ [ class_ "demo" ] $ do
    h2_ [] "data"
    locally dataLayout_
-- | Mount the page into the #inject element.
-- NOTE(review): the (Just ...) lambda patterns are partial; this
-- crashes if the document or the "inject" element is missing.
main :: IO ()
main = currentDocument >>= \(Just doc) ->
    documentGetElementById doc ("inject" :: JSString) >>= \(Just e) ->
    render (page_ ()) e
| kwangkim/pigment | src-web/hs/DesignMode.hs | mit | 2,390 | 0 | 19 | 620 | 736 | 364 | 372 | 71 | 1 |
import Control.Applicative
import Sat
import System.Environment
-- | Generate a community-structured SAT instance and print it in
-- DIMACS format.  Expects exactly four arguments:
-- vars, clauses, communities, and q as a percentage (divided by 100
-- before being passed to 'makeCmty').
-- NOTE(review): the error text "argument s" looks like a typo for
-- "arguments" -- left unchanged here since it is a runtime string.
main :: IO ()
main = do
  args <- getArgs
  case args of
    [a, b, c, d] -> putStr =<< printDimacs <$> makeCmty (read a) (read b) (read c) (fromIntegral(read d) / 100)
    _ -> error "Unknown argument s, must be <vars> <clauses> <cmtys> <q>."
| JLiangWaterloo/sat | Haskell/CmtyWithInteger.hs | mit | 334 | 0 | 16 | 89 | 121 | 62 | 59 | 9 | 2 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.