code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity
---|---|---|---|---|---|---|---|---|---|---|---|---
{-# LANGUAGE OverloadedStrings #-}
{-|
Module : MsgFmt
Description : Raw pack-it-forms message handling
Copyright : (c) 2015 Peter Amidon <[email protected]>
: (c) 2015 Keith Amidon <[email protected]>
Generic handling of pack-it-forms encoded forms, which are also
compatible with PacFORMS encoded forms. Forms consist of a set of key
value pairs representing the form fields. All field names and values
are strings to allow this module to provide basic functionality for
any form. It is expected that type-safe modules will be built on top
of this module for specific forms that require automated processing.
-}
module PackItForms.MsgFmt
( parse
, parseFile
, getValue
, getEnvVal
, getText
, getMap
, emptyRep
, fromList
, insertKV
, insertEnvKV
, insertAll
, insertEnvAll
, MsgFmt(..)
, MsgNo(..)
) where
import System.IO
import qualified Data.Map as M
import Data.Maybe
import qualified Data.Text as T
import Data.String.Utils
-- | Identifying number given to the message
type MsgNo = String
-- | Representation of a message
data MsgFmt = MsgFmt { envelope :: M.Map String String
, fields :: M.Map String String
, text :: T.Text
} deriving (Show, Eq)
-- | Accessor for the value of a field in the message
getValue :: MsgFmt -> String -> Maybe String
getValue (MsgFmt _ m _) k = M.lookup k m
-- | Accessor for the encoded form message text
getText :: MsgFmt -> T.Text
getText (MsgFmt _ _ s) = s
-- | Accessor for all of the fields as a map
getMap :: MsgFmt -> M.Map String String
getMap (MsgFmt _ m _) = m
-- | Accessor for the value of an envelope field
getEnvVal :: MsgFmt -> String -> Maybe String
getEnvVal (MsgFmt e _ _) k = M.lookup k e
-- | Create an empty message
emptyRep :: MsgFmt
emptyRep = MsgFmt M.empty M.empty ""
-- | Create a message from a list of envelope key/value pairs and a list of field key/value pairs
fromList :: [(String, String)] -> [(String, String)] -> MsgFmt
fromList e v = insertEnvAll (insertAll emptyRep v) e
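-- For example (an illustrative sketch of the API; the values are made up):
--
-- >>> let m = fromList [("rcvd", "1")] [("MsgNo", "ABC123")]
-- >>> getEnvVal m "rcvd"
-- Just "1"
-- >>> getValue m "MsgNo"
-- Just "ABC123"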
-- | Add a field key/value pair to a message
insertKV :: MsgFmt -> String -> String -> MsgFmt
insertKV (MsgFmt e m s) k v =
let nis = encodeKV (quoteKey k) (quoteValue v)
in MsgFmt e (M.insert k v m) (T.append s nis)
-- | Add a key/value pair to a message envelope
insertEnvKV :: MsgFmt -> String -> String -> MsgFmt
insertEnvKV m k v = insertEnvAll m [(k, v)]
-- Backtick escape invalid characters in encoded field names
quoteKey :: String -> String
quoteKey = backtickQuote "`:#!"
-- Backtick escape invalid characters in encoded field values
quoteValue :: String -> String
quoteValue = backtickQuote "`]#!"
-- Backtick quoting function
backtickQuote :: String -> String -> String
backtickQuote d = foldr step ""
where step y acc | y `elem` d = '`':y:acc
| otherwise = y:acc
-- Encode a key/value pair in the text representation
encodeKV :: String -> String -> T.Text
encodeKV k v = T.pack $ k ++ ": [" ++ v ++ "]\r\n"
-- | Add a list of field key/value pairs to a message
insertAll :: MsgFmt -> [(String, String)] -> MsgFmt
insertAll = foldl $ uncurry . insertKV
-- | Add a list of key/value pairs to the envelope of a message
insertEnvAll :: MsgFmt -> [(String, String)] -> MsgFmt
insertEnvAll (MsgFmt e m s) kv =
let nEnv = M.union (M.fromList kv) e
lines' = lines . T.unpack $ s
ns = if any isEnvLine lines'
then T.pack . unlines . map (regenEnv nEnv) $ lines'
else T.pack . unlines . ((genEnv nEnv++"\n"):) $ lines'
in MsgFmt nEnv m ns
where regenEnv :: M.Map String String -> String -> String
regenEnv e s | isEnvLine s = genEnv e
| otherwise = s
isEnvLine = startswith "!OUTPOST! "
genEnv :: M.Map String String -> String
genEnv e = "!OUTPOST! " ++ serializeEnv e
serializeEnv :: M.Map String String -> String
serializeEnv e = drop 2 $ M.foldrWithKey strEntry "" e
strEntry :: String -> String -> String -> String
strEntry k v s = s ++ ", " ++ k ++ "=" ++ v
-- | Parse an encoded form message to a MsgFmt
--
-- The delimiters are ':', '[' and ']': a ':' ends a key, a '[' starts a
-- value, and a ']' ends a value. In all components a backtick followed
-- by a character is an escape for that character, so "k`:ey" is a valid
-- key that parses to the name "k:ey". Anything between delimiters is
-- ignored; notably, any characters between the colon that ends a key and
-- the first unescaped opening bracket are ignored.
parse :: String -> MsgFmt
parse p = MsgFmt (parseEnv p) (parseMap M.empty "" $ stripUnnecessary p) $ T.pack p
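-- For example (illustrative):
--
-- >>> getValue (parse "MsgNo: [ABC123]\r\n") "MsgNo"
-- Just "ABC123"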
-- Skip comment and directive lines
stripUnnecessary :: String -> String
stripUnnecessary = unlines . filter pred . lines
where pred = maybe True notComment . listToMaybe
notComment = not . (`elem` ("#!" ::String))
-- Create map of key/value pairs for each field
parseMap :: M.Map String String -> String -> String -> M.Map String String
parseMap parsed _ "" = parsed
parseMap parsed key string = if key == ""
then let (k,ks) = splitEF ':' string
in parseMap parsed (stripKey k) ks
else let (_,v) = splitEF '[' string
(nv,vs) = splitEF ']' v
submap = parseMap parsed "" vs
in M.insert key nv submap
-- stripKey is used to get rid of the newline that might be at the
-- beginning of some keys
where stripKey ('\r':'\n':xs) = xs
stripKey ('\n':xs) = xs
stripKey x = x
parseEnv :: String -> M.Map String String
parseEnv msg = maybe M.empty parseEnvLine line
where line :: Maybe String
line = listToMaybe . filter (startswith "!OUTPOST! ") . lines $ msg
parseEnvLine :: String -> M.Map String String
parseEnvLine l = M.fromList . mapMaybe mkKV . split "," $ drop 10 l
mkKV :: String -> Maybe (String, String)
mkKV s = let strs = split "=" s
in if length strs == 2
then Just (lstrip $ head strs, strs !! 1)
else Nothing
-- Split keys from values
splitEF :: Char -> String -> (String, String)
splitEF = splitEF' False
-- Worker for splitting keys and values, removing backtick escapes
splitEF' :: Bool -> Char -> String -> (String,String)
splitEF' _ _ [] = ("","")
splitEF' False d ('`':xs) = let rest = splitEF' True d xs
in rest
splitEF' b d (x:xs)
| not b && d == x = ("",xs)
| otherwise = let rest = splitEF' False d xs
in (x : fst rest, snd rest)
-- | Parse the encoded form message in a file into a MsgFmt
parseFile :: String -> IO MsgFmt
parseFile f = do
s <- readFile f
return $ parse s
| pack-it-forms/msgfmt | src/PackItForms/MsgFmt.hs | apache-2.0 | 6,920 | 0 | 15 | 1,886 | 1,797 | 943 | 854 | 116 | 4 |
{-# LANGUAGE OverloadedStrings #-}
module Stubby.Net.Admin (adminserver) where
import Prelude hiding (concat)
import Stubby.Settings (Settings, getAdmin, getLocation)
import Stubby.CLI.Logging (status)
import Stubby.Net.LoggerMiddleware (logger)
import Network.Wai (Application, responseLBS)
import Network.Wai.Handler.Warp (runSettings,setHost,setPort,defaultSettings)
import Network.HTTP.Types (status200)
import Data.ByteString.Char8 (pack, concat)
import Data.String (fromString)
adminserver' :: Application
adminserver' _ respond = respond $
responseLBS status200 [("Content-Type", "text/plain")] "Hello, World! -- admin"
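-- | Run the admin portal as a Warp server, using the host and port taken from
-- the supplied 'Settings' and logging requests through 'logger'.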
adminserver :: Settings -> IO ()
adminserver args = do
let port = getAdmin args
host = getLocation args
settings = setHost (fromString host) $ setPort port defaultSettings
msg = concat [ "Admin"
, " portal running at http://"
, pack host
, ":"
, pack $ show port
]
status msg
runSettings settings $ logger args adminserver'
| mrak/stubby4hs | src/lib/Stubby/Net/Admin.hs | apache-2.0 | 1,113 | 0 | 13 | 277 | 283 | 159 | 124 | 26 | 1 |
module NVIM.Command where
import Data.MessagePack (Object (..))
import Data.MessagePack.RPC (Request (..))
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Vector as Vector
-- TODO Support special types (with newtype over Object) for Buffer, Window, Tabpage (EXT 0, 1, 2)
-- see https://neovim.io/doc/user/api.html
data Command
= NvimBufSetLines Object Int Int Bool [Text]
| NvimCommand [Object]
| NvimCommandOutput [Object]
| NvimCallFunction Text [Object]
| NvimCreateBuf Bool Bool -- {listed} {scratch}
| NvimOpenWin Object Bool Object
| NvimWinClose Object Bool
| NvimWinGetBuf Object
| NvimWinGetHeight Object
| NvimWinGetWidth Object
mkRequest :: Int -> Command -> Request
mkRequest msgId (NvimBufSetLines b s e strict xs) =
Request msgId (Text.pack "nvim_buf_set_lines") [b, ObjectInt s, ObjectInt e, ObjectBool strict, ObjectArray (Vector.fromList (ObjectStr <$> xs))]
mkRequest msgId (NvimCommand xs) =
Request msgId (Text.pack "nvim_command") xs
mkRequest msgId (NvimCommandOutput xs) =
Request msgId (Text.pack "nvim_command_output") xs
mkRequest msgId (NvimCallFunction m xs) =
Request msgId (Text.pack "nvim_call_function") [ObjectStr m, ObjectArray (Vector.fromList xs)]
mkRequest msgId (NvimCreateBuf l s) =
Request msgId (Text.pack "nvim_create_buf") [ObjectBool l, ObjectBool s]
mkRequest msgId (NvimOpenWin b e cfg) =
Request msgId (Text.pack "nvim_open_win") [b, ObjectBool e, cfg]
mkRequest msgId (NvimWinClose w force) =
Request msgId (Text.pack "nvim_win_close") [w, ObjectBool force]
mkRequest msgId (NvimWinGetBuf w) =
Request msgId (Text.pack "nvim_win_get_buf") [w]
mkRequest msgId (NvimWinGetHeight w) =
Request msgId (Text.pack "nvim_win_get_height") [w]
mkRequest msgId (NvimWinGetWidth w) =
Request msgId (Text.pack "nvim_win_get_width") [w]
| aloiscochard/sarsi | sarsi-nvim/NVIM/Command.hs | apache-2.0 | 1,847 | 0 | 11 | 270 | 593 | 315 | 278 | 38 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Utils.Vigilance.Workers.StaticWatchWorker (runWorker) where
import Data.Acid (AcidState)
import Control.Concurrent.STM ( TChan
, atomically
, readTChan )
import Control.Monad.Trans (lift)
import Control.Lens
import Utils.Vigilance.Logger ( pushLog
, renameLogCtx )
import Utils.Vigilance.TableOps (mergeStaticWatchesS)
import Utils.Vigilance.Types
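-- | Wait for a new 'Config' on the channel and merge its static watches into
-- the acid-state store, logging progress along the way.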
runWorker :: AcidState AppState -> TChan Config -> LogCtxT IO ()
runWorker acid cfgChan = renameLogCtx "Watch Config Monitor" $ do
pushLog "Waiting for new watches"
cfg <- lift $ atomically $ readTChan cfgChan
pushLog "Merging new static watches"
lift $ mergeStaticWatchesS acid $ cfg ^. configWatches
pushLog "Static watches merged"
| MichaelXavier/vigilance | src/Utils/Vigilance/Workers/StaticWatchWorker.hs | bsd-2-clause | 826 | 0 | 11 | 190 | 185 | 99 | 86 | 19 | 1 |
import Options.Applicative
import Options
main :: IO ()
main = do
opts <- execParser Options.parser
perform (optCommand opts)
| lovasko/swim | src/Main.hs | bsd-2-clause | 132 | 0 | 9 | 24 | 48 | 23 | 25 | 6 | 1 |
{-# LANGUAGE CPP, InstanceSigs, RankNTypes, ScopedTypeVariables, TypeFamilies, UndecidableInstances #-}
-- | Continuation-passing parser for context-free grammars
module Text.Grampa.ContextFree.Continued (Parser(..), Result(..), alt) where
import Control.Applicative (Applicative(..), Alternative(..), liftA2)
import Control.Monad (Monad(..), MonadPlus(..))
#if MIN_VERSION_base(4,13,0)
import Control.Monad (MonadFail(fail))
#endif
import Data.Functor.Classes (Show1(..))
import Data.Functor.Compose (Compose(..))
import Data.Semigroup (Semigroup(..))
import Data.Monoid (Monoid(mappend, mempty))
import Data.Monoid.Factorial(FactorialMonoid)
import Data.Monoid.Textual(TextualMonoid)
import Data.String (fromString)
import Debug.Trace (trace)
import Witherable (Filterable(mapMaybe))
import qualified Data.Monoid.Factorial as Factorial
import qualified Data.Monoid.Null as Null
import qualified Data.Monoid.Textual as Textual
import qualified Data.Semigroup.Cancellative as Cancellative
import qualified Rank2
import qualified Text.Parser.Char
import Text.Parser.Char (CharParsing)
import Text.Parser.Combinators (Parsing(..))
import Text.Parser.LookAhead (LookAheadParsing(..))
import Text.Parser.Input.Position (fromEnd)
import Text.Grampa.Class (CommittedParsing(..), DeterministicParsing(..),
InputParsing(..), InputCharParsing(..), MultiParsing(..),
ParseResults, ParseFailure(..), FailureDescription(..), Pos)
import Text.Grampa.Internal (expected, TraceableParsing(..))
data Result (g :: (* -> *) -> *) s v = Parsed{parsedPrefix :: !v,
parsedSuffix :: !s}
| NoParse (ParseFailure Pos s)
-- | Parser type for context-free grammars that uses a continuation-passing algorithm, fast for grammars in LL(1)
-- class but with potentially exponential performance for longer ambiguous prefixes.
newtype Parser (g :: (* -> *) -> *) s r =
Parser{applyParser :: forall x. s -> (r -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x}
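-- A minimal illustration of the continuation interface (not part of the API):
-- a parser that succeeds without consuming any input, equivalent to 'pure'.
--
-- > constParser :: a -> Parser g s a
-- > constParser a = Parser (\rest success failure -> success a rest failure)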
instance Show s => Show1 (Result g s) where
liftShowsPrec showsPrecSub _showList prec Parsed{parsedPrefix= r} rest = "Parsed " ++ showsPrecSub prec r rest
liftShowsPrec _showsPrec _showList _prec (NoParse f) rest = "NoParse " ++ shows f rest
instance Functor (Result g s) where
fmap f (Parsed a rest) = Parsed (f a) rest
fmap _ (NoParse failure) = NoParse failure
instance Functor (Parser g s) where
fmap f (Parser p) = Parser (\input success-> p input (success . f))
{-# INLINABLE fmap #-}
instance Applicative (Parser g s) where
pure a = Parser (\input success failure-> success a input failure)
(<*>) :: forall a b. Parser g s (a -> b) -> Parser g s a -> Parser g s b
Parser p <*> Parser q = Parser r where
r :: forall x. s -> (b -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
r rest success failure = p rest (\f rest'-> q rest' (success . f)) failure
{-# INLINABLE (<*>) #-}
instance (Factorial.FactorialMonoid s, Ord s) => Alternative (Parser g s) where
empty = Parser (\rest _ failure-> failure $ ParseFailure (fromEnd $ Factorial.length rest) [] [])
(<|>) = alt
-- | A named and unconstrained version of the '<|>' operator
alt :: forall g s a. Ord s => Parser g s a -> Parser g s a -> Parser g s a
Parser p `alt` Parser q = Parser r where
r :: forall x. s -> (a -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
r rest success failure = p rest success' failure'
where success' a rest' _ = success a rest' failure'
failure' f1 = q rest success (\f2 -> failure (f1 <> f2))
instance Factorial.FactorialMonoid s => Filterable (Result g s) where
mapMaybe f (Parsed a rest) =
maybe (NoParse $ expected (fromEnd $ Factorial.length rest) "filter") (`Parsed` rest) (f a)
mapMaybe _ (NoParse failure) = NoParse failure
instance Factorial.FactorialMonoid s => Filterable (Parser g s) where
mapMaybe :: forall a b. (a -> Maybe b) -> Parser g s a -> Parser g s b
mapMaybe f (Parser p) = Parser q where
q :: forall x. s -> (b -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
q rest success failure = p rest (maybe filterFailure success . f) failure
where filterFailure _ _ = failure (expected (fromEnd $ Factorial.length rest) "filter")
{-# INLINABLE mapMaybe #-}
#if MIN_VERSION_base(4,13,0)
instance Monad (Parser g s) where
#else
instance Factorial.FactorialMonoid s => Monad (Parser g s) where
#endif
return = pure
(>>=) :: forall a b. Parser g s a -> (a -> Parser g s b) -> Parser g s b
Parser p >>= f = Parser r where
r :: forall x. s -> (b -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
r rest success failure = p rest (\a rest'-> applyParser (f a) rest' success) failure
#if MIN_VERSION_base(4,13,0)
instance Factorial.FactorialMonoid s => MonadFail (Parser g s) where
#endif
fail msg = Parser (\rest _ failure->
failure $ ParseFailure (fromEnd $ Factorial.length rest) [] [StaticDescription msg])
instance (Factorial.FactorialMonoid s, Ord s) => MonadPlus (Parser g s) where
mzero = empty
mplus = (<|>)
instance Semigroup x => Semigroup (Parser g s x) where
(<>) = liftA2 (<>)
instance Monoid x => Monoid (Parser g s x) where
mempty = pure mempty
mappend = liftA2 mappend
instance (Factorial.FactorialMonoid s, Ord s) => Parsing (Parser g s) where
try :: forall a. Parser g s a -> Parser g s a
try (Parser p) = Parser q
where q :: forall x. s -> (a -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
q input success failure = p input success (failure . rewindFailure)
where rewindFailure ParseFailure{} = ParseFailure (fromEnd $ Factorial.length input) [] []
(<?>) :: forall a. Parser g s a -> String -> Parser g s a
Parser p <?> msg = Parser q
where q :: forall x. s -> (a -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
q input success failure = p input success (failure . replaceFailure)
where replaceFailure (ParseFailure pos msgs erroneous) =
ParseFailure pos (if pos == fromEnd (Factorial.length input) then [StaticDescription msg]
else msgs) erroneous
eof = Parser p
where p rest success failure
| Null.null rest = success () rest failure
| otherwise = failure (expected (fromEnd $ Factorial.length rest) "end of input")
unexpected msg = Parser (\t _ failure ->
failure $ ParseFailure (fromEnd $ Factorial.length t) [] [StaticDescription msg])
notFollowedBy (Parser p) = Parser q
where q :: forall x. s -> (() -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
q input success failure = p input success' failure'
where success' _ _ _ = failure (expected (fromEnd $ Factorial.length input) "notFollowedBy")
failure' _ = success () input failure
instance (FactorialMonoid s, Ord s) => DeterministicParsing (Parser g s) where
(<<|>) :: forall a. Parser g s a -> Parser g s a -> Parser g s a
Parser p <<|> Parser q = Parser r where
r :: forall x. s -> (a -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
r rest success failure = p rest success' failure'
where success' a rest' _ = success a rest' failure
failure' f1 = q rest success (\f2 -> failure (f1 <> f2))
takeSome p = (:) <$> p <*> takeMany p
takeMany p = takeSome p <<|> pure []
instance (FactorialMonoid s, Ord s) => CommittedParsing (Parser g s) where
type CommittedResults (Parser g s) = ParseResults s
commit :: forall a. Parser g s a -> Parser g s (ParseResults s a)
commit (Parser p) = Parser q
where q :: forall x. s -> (ParseResults s a -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
q input success failure = p input (success . Right) failure'
where failure' f = success (Left f) input failure
admit :: forall a. Parser g s (ParseResults s a) -> Parser g s a
admit (Parser p) = Parser q
where q :: forall x. s -> (a -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
q input success failure = p input success' failure
where success' (Left f) _rest = const (failure f)
success' (Right a) rest = success a rest
instance (FactorialMonoid s, Ord s) => LookAheadParsing (Parser g s) where
lookAhead :: forall a. Parser g s a -> Parser g s a
lookAhead (Parser p) = Parser q
where q :: forall x. s -> (a -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
q input success failure = p input success' failure'
where success' a _ = success a input
failure' f = failure f
instance (Ord s, Show s, TextualMonoid s) => CharParsing (Parser g s) where
satisfy predicate = Parser p
where p :: forall x. s -> (Char -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p rest success failure =
case Textual.splitCharacterPrefix rest
of Just (first, suffix) | predicate first -> success first suffix failure
_ -> failure (expected (fromEnd $ Factorial.length rest) "Char.satisfy")
string s = Textual.toString (error "unexpected non-character") <$> string (fromString s)
text t = (fromString . Textual.toString (error "unexpected non-character")) <$> string (Textual.fromText t)
instance (Cancellative.LeftReductive s, Factorial.FactorialMonoid s, Ord s) => InputParsing (Parser g s) where
type ParserInput (Parser g s) = s
getInput = Parser p
where p rest success failure = success rest rest failure
anyToken = Parser p
where p rest success failure =
case Factorial.splitPrimePrefix rest
of Just (first, suffix) -> success first suffix failure
_ -> failure (expected (fromEnd $ Factorial.length rest) "anyToken")
satisfy predicate = Parser p
where p :: forall x. s -> (s -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p rest success failure =
case Factorial.splitPrimePrefix rest
of Just (first, suffix) | predicate first -> success first suffix failure
_ -> failure (expected (fromEnd $ Factorial.length rest) "satisfy")
notSatisfy predicate = Parser p
where p :: forall x. s -> (() -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p rest success failure =
case Factorial.splitPrimePrefix rest
of Just (first, _)
| predicate first -> failure (expected (fromEnd $ Factorial.length rest) "notSatisfy")
_ -> success () rest failure
scan :: forall state. state -> (state -> s -> Maybe state) -> Parser g s s
scan s0 f = Parser (p s0)
where p :: forall x. state -> s -> (s -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p s rest success failure = success prefix suffix failure
where (prefix, suffix, _) = Factorial.spanMaybe' s f rest
take n = Parser p
where p :: forall x. s -> (s -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p rest success failure
| (prefix, suffix) <- Factorial.splitAt n rest,
Factorial.length prefix == n = success prefix suffix failure
| otherwise = failure (expected (fromEnd $ Factorial.length rest) $ "take " ++ show n)
takeWhile predicate = Parser p
where p :: forall x. s -> (s -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p rest success failure | (prefix, suffix) <- Factorial.span predicate rest = success prefix suffix failure
takeWhile1 predicate = Parser p
where p :: forall x. s -> (s -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p rest success failure
| (prefix, suffix) <- Factorial.span predicate rest =
if Null.null prefix
then failure (expected (fromEnd $ Factorial.length rest) "takeWhile1")
else success prefix suffix failure
string s = Parser p where
p :: forall x. s -> (s -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p s' success failure
| Just suffix <- Cancellative.stripPrefix s s' = success s suffix failure
| otherwise = failure (ParseFailure (fromEnd $ Factorial.length s') [LiteralDescription s] [])
{-# INLINABLE string #-}
instance InputParsing (Parser g s) => TraceableParsing (Parser g s) where
traceInput :: forall a. (s -> String) -> Parser g s a -> Parser g s a
traceInput description (Parser p) = Parser q
where q :: forall x. s -> (a -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
q rest success failure = traceWith "Parsing " (p rest success' failure')
where traceWith prefix = trace (prefix <> description rest)
failure' f = traceWith "Failed " (failure f)
success' r suffix failure'' = traceWith "Parsed " (success r suffix failure'')
instance (Ord s, Show s, TextualMonoid s) => InputCharParsing (Parser g s) where
satisfyCharInput predicate = Parser p
where p :: forall x. s -> (s -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p rest success failure =
case Textual.splitCharacterPrefix rest
of Just (first, suffix) | predicate first -> success (Factorial.primePrefix rest) suffix failure
_ -> failure (expected (fromEnd $ Factorial.length rest) "satisfyChar")
notSatisfyChar predicate = Parser p
where p :: forall x. s -> (() -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p rest success failure =
case Textual.characterPrefix rest
of Just first | predicate first
-> failure (expected (fromEnd $ Factorial.length rest) "notSatisfyChar")
_ -> success () rest failure
scanChars :: forall state. state -> (state -> Char -> Maybe state) -> Parser g s s
scanChars s0 f = Parser (p s0)
where p :: forall x. state -> s -> (s -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p s rest success failure = success prefix suffix failure
where (prefix, suffix, _) = Textual.spanMaybe_' s f rest
takeCharsWhile predicate = Parser p
where p :: forall x. s -> (s -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p rest success failure
| (prefix, suffix) <- Textual.span_ False predicate rest = success prefix suffix failure
takeCharsWhile1 predicate = Parser p
where p :: forall x. s -> (s -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p rest success failure
| Null.null prefix = failure (expected (fromEnd $ Factorial.length rest) "takeCharsWhile1")
| otherwise = success prefix suffix failure
where (prefix, suffix) = Textual.span_ False predicate rest
-- | Continuation-passing context-free parser
--
-- @
-- 'parseComplete' :: ("Rank2".'Rank2.Functor' g, 'FactorialMonoid' s) =>
-- g (Continued.'Parser' g s) -> s -> g ('ParseResults' s)
-- @
instance (Cancellative.LeftReductive s, Factorial.FactorialMonoid s, Ord s) => MultiParsing (Parser g s) where
type ResultFunctor (Parser g s) = ParseResults s
-- | Returns an input prefix parse paired with the remaining input suffix.
parsePrefix g input = Rank2.fmap (Compose . (\p-> applyParser p input (\a rest _-> Right (rest, a)) Left)) g
parseComplete g input = Rank2.fmap (\p-> applyParser p input (const . const . Right) Left)
(Rank2.fmap (<* eof) g)
| blamario/grampa | grammatical-parsers/src/Text/Grampa/ContextFree/Continued.hs | bsd-2-clause | 16,355 | 0 | 18 | 4,373 | 6,277 | 3,228 | 3,049 | -1 | -1 |
import Controller
import Network.Wai.Handler.SimpleServer (run)
main :: IO ()
main = putStrLn "Loaded" >> withGloop (run 3000)
| jsgf/gloop | simple-server.hs | bsd-2-clause | 128 | 0 | 8 | 18 | 47 | 25 | 22 | 4 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
module SwaggerJSONGen where
import Data.Aeson as A
import Data.Text as T
import qualified Data.List as DL
import Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import GHC.Generics
import Data.Proxy
import qualified Data.HashMap.Strict.InsOrd as HMSIns
import qualified Data.Set as Set
import Safe
import Data.Maybe
import Network.HTTP.Types.Method
import Contract
import Types
import Data.Swagger
import Data.Swagger.Internal
import Data.Swagger.Declare
import Data.Swagger.Lens as SwaggerLens
import Data.Swagger.Operation
import Control.Arrow
import Control.Lens
swaggerJSON :: BSL.ByteString
swaggerJSON = do
let api = (mempty :: Swagger) & paths .~ (HMSIns.fromList [("/user", mempty & post ?~ (mempty
& SwaggerLens.tags .~ (Set.singleton "user")
& responses .~ (mempty & default_ .~ (Just $ Inline (Response "successful operation" Nothing (HMSIns.fromList []) Nothing ) )
& responses .~ (HMSIns.fromList []) )
& summary ?~ "Create user"
& description ?~ "This can only be done by the logged in user."
& operationId ?~ "createUser"
& produces ?~ MimeList ["application/json", "application/xml"]
& parameters .~ [ Inline $ mempty
& SwaggerLens.name .~ "body"
& description ?~ "Created user object"
& required ?~ True
& schema .~ (ParamBody $ Ref $ Reference "User") ] ) ),userUserNamePath, petFindByTagsPath] )
encode api
userUserNamePath :: (FilePath, PathItem)
userUserNamePath = ("/user", mempty
& get ?~ (mempty
& SwaggerLens.tags .~ (Set.singleton "user")
& responses .~ (mempty & default_ .~ (Just $ Inline (Response "successful operation" Nothing (HMSIns.fromList []) Nothing ) )
& responses .~ (HMSIns.fromList [(200, Inline $ mempty & description .~ "successful operation"
& schema ?~ (Ref $ Reference "User") ),
(400, Inline $ mempty & description .~ "Invalid Username supplied" ),
(404, Inline $ mempty & description .~ "User Not Found")]) )
& summary ?~ "Get user by user name"
& description ?~ "This can only be done by the logged in user."
& operationId ?~ "getUserByName"
& produces ?~ MimeList ["application/json", "application/xml"]
& parameters .~ [ Inline $ mempty
& SwaggerLens.name .~ "username"
& description ?~ "The name that needs to be fetched. User User1 for testing"
& required ?~ True
& schema .~ (ParamOther (ParamOtherSchema ParamPath Nothing (mempty & type_ .~ SwaggerString ) ) ) ] )
& put ?~ (mempty
& SwaggerLens.tags .~ (Set.singleton "user")
& responses .~ (mempty & default_ .~ (Just $ Inline (Response "successful operation" Nothing (HMSIns.fromList []) Nothing ) )
& responses .~ (HMSIns.fromList [(400, Inline $ mempty & description .~ "Invalid Username supplied" ),
(404, Inline $ mempty & description .~ "User Not Found")]) )
& summary ?~ "Updated user"
& description ?~ "This can only be done by the logged in user."
& operationId ?~ "updateUser"
& produces ?~ MimeList ["application/json", "application/xml"]
& parameters .~ [ Inline $ mempty
& SwaggerLens.name .~ "username"
& description ?~ "The name that needs to be updated"
& required ?~ True
& schema .~ (ParamOther (ParamOtherSchema ParamPath Nothing (mempty & type_ .~ SwaggerString ) ) ),
Inline $ mempty
& SwaggerLens.name .~ "body"
& description ?~ "Updated User Object"
& required ?~ True
& schema .~ (ParamBody $ Ref $ Reference "User") ] )
& SwaggerLens.delete ?~ (mempty
& SwaggerLens.tags .~ (Set.singleton "user")
& responses .~ (mempty & responses .~ (HMSIns.fromList [(400, Inline $ mempty & description .~ "Invalid Username supplied" ),
(404, Inline $ mempty & description .~ "User Not Found")]) )
& summary ?~ " Delete user"
& description ?~ "This can only be done by the logged in user."
& operationId ?~ "deleteUser"
& produces ?~ MimeList ["application/json", "application/xml"]
& parameters .~ [Inline $ mempty
& SwaggerLens.name .~ "username"
& description ?~ "The name that needs to be deleted"
& required ?~ True
& schema .~ (ParamOther (ParamOtherSchema ParamPath Nothing (mempty & type_ .~ SwaggerString ) ) )]
) )
data ApiTypeDetails = ApiTypeDetails
{
apiOut :: Text
, apiErr :: Maybe Text
, formParam :: Maybe Text
, queryParam :: Maybe Text
, fileParam :: Maybe Text
, headerIn :: Maybe Text
, requestBody :: Maybe Text
, contentTypes :: Maybe Text
-- TODO : Add path params to this?
-- TODO: cookie in/out and header out need to be added when we encounter them
} deriving (Eq, Show)
-- For each route type (e.g. UserUsernameR) we have a [(StdMethod, ApiTypeDetails)], which is walked to generate the arguments required by the constructPathOperation function.
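-- Illustrative example (the type names are assumptions, not defined here): a GET
-- route that returns a @User@, may fail with an @ApiError@ and takes a @Text@
-- query parameter could be described as
--
-- > getParamsAndResponsesFromRoute
-- >   [(GET, ApiTypeDetails "User" (Just "ApiError") Nothing (Just "Text") Nothing Nothing Nothing Nothing)]
-- >   "/user/{username}"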
getParamsAndResponsesFromRoute :: [(StdMethod, ApiTypeDetails)] -> FilePath -> (FilePath, PathItem)
getParamsAndResponsesFromRoute methodWithApiDetails routeName =
let completePathItem = DL.foldl' (\ currentPathItem (stdMethod, apiTypeDetails) -> do
let mFormParams = fmap (\formParamType -> constructParamSchema (Just ParamFormData) formParamType ) (formParam apiTypeDetails)
let mQueryParams = fmap (\qParamType -> constructParamSchema (Just ParamQuery) qParamType ) (queryParam apiTypeDetails)
let mHeaderParams = fmap (\headerParamType -> constructParamSchema (Just ParamHeader) headerParamType ) (headerIn apiTypeDetails)
let mBodyParams = fmap (\bodyParamType -> constructParamSchema (Nothing) bodyParamType ) (requestBody apiTypeDetails)
-- let mFileParams = TODO : Send as SwaggerFile type in FormData param type. Handle separately.
let paramList = DL.concat $ catMaybes [mFormParams, mQueryParams, mHeaderParams, mBodyParams]
              -- ApiOut becomes the 200 response, ApiErr the 400 response
let mSuccessResponse =
case apiOut apiTypeDetails of
"()" -> Nothing
responseType -> Just (200, constructRefResponse responseType)
let mErrorResponse = fmap (\errorType -> (400, constructRefResponse errorType ) ) (apiErr apiTypeDetails)
let responsesList = catMaybes [mSuccessResponse, mErrorResponse]
constructPathOperation stdMethod responsesList paramList currentPathItem
) (mempty::PathItem) methodWithApiDetails
in (routeName, completePathItem)
where
-- for Params, check if body or not, then construct value of ParamAnySchema.
-- If Body Param then the first argument will be `Nothing`
constructParamSchema :: Maybe ParamLocation -> Text -> [(Text, ParamAnySchema)]
constructParamSchema mParamLocation paramType =
case mParamLocation of
Just otherLocation -> paramOtherSchema otherLocation paramType
Nothing -> ("body",ParamBody $ Ref $ Reference paramType ):[] -- default name for body param is "body"
paramOtherSchema paramLocation pType =
case paramLocation of
ParamQuery -> paramOtherSchemaWithCustomData paramLocation pType
ParamFormData -> paramOtherSchemaWithCustomData paramLocation pType
_ ->
case pType `Prelude.elem` primitiveTypes of
True -> let (swaggerType, swFormat) = (getSwaggerTypeFromHType pType)
in ("",ParamOther $ mempty & in_ .~ paramLocation
& paramSchema .~ (mempty & (type_ .~ swaggerType)
& format .~ swFormat ) ):[]
False ->
case T.isPrefixOf "[" pType of
True -> do
let prefixStripped = fromJustNote "Type is Array. But no [ found" $ T.stripPrefix "[" pType
listBracketsRemovedType = fromJustNote "Type is Array. But no ] found" $ T.stripSuffix "]" prefixStripped
case listBracketsRemovedType `Prelude.elem` primitiveTypes of
True -> do -- construct ParamAnySchema with Array of Primitive SwaggerType
let (swaggerType, swFormat) = getSwaggerTypeFromHType listBracketsRemovedType
("", ParamOther $ mempty & in_ .~ paramLocation
& paramSchema .~ (mempty & (type_ .~ SwaggerArray)
& items ?~ SwaggerItemsPrimitive Nothing (mempty & type_ .~ swaggerType
& format .~ swFormat ) ) ):[]
False -> error "Encountered list of custom data type for Param. This needs to be handled!"
-- False -> -- construct ParamAnySchema with Ref type (unless it's a QueryParam)
-- check for Set or Collection here. for MultiCollection
paramOtherSchemaWithCustomData paramLocation paramType =
case paramType `Prelude.elem` primitiveTypes of
True -> let (swaggerType, swFormat) = (getSwaggerTypeFromHType paramType)
in ("", ParamOther $ mempty & in_ .~ paramLocation
& paramSchema .~ (mempty & (type_ .~ swaggerType)
& format .~ swFormat ) ):[]
False ->
case T.isPrefixOf "[" paramType of
True -> error $ "Encountered QueryParam or FormParam with a List. Debug Info: " ++ (show paramType)
False ->
let paramNamesWithTypes = getRecordNamesForQueryParam (Proxy::Proxy UserLoginRGETQueryParam) -- currently being used for all Form and Query params with custom data types
in fmap (\(paramName, paramType) -> (paramName, constructQueryFormParamSchema paramType paramLocation) ) paramNamesWithTypes
constructBasicParamSchema :: Text -> ParamLocation -> ParamAnySchema
constructBasicParamSchema pHaskellType paramLocation =
let (swaggerType, swFormat) = (getSwaggerTypeFromHType pHaskellType)
in (ParamOther $ mempty & in_ .~ paramLocation
& paramSchema .~ (mempty & (type_ .~ swaggerType)
& format .~ swFormat ) )
constructQueryFormParamSchema :: SwaggerType 'Data.Swagger.Internal.SwaggerKindParamOtherSchema -> ParamLocation -> ParamAnySchema
constructQueryFormParamSchema swaggerType pLocation =
(ParamOther $ mempty & in_ .~ pLocation
& paramSchema .~ (mempty & (type_ .~ swaggerType) ) )
constructRefResponse :: Text -> Referenced Response
constructRefResponse typeStr =
-- edgecase : when it's an array, we need to check if it's an array of primitive. Usually would not be the case.
case typeStr `Prelude.elem` primitiveTypes of
True ->
let (swaggerType, swaggerFormat) = getSwaggerTypeFromHType typeStr
in Inline $ mempty & schema ?~ (Inline $ mempty & paramSchema .~ (mempty & type_ .~ swaggerType
& format .~ swaggerFormat) )
False -> Ref $ Reference typeStr -- TODO: take care of arrays of custom types
getSwaggerTypeFromHType haskellType =
case haskellType of
"Text" -> (SwaggerString, Nothing)
"Bool" -> (SwaggerBoolean, Nothing)
"Day" -> (SwaggerString, Just "date")
"UTCTime" -> (SwaggerString, Just "date-time")
"ByteString" -> (SwaggerString, Just "byte")
"Float" -> (SwaggerNumber, Just "float")
"Double" -> (SwaggerNumber, Just "double")
"Int32" -> (SwaggerInteger, Just "int32")
"Int64" -> (SwaggerInteger, Just "int64")
primitiveTypes = ["Day", "UTCTime", "ByteString", "Text", "Float", "Double", "Int32", "Int64", "Bool"]
-- Build a PathItem operation for the given method from (response code, response) and (param name, param schema) pairs; the param name may be empty in some cases
constructPathOperation :: StdMethod -> [(Int, Referenced Response)] -> [(Text, ParamAnySchema)] -> PathItem -> PathItem
constructPathOperation stdMethod respCodeWithTypes paramNameWithTypes existingPathItem = do
case stdMethod of
GET -> existingPathItem & get ?~ constructPathItem
POST -> existingPathItem & post ?~ constructPathItem
PUT -> existingPathItem & put ?~ constructPathItem
PATCH -> existingPathItem & patch ?~ constructPathItem
DELETE -> existingPathItem & delete ?~ constructPathItem
where
constructPathItem = (mempty & responses .~ (mempty & responses .~ HMSIns.fromList respCodeWithTypes )
& produces ?~ MimeList ["application/json", "application/xml"]
& parameters .~ (processParams paramNameWithTypes) )
processParams paramList = flip fmap paramList (\(paramName, paramInfo) -> Inline $ mempty & SwaggerLens.name .~ paramName
-- & required ?~ True
& schema .~ paramInfo )
petFindByTagsPath :: (FilePath, PathItem)
petFindByTagsPath = ("/pet/findByTags", mempty
& get ?~ (mempty
& SwaggerLens.tags .~ (Set.singleton "pet")
& responses .~ (mempty & responses .~ (HMSIns.fromList [(200, Inline $ mempty & description .~ "successful operation"
& schema ?~ (Inline $ mempty & (paramSchema .~ (mempty & type_ .~ SwaggerArray
& items ?~ (SwaggerItemsObject (Ref $ Reference "Pet") ) ) ) ) ),
(400, Inline $ mempty & description .~ "Invalid Tag Value" )]) )
& summary ?~ "Finds Pets by tags"
& description ?~ "Muliple tags can be provided with comma separated strings. Use tag1, tag2, tag3 for testing."
& operationId ?~ "findPetsByTags"
& produces ?~ MimeList ["application/json", "application/xml"]
& parameters .~ [ Inline $ mempty
& SwaggerLens.name .~ "tags"
& description ?~ "Tags to filter by"
& required ?~ True
& schema .~ (ParamOther (ParamOtherSchema ParamQuery Nothing (mempty & type_ .~ SwaggerArray
& items ?~ SwaggerItemsPrimitive (Just CollectionMulti) (mempty & type_ .~ SwaggerString) ) ) ) ] ) )
declTestSwagger :: Declare (Definitions Schema) Swagger
declTestSwagger = do
userResp <- declareResponse (Proxy :: Proxy User)
stsResponse <- declareResponse (Proxy :: Proxy Status)
-- exRoute <- declareResponse (Proxy :: Proxy UserR)
pure $ mempty
userSwagger :: Swagger
userSwagger =
let (defs, spec) = runDeclare declTestSwagger mempty
in spec { _swaggerDefinitions = defs }
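-- Example (illustrative): render the assembled spec as JSON.
--
-- > BSL.putStr (A.encode userSwagger)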
getRecordNamesForQueryParam :: (ToSchema a) => Proxy a -> [(Text, SwaggerType 'Data.Swagger.Internal.SwaggerKindParamOtherSchema)]
getRecordNamesForQueryParam inputProxy = do
let (defs, _) = runDeclare (declareResponse inputProxy) mempty
case HMSIns.toList defs of
(dataName, dataSchema):[] -> do
let schemaPropertyList = HMSIns.toList $ _schemaProperties dataSchema
let (recordNames, refSchemaList) = Prelude.unzip schemaPropertyList
let typesList = fmap (\(Inline innerSchema) -> (convertToParamOtherSchema . _paramSchemaType . _schemaParamSchema) innerSchema ) refSchemaList
Prelude.zip recordNames typesList
_ -> error "Expecting only one element in the definitions list"
where
convertToParamOtherSchema :: SwaggerType t -> SwaggerType 'Data.Swagger.Internal.SwaggerKindParamOtherSchema
convertToParamOtherSchema inputSwaggerType =
case inputSwaggerType of
SwaggerString -> SwaggerString
SwaggerNumber -> SwaggerNumber
SwaggerInteger -> SwaggerInteger
SwaggerBoolean -> SwaggerBoolean
| byteally/webapi | webapi-swagger/src/SwaggerJSONGen.hs | bsd-3-clause | 17,180 | 10 | 39 | 5,340 | 3,732 | 1,985 | 1,747 | 249 | 18 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UndecidableInstances #-}
#if __GLASGOW_HASKELL__ >= 706
{-# LANGUAGE PolyKinds #-}
#endif
{-|
"Data.Implicit" provides both named and unnamed implicit parameters that
support default values (given by the 'Default' class from the @data-default@
package). It makes no use of the @ImplicitParams@ extension and instead
everything is done using type classes.
Here is an example of unnamed implicit parameters:
@
{\-\# LANGUAGE FlexibleContexts #-\}
import "Data.Implicit"
putParam :: 'Implicit_' String => IO ()
putParam = putStrLn $ \"Param was: \" ++ show ('param_' :: String)
@
We define @putParam@, which is a simple function which takes an implicit
parameter of type @String@, and prints it to the screen. The 'param_' function
is used to retrieve the unnamed implicit parameter of type @String@ from
@putParam@'s context. The type signature is necessary to force 'param_' to
return a @String@, as this cannot be inferred due to the polymorphism of
@show@.
>>> putParam
Param was ""
This is how we call @putParam@ without specifying its implicit parameters. If
an implicit parameter is left unspecified, its value is defaulted to 'def',
assuming that its type has a 'Default' instance. If not, then it is a type
error not to specify the value of an implicit parameter.
>>> putParam $~ "hello, world"
Param was "hello, world"
The operator '$~' takes a function @f@ and a value to which to set the
homotypic implicit parameter on @f@. It applies the implicit parameter to @f@
and returns the result. There is also a prefix version of @$~@ whose arguments
are flipped called 'setParam_'.
Here is an example of named implicit parameters:
@
{\-\# LANGUAGE DataKinds, FlexibleContexts, RankNTypes #-\}
import "Data.Implicit"
import "Data.Proxy"
foo :: Proxy \"foo\"
foo = Proxy
bar :: Proxy \"bar\"
bar = Proxy
putFooBar :: ('Implicit' \"foo\" String, 'Implicit' \"bar\" String) => IO ()
putFooBar = do
putStrLn $ \"foo was: \" ++ show (param foo :: String)
putStrLn $ \"bar was: \" ++ show (param bar :: String)
@
The 'Implicit' constraint is the named equivalent of 'Implicit_'. It takes an
additional argument @s@ to specify the name of the implicit parameter.
Implicit parameters can be \"named\" using any type (of any kind, on compilers
that support the @PolyKinds@ extension). (The above code uses type-level
strings of the @Symbol@ kind from the "GHC.TypeLits" module, which is the
recommended way to name implicit parameters. However, @Symbol@ requires the
@DataKinds@ extension and at least version 7.8 of GHC (7.6 is broken; see GHC
bug \#7502), so you are free to use other types of other kinds if you want to
support older versions of GHC.) 'param' and 'setParam' work like their unnamed
counterparts 'param_' and 'setParam_', but they also take a proxy argument to
specify the name of the implicit parameter. The code above defines @foo@ and
@bar@ to hide away the (slightly ugly) proxy stuff.
>>> putFooBar
foo was: ""
bar was: ""
Once again, the defaults of unspecified implicit parameters are given by the
'Default' class.
>>> setParam foo "hello, world" putFooBar
foo was: "hello, world"
bar was: ""
>>> setParam bar "goodbye" $ setParam foo "hello, world" putFooBar
foo was: "hello, world"
bar was: "goodbye"
An infix version of @setParam@ is also provided, '~$'. Using @~$@, the above
example would be:
>>> putFooBar ~$ foo ~$ bar $$ "goodbye" $$ "hello, world"
foo was: "hello, world"
bar was: "goodbye
-}
module Data.Implicit
( Implicit
, param
, setParam
, (~$)
, (~..)
, ($$)
, Implicit_
, param_
, setParam_
, ($~)
, (~.)
)
where
import Data.Default.Class (Default, def)
import Unsafe.Coerce (unsafeCoerce)
------------------------------------------------------------------------------
-- | The constraint @'Implicit' \"foo\" String@ on a function @f@ indicates
-- that an implicit parameter named @\"foo\"@ of type @String@ is passed to
-- @f@.
class Implicit s a where
-- | 'param' retrieves the implicit parameter named @s@ of type @a@ from
-- the context @'Implicit' s a@. The name @s@ is specified by a proxy
-- argument passed to @param@.
param :: proxy s -> a
------------------------------------------------------------------------------
instance Default a => Implicit s a where
param _ = def
------------------------------------------------------------------------------
newtype Param s a b = Param (Implicit s a => b)
------------------------------------------------------------------------------
-- | 'setParam' supplies a value for an implicit parameter named @s@ to a
-- function which takes a homotypic and homonymous implicit parameter. The
-- name @s@ is specified by a proxy argument passed to @setParam@.
setParam :: forall a b proxy s. proxy s -> a -> (Implicit s a => b) -> b
setParam _ a f = unsafeCoerce (Param f :: Param s a b) (const a)
{-# INLINE setParam #-}
------------------------------------------------------------------------------
-- | An infix version of 'setParam' with flipped arguments.
(~$) :: forall a b proxy s. (Implicit s a => b) -> proxy s -> a -> b
infixl 1 ~$
(~$) f _ a = unsafeCoerce (Param f :: Param s a b) (const a)
{-# INLINE (~$) #-}
------------------------------------------------------------------------------
-- | Modify a named implicit parameter.
(~..) :: Implicit s a => (Implicit s b => c) -> proxy s -> (a -> b) -> c
(~..) f proxy g = f ~$ proxy $$ g (param proxy)
infixl 8 ~..
{-# INLINE (~..) #-}
------------------------------------------------------------------------------
-- | A left-associated version of '$'.
($$) :: (a -> b) -> a -> b
infixl 0 $$
($$) = id
{-# INLINE ($$) #-}
------------------------------------------------------------------------------
-- | The constraint @'Implicit_' String@ on a function @f@ indicates that an
-- unnamed implicit parameter of type @String@ is passed to @f@.
class Implicit_ a where
-- | 'param_' retrieves the unnamed implicit parameter of type @a@ from
-- the context @'Implicit_' a@.
param_ :: a
------------------------------------------------------------------------------
instance Default a => Implicit_ a where
param_ = def
------------------------------------------------------------------------------
newtype Param_ a b = Param_ (Implicit_ a => b)
------------------------------------------------------------------------------
-- | 'setParam_' supplies a value for an unnamed implicit parameter to a
-- function which takes a homotypic implicit parameter.
setParam_ :: forall a b. a -> (Implicit_ a => b) -> b
setParam_ a f = unsafeCoerce (Param_ f :: Param_ a b) a
{-# INLINE setParam_ #-}
------------------------------------------------------------------------------
-- | An infix version of 'setParam_' with flipped arguments.
($~) :: forall a b. (Implicit_ a => b) -> a -> b
infixl 1 $~
f $~ a = unsafeCoerce (Param_ f :: Param_ a b) a
{-# INLINE ($~) #-}
------------------------------------------------------------------------------
-- | Modify an unnamed implicit parameter.
(~.) :: Implicit_ a => (Implicit_ b => c) -> (a -> b) -> c
f ~. g = f $~ g param_
infixl 8 ~.
{-# INLINE (~.) #-}
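------------------------------------------------------------------------------
-- Illustrative example combining '~.' with '$~', reusing @putParam@ from the
-- introduction above:
--
-- >>> putParam ~. (++ "!") $~ "hi"
-- Param was: "hi!"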
| duairc/implicit-params | src/Data/Implicit.hs | bsd-3-clause | 7,406 | 0 | 11 | 1,281 | 724 | 421 | 303 | 57 | 1 |
module Arhelk.Russian.Lemma.Data(
RawWord
, SemiProcWord(..)
, Word(..)
, Lemma(..)
, SentenceClause
, Sentence
, lemmaWord
, MoreSpecialized(..)
, glueLessSpecialized
, module X
) where
import Arhelk.Russian.Lemma.Data.Adjective as X
import Arhelk.Russian.Lemma.Data.Adverb as X
import Arhelk.Russian.Lemma.Data.Common as X
import Arhelk.Russian.Lemma.Data.Particle as X
import Arhelk.Russian.Lemma.Data.Substantive as X
import Arhelk.Russian.Lemma.Data.Verb as X
import Data.Monoid
import Data.Text as T
import Data.Type.Equality
import Prelude as P hiding (Word)
import qualified Data.Foldable as F
import TextShow
-- | Raw word, not yet processed
type RawWord = Text
-- | Shortcut for a semi-processed word: either still raw or already parsed
newtype SemiProcWord = SemiProcWord { unSemiProcWord :: Either RawWord Word }
deriving (Eq, Show)
instance TextShow SemiProcWord where
showb (SemiProcWord (Left t)) = fromText t <> " (?)"
showb (SemiProcWord (Right w)) = showb w
-- | Parsed word with detached prefixes and postfixes and a determined part of speech
data Word =
  -- | Lemma from a single word
OneWord {
-- | Initial full word
wordSource :: Text
    -- | Semantic root
, wordRoot :: Text
-- | Semantic prefixes
, wordPrefixes :: [Text]
-- | Semantic postfixes
, wordPostfixes :: [Text]
-- | Semantic ending
, wordEndings :: Text
}
  -- | Lemma from many words (e.g. particles)
| MultiWord {
-- | Initial full word
wordSources :: [Text]
}
deriving (Eq, Show)
-- | Total function returning the original string of a word
getWordSource :: Word -> Text
getWordSource OneWord{..} = wordSource
getWordSource MultiWord{..} = T.unwords wordSources
instance TextShow Word where
showb w = case w of
OneWord{..} -> fromText wordSource
MultiWord{..} -> fromText (T.unwords wordSources)
-- | A lemma is a single semantic unit (a list of words) that
-- corresponds to a part of speech
data Lemma a =
  UnknownWord a -- ^ Unknown part of speech
  | Substantive a SubstantiveProperties -- ^ Noun (существительное)
  | Adjective a AdjectiveProperties -- ^ Adjective (прилагательное)
  | Numeral a -- ^ Numeral (числительное)
  | Pronoun a -- ^ Pronoun (местоимение)
  | Verb a VerbProperties -- ^ Verb (глагол)
  | Adverb a AdverbProperties -- ^ Adverb (наречие)
  | Preposition a -- ^ Preposition (предлог)
  | Conjunction a -- ^ Conjunction (союз)
  | GrammarParticle a ParticleProperties -- ^ Particle (частица)
  | Interjection a -- ^ Interjection (междометие)
  | Participle a -- ^ Participle (причастие)
  | Transgressive a -- ^ Adverbial participle (деепричастие)
  deriving (Eq, Show)
-- | Sentence clause, a region enclosed by commas
type SentenceClause a = [Lemma a]
-- | Any sentence is a list of sentence clauses
type Sentence a = [SentenceClause a]
instance {-# OVERLAPPABLE #-} (TextShow a, (a == Text) ~ False)
=> TextShow (Lemma a) where
showb p = case p of
UnknownWord a -> showb a
Substantive a p -> showb a <> " (сущ., " <> showb p <> ")"
Adjective a p -> showb a <> " (прил.," <> showb p <> ")"
Numeral a -> showb a <> " (числ.)"
Pronoun a -> showb a <> " (мест.)"
Verb a p -> showb a <> " (гл., " <> showb p <> ")"
Adverb a p -> showb a <> " (нар., " <> showb p <> ")"
Preposition a -> showb a <> " (предл.)"
Conjunction a -> showb a <> " (союз)"
GrammarParticle a p -> showb a <> " (част., " <> showb p <> ")"
Interjection a -> showb a <> " (межд.)"
Participle a -> showb a <> " (прич.)"
Transgressive a -> showb a <> " (деепр.)"
instance TextShow (Lemma Text) where
showb p = case p of
UnknownWord a -> fromText a
Substantive a p -> fromText a <> " (сущ., " <> showb p <> ")"
Adjective a p -> fromText a <> " (прил.," <> showb p <> ")"
Numeral a -> fromText a <> " (числ.)"
Pronoun a -> fromText a <> " (мест.)"
Verb a p -> fromText a <> " (гл., " <> showb p <> ")"
Adverb a p -> fromText a <> " (нар., " <> showb p <> ")"
Preposition a -> fromText a <> " (предл.)"
Conjunction a -> fromText a <> " (союз)"
GrammarParticle a p -> fromText a <> " (част., " <> showb p <> ")"
Interjection a -> fromText a <> " (межд.)"
Participle a -> fromText a <> " (прич.)"
Transgressive a -> fromText a <> " (деепр.)"
instance Functor Lemma where
fmap f l = case l of
UnknownWord a -> UnknownWord (f a)
Substantive a p -> Substantive (f a) p
Adjective a p -> Adjective (f a) p
Numeral a -> Numeral (f a)
Pronoun a -> Pronoun (f a)
Verb a p -> Verb (f a) p
Adverb a p -> Adverb (f a) p
Preposition a -> Preposition (f a)
Conjunction a -> Conjunction (f a)
GrammarParticle a p -> GrammarParticle (f a) p
Interjection a -> Interjection (f a)
Participle a -> Participle (f a)
Transgressive a -> Transgressive (f a)
-- | Extract lemma content
lemmaWord :: Lemma a -> a
lemmaWord l = case l of
UnknownWord a -> a
Substantive a _ -> a
Adjective a _ -> a
Numeral a -> a
Pronoun a -> a
Verb a _ -> a
Adverb a _ -> a
Preposition a -> a
Conjunction a -> a
GrammarParticle a _ -> a
Interjection a -> a
Participle a -> a
Transgressive a -> a
class Eq a => MoreSpecialized a where
  -- | Check whether the first value describes the same thing as the second but provides more information
moreSpecialized :: a -> a -> Bool
instance MoreSpecialized SemiProcWord where
moreSpecialized (SemiProcWord (Right a)) (SemiProcWord (Left b)) = getWordSource a == b
moreSpecialized a b = a == b
instance MoreSpecialized a => MoreSpecialized (Lemma a) where
moreSpecialized a (UnknownWord b) = moreSpecialized (lemmaWord a) b
moreSpecialized a b = a == b
instance MoreSpecialized a => MoreSpecialized [a] where
moreSpecialized a b
| P.length a /= P.length b = False
| otherwise = and $ uncurry moreSpecialized <$> a `P.zip` b
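-- | Collapse a list by dropping every element for which a more specialized
-- version (see 'moreSpecialized') is also present, keeping only the most
-- specialized occurrences.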
glueLessSpecialized :: MoreSpecialized a => [a] -> [a]
glueLessSpecialized = P.reverse . F.foldl' go []
where
go !acc a = if or $ fmap (`moreSpecialized` a) acc
then acc
else a : P.filter (not . (a `moreSpecialized`)) acc
| Teaspot-Studio/arhelk-russian | src/Arhelk/Russian/Lemma/Data.hs | bsd-3-clause | 6,131 | 0 | 12 | 1,401 | 1,942 | 997 | 945 | -1 | -1 |
-- module Main where
module Network.LibRSync where
import Control.Applicative((<$>),(<*>),pure)
import Control.Monad
import Control.Monad.IO.Class
import Data.ByteString
import Data.Conduit
import Foreign.Marshal.Alloc
import Network.LibRSync.Internal
--------------------------------------------------------------------------------
signature :: MonadResource m => FilePath -> Source m Signature
signature path = bracketP (initSignature path) finalizeSignature signatureSource
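-- Example (illustrative; assumes Data.Conduit.List is imported as @CL@ and
-- 'runResourceT' is in scope from Control.Monad.Trans.Resource):
--
-- > runResourceT $ signature "path/to/file" $$ CL.consume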
initSignature :: FilePath -> IO RSyncSignatureState
initSignature path = do
state <- malloc :: IO RSyncSignatureState
rsres <- cInitSignature path state
return state
-- TODO: check what to do with rsres: if RsError we should throw an error or so
-- case rsres of
-- RsDone ->
finalizeSignature :: RSyncSignatureState -> IO ()
finalizeSignature state = cFinalizeSignature state >> free state
signatureSource :: MonadResource m => RSyncSignatureState -> Source m Signature
signatureSource state = liftIO (status state) >>= \s -> case s of
RsBlocked -> do
liftIO $ cSignatureChunk state True
buf <- liftIO $ outputBuf state
-- TODO: verify that we really execute this.
chunk <- liftIO $ getData buf
yield chunk
signatureSource state
RsDone -> return ()
  _ -> error "signatureSource: unexpected librsync status"
--------------------------------------------------------------------------------
delta :: FilePath -> Signature -> IO (Maybe Delta)
delta = undefined
--------------------------------------------------------------------------------
patch :: MonadResource m => FilePath -> FilePath -> Sink Delta m ()
patch inPath outPath = bracketP (initPatch inPath outPath) finalizePatch patchSink
initPatch :: FilePath -> FilePath -> IO RSyncPatchState
initPatch inPath outPath = do
state <- malloc :: IO RSyncPatchState
rsres <- cInitPatch inPath outPath state
-- TODO: checks on rsres
return state
finalizePatch :: RSyncPatchState -> IO ()
finalizePatch state = cFinalizePatch state >> free state
patchSink :: MonadResource m => RSyncPatchState -> Sink Delta m ()
patchSink state = await >>= \mdelta -> case mdelta of
Nothing -> patchSink' empty True
Just delta -> patchSink' delta False >> patchSink state
where
patchSink' delta eof = liftIO . useAsCInMemoryBuffer delta $ \deltaBuf ->
updateDeltaState state deltaBuf eof >>
cPatchChunk state
| noinia/hlibrsync | src/Network/LibRSync.hs | bsd-3-clause | 2,815 | 0 | 14 | 835 | 615 | 304 | 311 | 45 | 3 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoImplicitPrelude, UnicodeSyntax, LambdaCase #-}
module Help.UI.WebSearch where
import Help.Imports hiding (find)
import qualified Prelude as P
import Help.Settings
import Yesod hiding (count)
import Yesod.Form.Jquery
import Network.Wai.Handler.Warp (run)
import Data.Text (init, tail)
import Data.Text.Read (decimal)
import Control.Lens.Getter ((^.))
import Database.MongoDB as M
-- |Start a web application to query the database and return log entries
webSearch ∷ Settings → IO ()
webSearch s = run 3000 =<< (toWaiApp $ Minimal s)
data Minimal = Minimal { settings ∷ Settings }
-- This instance is required to use forms. You can modify renderMessage to
-- achieve customized and internationalized form validation messages.
instance RenderMessage Minimal FormMessage where
renderMessage _ _ = defaultFormMessage
-- mkYesodData "Minimal" [parseRoutes|
-- / QueryR GET
-- |]
-- mkYesodDispatch "Minimal" resourcesApp
instance YesodJquery Minimal
mkYesod "Minimal" [parseRoutes|
/ QueryR GET
|]
instance Yesod Minimal where
defaultLayout widget = do
master <- getYesod
pc <- widgetToPageContent $ do
addScriptEither $ urlJqueryJs master
addStylesheetRemote "http://netdna.bootstrapcdn.com/twitter-bootstrap/2.3.2/css/bootstrap-combined.min.css"
addScriptRemote "http://netdna.bootstrapcdn.com/twitter-bootstrap/2.3.2/js/bootstrap.min.js"
widget
giveUrlRenderer [hamlet|
$doctype 5
<html lang="en">
<head>
<meta charset="utf-8">
        $# no-good wolf
<title>#{pageTitle pc}
<meta name="description" content="HELP -- Haskell Enterprise Logging Platform">
<meta name="author" content="Elliot Robinson">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
^{pageHead pc}
<body>
<div .container style="width=80%">
^{pageBody pc}
|]
--getBootstrapR ∷ Handler Html
--getBootstrapR = defaultLayout $ do
-- toWidget $(luciusFile "Help/UI/WebSearch/bootstrap.lucius")
-- toWidget $(juliusFile "Help/UI/WebSearch/bootstrap.julius")
data QueryType = WithSearch { _t ∷ Text, skipRecs ∷ Word32 }
| WithoutSearch { skipRecs ∷ Word32 }
getQueryR ∷ Handler Html
getQueryR = do
v1 ← lookupGetParam "query"
v2 ← (maybe (Right (0 ∷ Word32,"")) decimal) <$> (lookupGetParam "skip")
let input = case (v1, v2) of
(Just q, s) → WithSearch q $ either (const 0) (fromIntegral . fst) s
(_, s) → WithoutSearch $ either (const 0) (fromIntegral . fst) s
s ← settings <$> getYesod
pipe ← liftIO $ runIOE $ connect (host $ s^.mongoHost)
eResults ← liftIO $ access pipe slaveOk (s^.database) $ do
curs ← case input of
(WithSearch query skipRec) → find $ (select ["$or" :=
(M.Array [ Doc ["message" =: Regex query ""]
, Doc ["status" =: Regex query ""]])] "default") {M.sort = ["$natural" := Int32 (-1)], limit = 10, skip = skipRec}
(WithoutSearch skipRec) → find $ (select [] "default") {skip = skipRec, limit = 10, M.sort = ["$natural" := Int32 (-1)]}
results <- rest curs
closeCursor curs
return $ map (exclude ["_id"]) results
liftIO $ close pipe
results ← case eResults of
(Right r) → return r
(Left f) → invalidArgs [show f]
defaultLayout $ do
setTitle "Query"
[whamlet|
<header .clearfix>
<h3 .pull-right .muted>
Haskell Enterprise Logging Platform
<h3 .pull-left>HELP
<hr>
<div #form>
<form method=get action=@{QueryR}#form .form-search>
<input type=hidden name=skip value=0>
                    <input type=search name=query value=#{fromMaybe "" v1} autofocus .input-medium .search-query>
<button type=submit .btn>
Search
<div #results>
<table .table .table-striped>
<tbody>
$forall document <- results
<tr>
$forall field <- document
<td>
$# <span rel="tooltip" data-toggle="tooltip" title=#{retLabel field}>
<small .muted>
#{retLabel field}
<br>
#{retValue field}
<div #nav>
$if skipRecs input >= 10
<div .pull-left>
<form method=get action=@{QueryR}#form >
<input type=hidden name=skip value=#{P.show $ skipRecs input - 10}>
<input type=hidden name=query value=#{fromMaybe "" v1} autofocus>
<input type=submit .textButton value="Prev 10">
<div .pull-right>
<form method=get action=@{QueryR}#form >
<input type=hidden name=skip value=#{P.show $ skipRecs input + 10}>
<input type=hidden name=query value=#{fromMaybe "" v1} autofocus>
<input type=submit .textButton value="Next 10">
|]
toWidget $ [julius|
$(document).ready(function () {
$("[rel=tooltip]").tooltip();
});
|]
toWidget $ [cassius|
.textButton
border: none;
background-color: transparent;
padding: 0;
text-decoration: underline; /* if desired */
color: #00c; /* or whatever other color you want */
|]
retValue ∷ M.Field → Text
retValue = tail . init . show . value
retLabel ∷ M.Field → Text
retLabel = label
| argiopetech/help | Help/UI/WebSearch.hs | bsd-3-clause | 5,827 | 0 | 28 | 1,675 | 906 | 489 | 417 | 66 | 4 |
-- Takes a reversed log file on the standard input and outputs a web page.
module Distribution.Server.Pages.Recent (
recentPage,
recentFeed,
revisionsPage,
recentRevisionsFeed
) where
import Distribution.Server.Packages.Types
import qualified Distribution.Server.Users.Users as Users
import Distribution.Server.Users.Users (Users)
import Distribution.Server.Pages.Template
( hackagePageWithHead )
import Distribution.Package
( PackageIdentifier, packageName, packageVersion )
import Distribution.PackageDescription
( GenericPackageDescription(packageDescription)
, PackageDescription(synopsis) )
import Distribution.Text
( display )
import qualified Text.XHtml.Strict as XHtml
import Text.XHtml
( Html, URL, (<<), (!) )
import qualified Text.RSS as RSS
import Text.RSS
( RSS(RSS) )
import Network.URI
( URI(..), uriToString )
import Data.Time.Clock
( UTCTime, addUTCTime )
import Data.Time.Format
( formatTime )
import Data.Time.Locale.Compat
( defaultTimeLocale )
import Data.Maybe
( listToMaybe)
-- | Takes a list of package info, in reverse order by timestamp.
--
recentPage :: Users -> [PkgInfo] -> Html
recentPage users pkgs =
let log_rows = map (makeRow users) (take 20 pkgs)
docBody = [XHtml.h2 << "Recent additions",
XHtml.table ! [XHtml.align "center"] << log_rows,
XHtml.anchor ! [XHtml.href recentRevisionsURL] << XHtml.toHtml "Recent revisions"]
rss_link = XHtml.thelink ! [XHtml.rel "alternate",
XHtml.thetype "application/rss+xml",
XHtml.title "Hackage RSS Feed",
XHtml.href rssFeedURL] << XHtml.noHtml
in hackagePageWithHead [rss_link] "recent additions" docBody
revisionsPage :: Users -> [PkgInfo] -> Html
revisionsPage users pkgs =
let log_rows = map (makeRevisionRow users) (take 40 pkgs)
docBody = [XHtml.h2 << "Recent cabal metadata revisions",
XHtml.table ! [XHtml.align "center"] << log_rows]
rss_link = XHtml.thelink ! [XHtml.rel "alternate",
XHtml.thetype "application/rss+xml",
XHtml.title "Hackage Revisions RSS Feed",
XHtml.href revisionsRssFeedURL] << XHtml.noHtml
in hackagePageWithHead [rss_link] "recent revisions" docBody
makeRow :: Users -> PkgInfo -> Html
makeRow users pkginfo =
XHtml.tr <<
[XHtml.td ! [XHtml.align "right"] <<
[XHtml.toHtml (showTime time), nbsp, nbsp],
XHtml.td ! [XHtml.align "left"] << display user,
XHtml.td ! [XHtml.align "left"] <<
[nbsp, nbsp, XHtml.anchor !
[XHtml.href (packageURL pkgid)] << display pkgid]]
where
nbsp = XHtml.primHtmlChar "nbsp"
user = Users.userIdToName users userId
(time, userId) = pkgOriginalUploadInfo pkginfo
pkgid = pkgInfoId pkginfo
makeRevisionRow :: Users -> PkgInfo -> Html
makeRevisionRow users pkginfo =
XHtml.tr <<
[XHtml.td ! [XHtml.align "right"] <<
[XHtml.toHtml (showTime time), nbsp, nbsp],
XHtml.td ! [XHtml.align "left"] << [XHtml.toHtml ("-r" ++ show (pkgNumRevisions pkginfo - 1)), nbsp, nbsp],
XHtml.td ! [XHtml.align "left"] << display user,
XHtml.td ! [XHtml.align "left"] <<
[nbsp, nbsp, XHtml.anchor !
[XHtml.href (packageURL pkgid ++ "/revisions")] << display pkgid]]
where
nbsp = XHtml.primHtmlChar "nbsp"
user = Users.userIdToName users userId
(time, userId) = pkgLatestUploadInfo pkginfo
pkgid = pkgInfoId pkginfo
showTime :: UTCTime -> String
showTime = formatTime defaultTimeLocale "%c"
-- | URL describing a package.
packageURL :: PackageIdentifier -> URL
packageURL pkgid = "/package/" ++ display pkgid
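-- For example, a (hypothetical) package identifier that 'display's as
-- @foo-1.0@ is mapped to @"/package/foo-1.0"@.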
rssFeedURL :: URL
rssFeedURL = "/recent.rss"
recentAdditionsURL :: URL
recentAdditionsURL = "/recent.html"
revisionsRssFeedURL :: URL
revisionsRssFeedURL = "/packages/recent/revisions.rss"
recentRevisionsURL :: URL
recentRevisionsURL = "/packages/recent/revisions.html"
recentFeed :: Users -> URI -> UTCTime -> [PkgInfo] -> RSS
recentFeed users hostURI now pkgs = RSS
"Recent additions"
(hostURI { uriPath = recentAdditionsURL})
desc
(channel updated)
(map (releaseItem users hostURI) pkgList)
where
desc = "The 20 most recent additions to Hackage (or last 48 hours worth, whichever is greater), the Haskell package database."
twoDaysAgo = addUTCTime (negate $ 60 * 60 * 48) now
pkgListTwoDays = takeWhile (\p -> pkgLatestUploadTime p > twoDaysAgo) pkgs
pkgList = if (length pkgListTwoDays > 20) then pkgListTwoDays else take 20 pkgs
updated = maybe now (fst . pkgOriginalUploadInfo) (listToMaybe pkgList)
recentRevisionsFeed :: Users -> URI -> UTCTime -> [PkgInfo] -> RSS
recentRevisionsFeed users hostURI now pkgs = RSS
"Recent revisions"
(hostURI { uriPath = recentRevisionsURL})
desc
(channel updated)
(map (revisionItem users hostURI) pkgList)
where
desc = "The 40 most recent revisions to cabal metadata in Hackage (or last 48 hours worth, whichever is greater), the Haskell package database."
twoDaysAgo = addUTCTime (negate $ 60 * 60 * 48) now
pkgListTwoDays = takeWhile (\p -> pkgLatestUploadTime p > twoDaysAgo) pkgs
pkgList = if (length pkgListTwoDays > 40) then pkgListTwoDays else take 40 pkgs
updated = maybe now (fst . pkgOriginalUploadInfo) (listToMaybe pkgList)
channel :: UTCTime -> [RSS.ChannelElem]
channel updated =
[ RSS.Language "en"
, RSS.ManagingEditor email
, RSS.WebMaster email
, RSS.ChannelPubDate updated
, RSS.LastBuildDate updated
, RSS.Generator "rss-feed"
]
where
email = "[email protected]" --TODO: make this configurable
releaseItem :: Users -> URI -> PkgInfo -> [RSS.ItemElem]
releaseItem users hostURI pkgInfo =
[ RSS.Title title
, RSS.Link uri
, RSS.Guid True (uriToString id uri "")
, RSS.PubDate time
, RSS.Description desc
]
where
uri = hostURI { uriPath = packageURL pkgId }
title = display (packageName pkgId) ++ " " ++ display (packageVersion pkgId)
body = synopsis (packageDescription (pkgDesc pkgInfo))
desc = "<i>Added by " ++ display user ++ ", " ++ showTime time ++ ".</i>"
++ if null body then "" else "<p>" ++ body
user = Users.userIdToName users userId
(time, userId) = pkgOriginalUploadInfo pkgInfo
pkgId = pkgInfoId pkgInfo
revisionItem :: Users -> URI -> PkgInfo -> [RSS.ItemElem]
revisionItem users hostURI pkgInfo =
[ RSS.Title title
, RSS.Link uri
, RSS.Guid True (uriToString id guid "")
, RSS.PubDate time
, RSS.Description desc
]
where
uri = hostURI { uriPath = packageURL pkgId ++ "/revisions"}
guid = hostURI { uriPath = packageURL pkgId ++ "/revision/" ++ show revision }
title = display (packageName pkgId) ++ " " ++ display (packageVersion pkgId)
body = "Revision #" ++ show revision
desc = "<i>Revised by " ++ display user ++ ", " ++ showTime time ++ ".</i>"
++ if null body then "" else "<p>" ++ body
user = Users.userIdToName users userId
revision = pkgNumRevisions pkgInfo - 1
(time, userId) = pkgLatestUploadInfo pkgInfo
pkgId = pkgInfoId pkgInfo
| edsko/hackage-server | Distribution/Server/Pages/Recent.hs | bsd-3-clause | 7,363 | 0 | 15 | 1,715 | 2,065 | 1,108 | 957 | 158 | 2 |
{-# LANGUAGE UnicodeSyntax #-}
import Test.QuickCheck
-- import Test.Framework
-- import Test.Framework.Providers.HUnit
import Control.Monad
import Data.Monoid
import AXT.TicTacToe.Field as FA(findFreePos, getFreePos)
import AXT.TicTacToe.Rules (isEnd)
import AXT.TicTacToe.Types as GT (CoorOnField, Field(F), State(..), StepResult(..), toCoorOnField)
import Prelude.Unicode
import TestData as TD (fields)
import Test.HUnit
import Test.Hspec (hspec)
import qualified Tests.TicTacToe.Actions as SGA (spec)
-- import Utils
-- prop_reverseReverse :: [Int] -> Bool
-- prop_reverseReverse xs = reverse (reverse xs) == xs
import Helpers (mapTests)
isEndTest = mapTests " isEnd " $ [ (XWIN , isEnd (toCoorOnField 2 0) O $ F ["XO ", "XO ", " "]),
(GA , isEnd (toCoorOnField 1 2) O $ F ["XOX", " ", " "]),
(WARNING1 , isEnd (toCoorOnField 0 2) O $ F ["XOX", " ", " "]),
(ERROR1 , isEnd (toCoorOnField 0 0) X $ F ["XOO", " ", " "]),
(ERROR1 , isEnd (toCoorOnField 0 0) X $ F ["XXX", " ", " "]),
(ERROR1 , isEnd (toCoorOnField 0 2) X $ F ["XXX", " ", " "])]
testGetFreePos = let
gf1 = getFreePos (head fields)
rlgf1 = [toCoorOnField 1 0, toCoorOnField 1 1, toCoorOnField 1 2, toCoorOnField 2 0, toCoorOnField 2 1, toCoorOnField 2 2]
in [TestCase $ assertEqual " Test 1 getFreePos" rlgf1 gf1]
main :: IO Counts
main = runTestTT . TestList $ isEndTest ++ testGetFreePos ++ SGA.spec
| xruzzz/axt-tic-tac-toe-gl-haskell | test/Spec.hs | bsd-3-clause | 1,662 | 0 | 11 | 511 | 501 | 287 | 214 | 25 | 1 |
module SymbolicDifferentiation.AlphaSyntax where
-- | Variable in an expression.
type Var = String
-- | Expression.
data Exp
= N Int -- ^ number
| V Var -- ^ variable
| Plus Exp Exp -- ^ sum
| Times Exp Exp -- ^ product
deriving (Show, Eq)
-- | Derivative of expression with respect to a variable.
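--
-- For example, differentiating /x*x/ with respect to @"x"@:
-- @deriv (Times (V "x") (V "x")) "x"@ reduces to @Plus (V "x") (V "x")@
-- thanks to the smart constructors below.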
deriv :: Exp -> Var -> Exp
deriv (N _) _ = N 0
deriv (V v') v = N (if v' == v then 1 else 0)
deriv (Plus e1 e2) v = plus (deriv e1 v) (deriv e2 v)
deriv (Times e1 e2) v = plus (times e1 (deriv e2 v))
(times (deriv e1 v) e2)
-- | Smart constructor that simplifies while combining subexpressions.
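-- For example, @plus (N 0) (V "x")@ is just @V "x"@, and @plus (N 2) (N 3)@
-- is folded to @N 5@.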
plus :: Exp -> Exp -> Exp
plus (N 0) e = e
plus e (N 0) = e
plus (N n1) (N n2) = N (n1 + n2)
plus e1 e2 = Plus e1 e2
-- | Smart constructor that simplifies while combining subexpressions.
times :: Exp -> Exp -> Exp
times (N 0) _ = N 0
times _ (N 0) = N 0
times (N 1) e = e
times e (N 1) = e
times (N n1) (N n2) = N (n1 * n2)
times e1 e2 = Times e1 e2
| FranklinChen/twenty-four-days2015-of-hackage | src/SymbolicDifferentiation/AlphaSyntax.hs | bsd-3-clause | 1,077 | 0 | 9 | 352 | 442 | 229 | 213 | 26 | 2 |
{-# LANGUAGE TemplateHaskell       #-}
{-# LANGUAGE TypeFamilies          #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances     #-}
module Roguelike.WorldMap ( MapObject(..)
, point, object
, WorldMap
, iptraverse
, byPoint
, nearbyObjects
, fromList
) where
import Control.Applicative
import Control.Monad
import Control.Arrow
import Data.Foldable
import Data.List
import Data.Map (Map)
import qualified Data.Map as M
import GHC.Generics (Generic)
import Data.Aeson
import Data.Aeson.TH (Options(..), defaultOptions, deriveJSON)
import Control.Lens
import Roguelike.Types
data MapObject a = MapObject { _point :: Point
, _object :: a
}
deriveJSON defaultOptions { fieldLabelModifier = tail } ''MapObject
makeLenses ''MapObject
-- Most common world map operations would be:
-- 1) Search by ID
-- 2) Search all entities near a Point
-- TODO: Make this more efficient!
-- Add a simple Point index
data WorldMap id a = WorldMap { _objects :: Map id (MapObject a)
                              }
makeLenses ''WorldMap
-- Do not expose indices here, only plain data!
instance (Ord id, FromJSON id, FromJSON a) => FromJSON (WorldMap id a) where
parseJSON = liftM fromList . parseJSON
instance (ToJSON id, ToJSON a) => ToJSON (WorldMap id a) where
  toJSON = toJSON . M.toList . _objects
instance Functor (WorldMap id) where
fmap = imap . const
instance FunctorWithIndex id (WorldMap id) where
instance Foldable (WorldMap id) where
foldMap = ifoldMap . const
instance FoldableWithIndex id (WorldMap id) where
instance Traversable (WorldMap id) where
traverse = itraverse . const
instance TraversableWithIndex id (WorldMap id) where
  itraverse f = objects (itraverse (object . f))
type instance Index (WorldMap id a) = id
type instance IxValue (WorldMap k a) = MapObject a
instance Ord k => Ixed (WorldMap k a) where
ix k = objects . ix k
instance Ord k => At (WorldMap k a) where
at k = objects . at k
iptraverse :: Traversal (WorldMap id a) (WorldMap id b) (MapObject a) (MapObject b)
iptraverse = objects . traversed
filterPoint :: (Point -> Bool) -> WorldMap id a -> [(id, a)]
-- Slow!
filterPoint f = map (second _object) . filter (f . _point . snd) . M.toList . _objects
byPoint :: Point -> WorldMap id a -> [(id, a)]
byPoint p = filterPoint (== p)
nearbyObjects :: Point -> Radius -> WorldMap id a -> [(id, a)]
nearbyObjects p r = filterPoint (near r p)
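-- A usage sketch, assuming 'near' (from Roguelike.Types) tests whether two
-- points lie within the given radius: @nearbyObjects p 3 w@ lists the id and
-- payload of every object in @w@ placed within distance 3 of @p@.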
fromList :: [(id, MapObject a)] -> WorldMap id a
fromList = WorldMap . M.fromList
| abbradar/roguelike | src/Roguelike/WorldMap.hs | bsd-3-clause | 2,434 | 0 | 11 | 661 | 750 | 406 | 344 | -1 | -1 |
module Language.SPL.Printer
( module Text.PrettyPrint.ANSI.Leijen
, tabstop
, parensized
, block
, keyword
, constant
, annotation
, identifier
, comment
, operator
, dullify
, prettify
) where
import Text.PrettyPrint.ANSI.Leijen
tabstop :: Int
tabstop = 4
parensized :: (Pretty a) => [a] -> Doc
parensized = hang 1 . parens . hcat . punctuate (comma <> softline) . map pretty
--parensized = tupled . map pretty
block :: (Pretty a) => a -> Doc
block b = braces $ line <> indent tabstop (pretty b) <> line
keyword, constant, annotation, identifier, comment, operator :: (Pretty a) => a -> Doc
keyword = yellow . pretty
constant = red . pretty
annotation = green . pretty
identifier = cyan . pretty
comment = blue . pretty
operator = magenta . pretty
prettify :: (Pretty a) => a -> String
prettify = show . pretty
dullify :: (Pretty a) => a -> String
dullify x = (displayS . renderCompact . pretty $ x) "" -- FIXME
{-
instance Monoid Doc where
mempty = empty -- empty
mappend = (<>) -- <> or ++
mconcat = hcat -- concat
space = hsep
lines = vsep
-}
| timjs/spl-compiler | src/Language/SPL/Printer.hs | bsd-3-clause | 1,112 | 0 | 9 | 259 | 330 | 191 | 139 | 31 | 1 |
{-# LANGUAGE CPP #-}
module Main where
import GHC.RTS.Events
import GHC.RTS.Events.Analysis.Duplication
import Control.Monad
import Data.Maybe
import System.Environment
import System.IO
import System.Exit
import System.FilePath
import Text.Printf
main = getArgs >>= command
command ["--help"] = do
putStr usage
command ["dup", name] = do
origLog <- readLogOrDie (name <.> "replay")
dupsLog <- readLogOrDie (name <.> "eventlog")
let origEvents = toCapEvents origLog
dupEvents = toCapEvents dupsLog
events = merge origEvents dupEvents
dupSparks = findDuplicatedSparks events
capLost = findLostTime dupSparks events
format capLost (sum (map snd capLost) `div` fromIntegral (length capLost))
command ["merge", name] = do
origLog <- readLogOrDie (name <.> "replay")
dupsLog <- readLogOrDie (name <.> "eventlog")
let origEvents = toCapEvents origLog
dupEvents = toCapEvents dupsLog
events = merge origEvents dupEvents
marked = markDuplicated events
writeEventLogToFile (name <.> "merged") (EventLog (header dupsLog) (Data (map toBlockEvent marked)))
command ["filter", name] = do
log <- readLogOrDie name
let evs = events (dat log)
filtered = filterReplay evs
writeEventLogToFile (dropExtension name <.> "filtered") (EventLog (filterH (header log)) (Data filtered))
command _ = putStr usage >> die "Unrecognized command"
format :: [(Int,Ts)] -> Ts -> IO ()
format tss m = mapM_ (\(c, ts) -> printf "cap %d: %s" c (fmt ts)) tss >> printf "\nMean: %s\n" (fmt m)
where
fmt :: Ts -> String
fmt (Single ts) = printf "%dms\n" (ms ts)
fmt (Dual ts1 ts2) = printf "%d-%dms\n" (ms ts1) (ms ts2)
ms n = n `div` 10^6
toCapEvents :: EventLog -> [(Maybe Int, [Event])]
toCapEvents = groupEvents . events . dat
toBlockEvent :: (Maybe Int, [TsEvent]) -> Event
toBlockEvent (c, es) = Event 0 (EventBlock 0 (fromMaybe (-1) c) (map toEvent es))
where
toEvent :: TsEvent -> Event
toEvent (TsEvent ts ev) = Event (fromIntegral ts) ev
filterH :: Header -> Header
filterH (Header ets) = (Header (go ets))
where
go :: [EventType] -> [EventType]
go [] = []
go (et@(EventType n _ _):ets)
| n <= 59 = et:go ets
| otherwise = []
usage = unlines $ map pad strings
where
align = 4 + (maximum . map (length . fst) $ strings)
pad (x, y) = zipWith const (x ++ repeat ' ') (replicate align ()) ++ y
strings = [ ("ghc-events --help:", "Display this help.")
, ("ghc-events dup <progname>:", "Calculate duplicated work.")
, ("ghc-events merge <progname>:", "Merges the original eventlog with the replayed one with duplicates to provide to ghc-events-analyze.")
, ("ghc-events filter <progname>:", "Removes events used for replay.")
]
readLogOrDie file = do
e <- readEventLogFromFile file
case e of
Left s -> die ("Failed to parse " ++ file ++ ": " ++ s)
Right log -> return log
#if ! MIN_VERSION_base(4,8,0)
die s = do hPutStrLn stderr s; exitWith (ExitFailure 1)
#endif
| hferreiro/ghc-events-replay | Replay.hs | bsd-3-clause | 3,169 | 0 | 14 | 784 | 1,120 | 575 | 545 | 69 | 2 |
-- Copyright (c) 2014, Facebook, Inc.
-- All rights reserved.
--
-- This source code is distributed under the terms of a BSD license,
-- found in the LICENSE file. An additional grant of patent rights can
-- be found in the PATENTS file.
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE ConstraintKinds #-}
-- | Bucketing requests by 'DataSource'.
--
-- When a request is issued by the client via 'dataFetch', it is placed
-- in the 'RequestStore'. When we are ready to fetch the current batch
-- of requests, the 'contents' operation extracts the fetches, bucketed
-- by 'DataSource'.
--
module Haxl.Core.RequestStore (
BlockedFetches(..), RequestStore,
noRequests, addRequest, contents,
requestsOfType
) where
import Haxl.Core.Types
import Data.Map (Map)
import qualified Data.Map.Strict as Map
import Data.Typeable
import Unsafe.Coerce
-- | A container for multiple 'BlockedFetch' objects.
newtype RequestStore u = RequestStore (Map TypeRep (BlockedFetches u))
-- Since we don't know which data sources we will be using, the store
-- is dynamically-typed. It maps the TypeRep of the request to the
-- 'BlockedFetches' for that 'DataSource'.
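--
-- A rough usage sketch (assuming some request type @MyReq@ with a
-- @DataSource u MyReq@ instance and a blocked fetch @bf :: BlockedFetch MyReq@):
--
-- > contents (addRequest bf noRequests)
-- > -- ==> one 'BlockedFetches' bucket, keyed by the 'TypeRep' of @MyReq@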
-- | A batch of 'BlockedFetch' objects for a single 'DataSource'
data BlockedFetches u =
forall r. (DataSource u r) => BlockedFetches [BlockedFetch r]
-- | A new empty 'RequestStore'.
noRequests :: RequestStore u
noRequests = RequestStore Map.empty
-- | Adds a 'BlockedFetch' to a 'RequestStore'.
addRequest
:: forall u r. (DataSource u r)
=> BlockedFetch r -> RequestStore u -> RequestStore u
addRequest bf (RequestStore m) =
RequestStore $ Map.insertWith combine ty (BlockedFetches [bf]) m
where
combine :: BlockedFetches u -> BlockedFetches u -> BlockedFetches u
combine _ (BlockedFetches bfs)
| typeOf1 (getR bfs) == ty = BlockedFetches (unsafeCoerce bf:bfs)
| otherwise = error "RequestStore.insert"
-- the dynamic type check here should be unnecessary, but if
-- there are bugs in `Typeable` or `Map` then we'll get an
-- error instead of a crash. The overhead is negligible.
    -- a type conversion only, so we can get the type of the requests from
-- the list of BlockedFetch.
getR :: [BlockedFetch r1] -> r1 a
getR _ = undefined
-- The TypeRep of requests for this data source
ty :: TypeRep
ty = typeOf1 (undefined :: r a)
-- | Retrieves the whole contents of the 'RequestStore'.
contents :: RequestStore u -> [BlockedFetches u]
contents (RequestStore m) = Map.elems m
-- | Retrieves requests in the 'RequestStore' that have the same type
-- as a given request.
requestsOfType :: forall r a u . (Typeable r, Request r a) => r a -> RequestStore u -> [BlockedFetch r]
requestsOfType _ (RequestStore rs) =
let ty = typeOf1 (undefined :: r a)
in case Map.lookup ty rs of
Just (BlockedFetches result) -> map unsafeCoerce result
Nothing -> []
| kantp/Haxl | Haxl/Core/RequestStore.hs | bsd-3-clause | 2,983 | 0 | 13 | 583 | 547 | 301 | 246 | 39 | 2 |
{-# LANGUAGE LambdaCase        #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Lens
import Data.Monoid
import Network.HTTP.Types
import Network.Wai
import Network.Waitra
import qualified Network.Wai.Handler.Warp as Warp
import Network.Wai.Middleware.Cors
import Network.Wai.Middleware.RequestLogger
import System.Environment
import System.Exit
import System.IO
import qualified Authenticate
import qualified Config
import qualified Proxy
warpSettings :: Config.T -> Warp.Settings
warpSettings conf =
Warp.setPort (view Config.serverPort conf)
$ Warp.setHost "127.0.0.1" Warp.defaultSettings
main :: IO ()
main = getArgs >>= \case
[confPath] -> do
conf <- Config.load confPath
newApp conf >>= Warp.runSettings (warpSettings conf)
_ -> do
hPutStrLn stderr "Usage: auth-proxy CONFIG_FILE"
exitFailure
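-- Per-request CORS policy built from the configuration: the configured
-- origins are allowed (the paired 'True' presumably opting into credentialed
-- requests; see 'CorsResourcePolicy' in wai-cors), the usual verbs are
-- permitted, and preflight responses may be cached for an hour.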
corsPolicy :: Config.T -> Request -> Maybe CorsResourcePolicy
corsPolicy conf =
const . Just
$ CorsResourcePolicy
{ corsOrigins = Just (view Config.serverAllowOrigins conf, True)
, corsMethods =
[ methodGet
, methodPost
, methodHead
, methodPut
, methodDelete
, methodOptions
, methodPatch
]
, corsRequestHeaders = [hContentType]
, corsExposedHeaders = Just [hContentType]
, corsMaxAge = Just 3600
, corsVaryOrigin = True
, corsRequireOrigin = False
, corsIgnoreFailures = False
}
newApp :: Config.T -> IO Application
newApp conf =
logger
. cors (corsPolicy conf)
. waitraMiddleware (routes <> Authenticate.routes conf)
<$> Proxy.newApp conf
where
logger = if view Config.debug conf then logStdoutDev else logStdout
routes :: [Route]
routes =
[ simpleGet "/favicon.ico" $ const ($ responseLBS status404 [] "")
]
| ktvoelker/auth-proxy | src/Main.hs | bsd-3-clause | 1,704 | 0 | 14 | 343 | 467 | 257 | 210 | -1 | -1 |
{-# LANGUAGE DeriveGeneric #-}
module Data.Graph.Libgraph.AlgoDebug where
import Data.Graph.Libgraph.Core
import Data.Graph.Libgraph.Dagify(collapse,remove)
import Prelude hiding (Right)
import Data.List(find)
import Data.Maybe(isJust)
import GHC.Generics
data AssistedMessage = InconclusiveProperty String | PassingProperty String deriving (Eq,Show,Ord,Generic)
data Judgement = Right | Wrong | Unassessed | Assisted [AssistedMessage] deriving (Eq,Show,Ord,Generic)
findFaulty_dag :: (Ord v, Eq a, Show v) => (v -> Judgement) -> Graph v a -> [v]
findFaulty_dag judge g = filter isFaulty (vertices g)
where isFaulty v = (judge v == Wrong)
&& (null $ filter ((/=Right) . judge) (succs g v))
findFaulty :: (Ord v, Eq a, Show v)
=> (v -> Judgement) -> ([v]->v) -> Graph v a -> [v]
findFaulty isWrong merge = (findFaulty_dag isWrong) . (collapse merge) . remove
next_step :: Eq v => Graph v a -> (v -> Judgement) -> v
next_step tree j = case go r of
Just v -> v
Nothing -> r -- no defect?
where
r = root tree
go v | v == r = findJust (map go (succs tree v))
| j v == Right = Nothing -- v is right; don't examine (grand)children
| j v == Wrong = case findJust (map go (succs tree v)) of
Nothing -> (Just v) -- v is a faulty node
(Just w) -> (Just w) -- found next node w in (grand)children of v
| otherwise = Just v -- v is an unassed node
findJust mvs = case (find (isJust) mvs) of Just justv -> justv; Nothing -> Nothing
{- Divide and Query: Try to find a node that divides the tree in two. -}
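{- Sketch of the weighting below: a node weighs 1 plus the weights of its
   children, and subtrees already judged Right contribute 0; next_daq then
   picks the node whose weight is closest to half the weight of the starting
   (deepest wrong, or else root) node. -}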
next_daq :: Ord v => Graph v a -> (v -> Judgement) -> v
next_daq tree j = snd (foldr1 f ws)
where c = succCache tree
ws = weight j c
-- start from deepest wrong node; if no wrong nodes start from the root node
(case start j c (root tree) of (Just r) -> r; Nothing -> root tree)
w = (maximum (map fst ws)) `div` 2 -- the "ideal" weight
f x y | abs (w - (fst x)) < abs (w - (fst y)) = x
| otherwise = y
weight :: (v -> Judgement) -> (v -> [v]) -> v -> [(Int,v)]
weight j c v | j v == Right = [(0,v)] -- discard subtrees that are right
| otherwise = (w,v) : foldr (++) [] vs
where vs = map (weight j c) (c v) -- weighted children
w = 1 + (sum $ map (fst . head) vs) -- weight of v
-- find starting point (i.e. the "deepest" node that is wrong)
start :: (v -> Judgement) -> (v -> [v]) -> v -> Maybe v
start j c v = case filter isJust (map (start j c) (c v)) of
[] -> if j v == Wrong then Just v else Nothing
(w:_) -> w
| MaartenFaddegon/libgraph | Data/Graph/Libgraph/AlgoDebug.hs | bsd-3-clause | 2,727 | 0 | 15 | 800 | 1,098 | 576 | 522 | 46 | 4 |
import qualified TestOrdECMWithTestSequence as OrdTest
import qualified TestHashECMWithTestSequence as HashTest
import qualified TestOrdECMWithTestSequenceInvalidating as OrdTestInvalidating
import qualified TestHashECMWithTestSequenceInvalidating as HashTestInvalidating
main = do
HashTest.testWithTestSequence
OrdTest.testWithTestSequence
HashTestInvalidating.testWithTestSequenceInvalidating
OrdTestInvalidating.testWithTestSequenceInvalidating
return ()
| elblake/expiring-cache-map | tests/TestWithTestSequence.hs | bsd-3-clause | 470 | 0 | 8 | 41 | 58 | 34 | 24 | 10 | 1 |
module WASH.CGI.HTMLMail where
import Prelude hiding (head,div,span)
import WASH.HTML.HTMLMonad hiding (map)
import WASH.Mail.MIME
-- |transform HTML Element into document suitable for sending as email.
htmlDOC :: Element -> DOC
htmlDOC el =
binaryDOC "text" "html" (show el)
| nh2/WashNGo | WASH/CGI/HTMLMail.hs | bsd-3-clause | 282 | 0 | 7 | 43 | 71 | 43 | 28 | 7 | 1 |
#!/usr/bin/env runhaskell
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
module TimeTests where
import Control.Monad
import Data.Foldable (for_)
import Data.Monoid
import qualified Data.Text as T
import Prelude hiding (FilePath)
import Shelly
import Text.Printf
default (T.Text)
graphGenerationBinaryLocation :: FilePath
graphGenerationBinaryLocation = "../rand-code-graph/dist/build/random-level-graphs/random-level-graphs"
outputLocation :: FilePath
outputLocation = "results"
graphsToGenerate :: Int
graphsToGenerate = 5
maxLevel :: Int
maxLevel = 10
seed :: T.Text
seed = "12345"
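-- | Render a duration given in seconds as rough human-readable text.
-- For example, @formatSeconds 3725@ yields
-- @"1 hours, 2 minutes and 5 seconds"@ (no singular/plural handling).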
formatSeconds :: Int -> String
formatSeconds n
| n > 60 = formatMinutes (n `div` 60) ++ if n `mod` 60 == 0 then "" else " and " ++ show (n `mod` 60) ++ " seconds"
| otherwise = show n ++ " seconds"
where
formatMinutes :: Int -> String
formatMinutes n
| n > 60 = formatHours (n `div` 60) ++ if n `mod` 60 == 0 then "" else ", " ++ show (n `mod` 60) ++ " minutes"
| otherwise = show n ++ " minutes"
formatHours :: Int -> String
formatHours = (++ " hours" ) . show
main :: IO ()
main = shelly $ print_stdout False $ escaping False $ do
mkdir_p outputLocation
mkdir_p "generated"
run "cabal" ["install", "--only-dependencies"]
echo $ T.pack $ printf "Building and testing %i graphs" (graphsToGenerate * maxLevel)
(generationTime, _) <- time $ run
graphGenerationBinaryLocation
[ "-L", "HaskellDoApp"
, "-p", "resources/SlowPreamble.hs"
, "-o", "generated/SlowTestGraphs.hs"
, "-l", T.pack $ show maxLevel
, "-n", T.pack $ show (graphsToGenerate * maxLevel)
, "-s", seed
, "-S"
]
echo $ T.pack $ printf "Generated %i graphs in %s" (graphsToGenerate * maxLevel) (formatSeconds $ ceiling generationTime)
(compilationTime, _) <- time $ run "cabal" ["build"]
echo $ T.pack $ printf "Compiled test program in %s" (formatSeconds $ ceiling compilationTime)
(runTime, _) <- time $ run "dist/build/haxl-test-execution-time/haxl-test-execution-time" [] >>=
writefile (outputLocation </> "haskell-timed.json")
echo $ T.pack $ printf "Ran program in %s" (formatSeconds $ ceiling runTime)
| JustusAdam/haxl-test-generated-graph | app/TimeTests.hs | bsd-3-clause | 2,407 | 0 | 15 | 591 | 657 | 352 | 305 | 53 | 3 |
module Data.Geo.GPX.Lens.GeoidheightL where
import Data.Lens.Common
class GeoidheightL a where
geoidheightL :: Lens a (Maybe Double)
| tonymorris/geo-gpx | src/Data/Geo/GPX/Lens/GeoidheightL.hs | bsd-3-clause | 138 | 0 | 9 | 20 | 40 | 23 | 17 | 4 | 0 |
{-# language CPP #-}
-- | = Name
--
-- VK_KHR_pipeline_executable_properties - device extension
--
-- == VK_KHR_pipeline_executable_properties
--
-- [__Name String__]
-- @VK_KHR_pipeline_executable_properties@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 270
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- - Requires @VK_KHR_get_physical_device_properties2@
--
-- [__Special Use__]
--
-- - <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#extendingvulkan-compatibility-specialuse Developer tools>
--
-- [__Contact__]
--
-- - Jason Ekstrand
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_KHR_pipeline_executable_properties] @jekstrand%0A<<Here describe the issue or question you have about the VK_KHR_pipeline_executable_properties extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2019-05-28
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Interactions and External Dependencies__; __Contributors__]
--
-- - Jason Ekstrand, Intel
--
-- - Ian Romanick, Intel
--
-- - Kenneth Graunke, Intel
--
-- - Baldur Karlsson, Valve
--
-- - Jesse Hall, Google
--
-- - Jeff Bolz, Nvidia
--
-- - Piers Daniel, Nvidia
--
-- - Tobias Hector, AMD
--
-- - Jan-Harald Fredriksen, ARM
--
-- - Tom Olson, ARM
--
-- - Daniel Koch, Nvidia
--
-- - Spencer Fricke, Samsung
--
-- == Description
--
-- When a pipeline is created, its state and shaders are compiled into zero
-- or more device-specific executables, which are used when executing
-- commands against that pipeline. This extension adds a mechanism to query
-- properties and statistics about the different executables produced by
-- the pipeline compilation process. This is intended to be used by
-- debugging and performance tools to allow them to provide more detailed
-- information to the user. Certain compile-time shader statistics provided
-- through this extension may be useful to developers for debugging or
-- performance analysis.
--
-- == New Commands
--
-- - 'getPipelineExecutableInternalRepresentationsKHR'
--
-- - 'getPipelineExecutablePropertiesKHR'
--
-- - 'getPipelineExecutableStatisticsKHR'
--
-- == New Structures
--
-- - 'PipelineExecutableInfoKHR'
--
-- - 'PipelineExecutableInternalRepresentationKHR'
--
-- - 'PipelineExecutablePropertiesKHR'
--
-- - 'PipelineExecutableStatisticKHR'
--
-- - 'PipelineInfoKHR'
--
-- - Extending
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2',
-- 'Vulkan.Core10.Device.DeviceCreateInfo':
--
-- - 'PhysicalDevicePipelineExecutablePropertiesFeaturesKHR'
--
-- == New Unions
--
-- - 'PipelineExecutableStatisticValueKHR'
--
-- == New Enums
--
-- - 'PipelineExecutableStatisticFormatKHR'
--
-- == New Enum Constants
--
-- - 'KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME'
--
-- - 'KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION'
--
-- - Extending
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PipelineCreateFlagBits':
--
-- - 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR'
--
-- - 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR'
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR'
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR'
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR'
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR'
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PIPELINE_INFO_KHR'
--
-- == Issues
--
-- 1) What should we call the pieces of the pipeline which are produced by
-- the compilation process and about which you can query properties and
-- statistics?
--
-- __RESOLVED__: Call them “executables”. The name “binary” was used in
-- early drafts of the extension but it was determined that “pipeline
-- binary” could have a fairly broad meaning (such as a binary serialized
-- form of an entire pipeline) and was too big of a namespace for the very
-- specific needs of this extension.
--
-- == Version History
--
-- - Revision 1, 2019-05-28 (Jason Ekstrand)
--
-- - Initial draft
--
-- == See Also
--
-- 'PhysicalDevicePipelineExecutablePropertiesFeaturesKHR',
-- 'PipelineExecutableInfoKHR',
-- 'PipelineExecutableInternalRepresentationKHR',
-- 'PipelineExecutablePropertiesKHR',
-- 'PipelineExecutableStatisticFormatKHR',
-- 'PipelineExecutableStatisticKHR', 'PipelineExecutableStatisticValueKHR',
-- 'PipelineInfoKHR', 'getPipelineExecutableInternalRepresentationsKHR',
-- 'getPipelineExecutablePropertiesKHR',
-- 'getPipelineExecutableStatisticsKHR'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_KHR_pipeline_executable_properties Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_KHR_pipeline_executable_properties ( getPipelineExecutablePropertiesKHR
, getPipelineExecutableStatisticsKHR
, getPipelineExecutableInternalRepresentationsKHR
, PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(..)
, PipelineInfoKHR(..)
, PipelineExecutablePropertiesKHR(..)
, PipelineExecutableInfoKHR(..)
, PipelineExecutableStatisticKHR(..)
, PipelineExecutableInternalRepresentationKHR(..)
, PipelineExecutableStatisticValueKHR(..)
, peekPipelineExecutableStatisticValueKHR
, PipelineExecutableStatisticFormatKHR( PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR
, PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR
, PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR
, PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR
, ..
)
, KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION
, pattern KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION
, KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME
, pattern KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME
) where
import Vulkan.CStruct.Utils (FixedArray)
import Vulkan.Internal.Utils (enumReadPrec)
import Vulkan.Internal.Utils (enumShowsPrec)
import Vulkan.Internal.Utils (traceAroundEvent)
import Control.Exception.Base (bracket)
import Control.Monad (unless)
import Control.Monad.IO.Class (liftIO)
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Marshal.Alloc (callocBytes)
import Foreign.Marshal.Alloc (free)
import GHC.Base (when)
import GHC.IO (throwIO)
import GHC.Ptr (castPtr)
import GHC.Ptr (nullFunPtr)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import GHC.Show (showsPrec)
import Data.ByteString (packCString)
import Data.Coerce (coerce)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Cont (evalContT)
import Control.Monad.Trans.Cont (runContT)
import Data.Vector (generateM)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero)
import Vulkan.Zero (Zero(..))
import Control.Monad.IO.Class (MonadIO)
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.C.Types (CChar)
import Foreign.C.Types (CDouble)
import Foreign.C.Types (CDouble(..))
import Foreign.C.Types (CDouble(CDouble))
import Foreign.C.Types (CSize)
import Foreign.C.Types (CSize(..))
import Foreign.C.Types (CSize(CSize))
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import GHC.IO.Exception (IOErrorType(..))
import GHC.IO.Exception (IOException(..))
import Data.Int (Int32)
import Data.Int (Int64)
import Foreign.Ptr (FunPtr)
import Foreign.Ptr (Ptr)
import GHC.Read (Read(readPrec))
import GHC.Show (Show(showsPrec))
import Data.Word (Word32)
import Data.Word (Word64)
import Data.ByteString (ByteString)
import Data.Kind (Type)
import Control.Monad.Trans.Cont (ContT(..))
import Data.Vector (Vector)
import Vulkan.CStruct.Utils (advancePtrBytes)
import Vulkan.Core10.FundamentalTypes (bool32ToBool)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.CStruct.Utils (lowerArrayPtr)
import Vulkan.CStruct.Utils (pokeFixedLengthNullTerminatedByteString)
import Vulkan.NamedType ((:::))
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.Core10.Handles (Device)
import Vulkan.Core10.Handles (Device(..))
import Vulkan.Core10.Handles (Device(Device))
import Vulkan.Dynamic (DeviceCmds(pVkGetPipelineExecutableInternalRepresentationsKHR))
import Vulkan.Dynamic (DeviceCmds(pVkGetPipelineExecutablePropertiesKHR))
import Vulkan.Dynamic (DeviceCmds(pVkGetPipelineExecutableStatisticsKHR))
import Vulkan.Core10.Handles (Device_T)
import Vulkan.Core10.APIConstants (MAX_DESCRIPTION_SIZE)
import Vulkan.Core10.Handles (Pipeline)
import Vulkan.Core10.Enums.Result (Result)
import Vulkan.Core10.Enums.Result (Result(..))
import Vulkan.Core10.Enums.ShaderStageFlagBits (ShaderStageFlags)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Exception (VulkanException(..))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PIPELINE_INFO_KHR))
import Vulkan.Core10.Enums.Result (Result(SUCCESS))
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkGetPipelineExecutablePropertiesKHR
:: FunPtr (Ptr Device_T -> Ptr PipelineInfoKHR -> Ptr Word32 -> Ptr PipelineExecutablePropertiesKHR -> IO Result) -> Ptr Device_T -> Ptr PipelineInfoKHR -> Ptr Word32 -> Ptr PipelineExecutablePropertiesKHR -> IO Result
-- | vkGetPipelineExecutablePropertiesKHR - Get the executables associated
-- with a pipeline
--
-- = Description
--
-- If @pProperties@ is @NULL@, then the number of pipeline executables
-- associated with the pipeline is returned in @pExecutableCount@.
-- Otherwise, @pExecutableCount@ /must/ point to a variable set by the user
-- to the number of elements in the @pProperties@ array, and on return the
-- variable is overwritten with the number of structures actually written
-- to @pProperties@. If @pExecutableCount@ is less than the number of
-- pipeline executables associated with the pipeline, at most
-- @pExecutableCount@ structures will be written, and
-- 'Vulkan.Core10.Enums.Result.INCOMPLETE' will be returned instead of
-- 'Vulkan.Core10.Enums.Result.SUCCESS', to indicate that not all the
-- available properties were returned.
--
-- == Valid Usage
--
-- - #VUID-vkGetPipelineExecutablePropertiesKHR-pipelineExecutableInfo-03270#
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#features-pipelineExecutableInfo pipelineExecutableInfo>
-- /must/ be enabled
--
-- - #VUID-vkGetPipelineExecutablePropertiesKHR-pipeline-03271#
-- @pipeline@ member of @pPipelineInfo@ /must/ have been created with
-- @device@
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkGetPipelineExecutablePropertiesKHR-device-parameter#
-- @device@ /must/ be a valid 'Vulkan.Core10.Handles.Device' handle
--
-- - #VUID-vkGetPipelineExecutablePropertiesKHR-pPipelineInfo-parameter#
-- @pPipelineInfo@ /must/ be a valid pointer to a valid
-- 'PipelineInfoKHR' structure
--
-- - #VUID-vkGetPipelineExecutablePropertiesKHR-pExecutableCount-parameter#
-- @pExecutableCount@ /must/ be a valid pointer to a @uint32_t@ value
--
-- - #VUID-vkGetPipelineExecutablePropertiesKHR-pProperties-parameter# If
-- the value referenced by @pExecutableCount@ is not @0@, and
-- @pProperties@ is not @NULL@, @pProperties@ /must/ be a valid pointer
-- to an array of @pExecutableCount@ 'PipelineExecutablePropertiesKHR'
-- structures
--
-- == Return Codes
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
-- - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
-- - 'Vulkan.Core10.Enums.Result.INCOMPLETE'
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY'
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_DEVICE_MEMORY'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_pipeline_executable_properties VK_KHR_pipeline_executable_properties>,
-- 'Vulkan.Core10.Handles.Device', 'PipelineExecutablePropertiesKHR',
-- 'PipelineInfoKHR'
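--
-- A minimal usage sketch (assuming @device@ and @pipeline@ were created
-- elsewhere and the @pipelineExecutableInfo@ feature was enabled at device
-- creation):
--
-- > (_, props) <- getPipelineExecutablePropertiesKHR device (PipelineInfoKHR pipeline)
-- > -- inspect the returned 'PipelineExecutablePropertiesKHR' values as needed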
getPipelineExecutablePropertiesKHR :: forall io
. (MonadIO io)
=> -- | @device@ is the device that created the pipeline.
Device
-> -- | @pPipelineInfo@ describes the pipeline being queried.
PipelineInfoKHR
-> io (Result, ("properties" ::: Vector PipelineExecutablePropertiesKHR))
getPipelineExecutablePropertiesKHR device pipelineInfo = liftIO . evalContT $ do
let vkGetPipelineExecutablePropertiesKHRPtr = pVkGetPipelineExecutablePropertiesKHR (case device of Device{deviceCmds} -> deviceCmds)
lift $ unless (vkGetPipelineExecutablePropertiesKHRPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkGetPipelineExecutablePropertiesKHR is null" Nothing Nothing
let vkGetPipelineExecutablePropertiesKHR' = mkVkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHRPtr
let device' = deviceHandle (device)
pPipelineInfo <- ContT $ withCStruct (pipelineInfo)
pPExecutableCount <- ContT $ bracket (callocBytes @Word32 4) free
r <- lift $ traceAroundEvent "vkGetPipelineExecutablePropertiesKHR" (vkGetPipelineExecutablePropertiesKHR' device' pPipelineInfo (pPExecutableCount) (nullPtr))
lift $ when (r < SUCCESS) (throwIO (VulkanException r))
pExecutableCount <- lift $ peek @Word32 pPExecutableCount
pPProperties <- ContT $ bracket (callocBytes @PipelineExecutablePropertiesKHR ((fromIntegral (pExecutableCount)) * 536)) free
_ <- traverse (\i -> ContT $ pokeZeroCStruct (pPProperties `advancePtrBytes` (i * 536) :: Ptr PipelineExecutablePropertiesKHR) . ($ ())) [0..(fromIntegral (pExecutableCount)) - 1]
r' <- lift $ traceAroundEvent "vkGetPipelineExecutablePropertiesKHR" (vkGetPipelineExecutablePropertiesKHR' device' pPipelineInfo (pPExecutableCount) ((pPProperties)))
lift $ when (r' < SUCCESS) (throwIO (VulkanException r'))
pExecutableCount' <- lift $ peek @Word32 pPExecutableCount
pProperties' <- lift $ generateM (fromIntegral (pExecutableCount')) (\i -> peekCStruct @PipelineExecutablePropertiesKHR (((pPProperties) `advancePtrBytes` (536 * (i)) :: Ptr PipelineExecutablePropertiesKHR)))
pure $ ((r'), pProperties')
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkGetPipelineExecutableStatisticsKHR
:: FunPtr (Ptr Device_T -> Ptr PipelineExecutableInfoKHR -> Ptr Word32 -> Ptr PipelineExecutableStatisticKHR -> IO Result) -> Ptr Device_T -> Ptr PipelineExecutableInfoKHR -> Ptr Word32 -> Ptr PipelineExecutableStatisticKHR -> IO Result
-- | vkGetPipelineExecutableStatisticsKHR - Get compile time statistics
-- associated with a pipeline executable
--
-- = Description
--
-- If @pStatistics@ is @NULL@, then the number of statistics associated
-- with the pipeline executable is returned in @pStatisticCount@.
-- Otherwise, @pStatisticCount@ /must/ point to a variable set by the user
-- to the number of elements in the @pStatistics@ array, and on return the
-- variable is overwritten with the number of structures actually written
-- to @pStatistics@. If @pStatisticCount@ is less than the number of
-- statistics associated with the pipeline executable, at most
-- @pStatisticCount@ structures will be written, and
-- 'Vulkan.Core10.Enums.Result.INCOMPLETE' will be returned instead of
-- 'Vulkan.Core10.Enums.Result.SUCCESS', to indicate that not all the
-- available statistics were returned.
--
-- == Valid Usage
--
-- - #VUID-vkGetPipelineExecutableStatisticsKHR-pipelineExecutableInfo-03272#
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#features-pipelineExecutableInfo pipelineExecutableInfo>
-- /must/ be enabled
--
-- - #VUID-vkGetPipelineExecutableStatisticsKHR-pipeline-03273#
-- @pipeline@ member of @pExecutableInfo@ /must/ have been created with
-- @device@
--
-- - #VUID-vkGetPipelineExecutableStatisticsKHR-pipeline-03274#
-- @pipeline@ member of @pExecutableInfo@ /must/ have been created with
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR'
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkGetPipelineExecutableStatisticsKHR-device-parameter#
-- @device@ /must/ be a valid 'Vulkan.Core10.Handles.Device' handle
--
-- - #VUID-vkGetPipelineExecutableStatisticsKHR-pExecutableInfo-parameter#
-- @pExecutableInfo@ /must/ be a valid pointer to a valid
-- 'PipelineExecutableInfoKHR' structure
--
-- - #VUID-vkGetPipelineExecutableStatisticsKHR-pStatisticCount-parameter#
-- @pStatisticCount@ /must/ be a valid pointer to a @uint32_t@ value
--
-- - #VUID-vkGetPipelineExecutableStatisticsKHR-pStatistics-parameter# If
-- the value referenced by @pStatisticCount@ is not @0@, and
-- @pStatistics@ is not @NULL@, @pStatistics@ /must/ be a valid pointer
-- to an array of @pStatisticCount@ 'PipelineExecutableStatisticKHR'
-- structures
--
-- == Return Codes
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
-- - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
-- - 'Vulkan.Core10.Enums.Result.INCOMPLETE'
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY'
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_DEVICE_MEMORY'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_pipeline_executable_properties VK_KHR_pipeline_executable_properties>,
-- 'Vulkan.Core10.Handles.Device', 'PipelineExecutableInfoKHR',
-- 'PipelineExecutableStatisticKHR'
getPipelineExecutableStatisticsKHR :: forall io
. (MonadIO io)
=> -- | @device@ is the device that created the pipeline.
Device
-> -- | @pExecutableInfo@ describes the pipeline executable being queried.
PipelineExecutableInfoKHR
-> io (Result, ("statistics" ::: Vector PipelineExecutableStatisticKHR))
getPipelineExecutableStatisticsKHR device executableInfo = liftIO . evalContT $ do
let vkGetPipelineExecutableStatisticsKHRPtr = pVkGetPipelineExecutableStatisticsKHR (case device of Device{deviceCmds} -> deviceCmds)
lift $ unless (vkGetPipelineExecutableStatisticsKHRPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkGetPipelineExecutableStatisticsKHR is null" Nothing Nothing
let vkGetPipelineExecutableStatisticsKHR' = mkVkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHRPtr
let device' = deviceHandle (device)
pExecutableInfo <- ContT $ withCStruct (executableInfo)
pPStatisticCount <- ContT $ bracket (callocBytes @Word32 4) free
r <- lift $ traceAroundEvent "vkGetPipelineExecutableStatisticsKHR" (vkGetPipelineExecutableStatisticsKHR' device' pExecutableInfo (pPStatisticCount) (nullPtr))
lift $ when (r < SUCCESS) (throwIO (VulkanException r))
pStatisticCount <- lift $ peek @Word32 pPStatisticCount
pPStatistics <- ContT $ bracket (callocBytes @PipelineExecutableStatisticKHR ((fromIntegral (pStatisticCount)) * 544)) free
_ <- traverse (\i -> ContT $ pokeZeroCStruct (pPStatistics `advancePtrBytes` (i * 544) :: Ptr PipelineExecutableStatisticKHR) . ($ ())) [0..(fromIntegral (pStatisticCount)) - 1]
r' <- lift $ traceAroundEvent "vkGetPipelineExecutableStatisticsKHR" (vkGetPipelineExecutableStatisticsKHR' device' pExecutableInfo (pPStatisticCount) ((pPStatistics)))
lift $ when (r' < SUCCESS) (throwIO (VulkanException r'))
pStatisticCount' <- lift $ peek @Word32 pPStatisticCount
pStatistics' <- lift $ generateM (fromIntegral (pStatisticCount')) (\i -> peekCStruct @PipelineExecutableStatisticKHR (((pPStatistics) `advancePtrBytes` (544 * (i)) :: Ptr PipelineExecutableStatisticKHR)))
pure $ ((r'), pStatistics')
foreign import ccall
#if !defined(SAFE_FOREIGN_CALLS)
unsafe
#endif
"dynamic" mkVkGetPipelineExecutableInternalRepresentationsKHR
:: FunPtr (Ptr Device_T -> Ptr PipelineExecutableInfoKHR -> Ptr Word32 -> Ptr PipelineExecutableInternalRepresentationKHR -> IO Result) -> Ptr Device_T -> Ptr PipelineExecutableInfoKHR -> Ptr Word32 -> Ptr PipelineExecutableInternalRepresentationKHR -> IO Result
-- | vkGetPipelineExecutableInternalRepresentationsKHR - Get internal
-- representations of the pipeline executable
--
-- = Description
--
-- If @pInternalRepresentations@ is @NULL@, then the number of internal
-- representations associated with the pipeline executable is returned in
-- @pInternalRepresentationCount@. Otherwise,
-- @pInternalRepresentationCount@ /must/ point to a variable set by the
-- user to the number of elements in the @pInternalRepresentations@ array,
-- and on return the variable is overwritten with the number of structures
-- actually written to @pInternalRepresentations@. If
-- @pInternalRepresentationCount@ is less than the number of internal
-- representations associated with the pipeline executable, at most
-- @pInternalRepresentationCount@ structures will be written, and
-- 'Vulkan.Core10.Enums.Result.INCOMPLETE' will be returned instead of
-- 'Vulkan.Core10.Enums.Result.SUCCESS', to indicate that not all the
-- available representations were returned.
--
-- While the details of the internal representations remain
-- implementation-dependent, the implementation /should/ order the internal
-- representations in the order in which they occur in the compiled
-- pipeline with the final shader assembly (if any) last.
--
-- == Valid Usage
--
-- - #VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipelineExecutableInfo-03276#
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#features-pipelineExecutableInfo pipelineExecutableInfo>
-- /must/ be enabled
--
-- - #VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipeline-03277#
-- @pipeline@ member of @pExecutableInfo@ /must/ have been created with
-- @device@
--
-- - #VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pipeline-03278#
-- @pipeline@ member of @pExecutableInfo@ /must/ have been created with
-- 'Vulkan.Core10.Enums.PipelineCreateFlagBits.PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR'
--
-- == Valid Usage (Implicit)
--
-- - #VUID-vkGetPipelineExecutableInternalRepresentationsKHR-device-parameter#
-- @device@ /must/ be a valid 'Vulkan.Core10.Handles.Device' handle
--
-- - #VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pExecutableInfo-parameter#
-- @pExecutableInfo@ /must/ be a valid pointer to a valid
-- 'PipelineExecutableInfoKHR' structure
--
-- - #VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pInternalRepresentationCount-parameter#
-- @pInternalRepresentationCount@ /must/ be a valid pointer to a
-- @uint32_t@ value
--
-- - #VUID-vkGetPipelineExecutableInternalRepresentationsKHR-pInternalRepresentations-parameter#
-- If the value referenced by @pInternalRepresentationCount@ is not
-- @0@, and @pInternalRepresentations@ is not @NULL@,
-- @pInternalRepresentations@ /must/ be a valid pointer to an array of
-- @pInternalRepresentationCount@
-- 'PipelineExecutableInternalRepresentationKHR' structures
--
-- == Return Codes
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-successcodes Success>]
--
-- - 'Vulkan.Core10.Enums.Result.SUCCESS'
--
-- - 'Vulkan.Core10.Enums.Result.INCOMPLETE'
--
-- [<https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#fundamentals-errorcodes Failure>]
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_HOST_MEMORY'
--
-- - 'Vulkan.Core10.Enums.Result.ERROR_OUT_OF_DEVICE_MEMORY'
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_pipeline_executable_properties VK_KHR_pipeline_executable_properties>,
-- 'Vulkan.Core10.Handles.Device', 'PipelineExecutableInfoKHR',
-- 'PipelineExecutableInternalRepresentationKHR'
getPipelineExecutableInternalRepresentationsKHR :: forall io
. (MonadIO io)
=> -- | @device@ is the device that created the pipeline.
Device
-> -- | @pExecutableInfo@ describes the pipeline executable being queried.
PipelineExecutableInfoKHR
-> io (Result, ("internalRepresentations" ::: Vector PipelineExecutableInternalRepresentationKHR))
getPipelineExecutableInternalRepresentationsKHR device executableInfo = liftIO . evalContT $ do
let vkGetPipelineExecutableInternalRepresentationsKHRPtr = pVkGetPipelineExecutableInternalRepresentationsKHR (case device of Device{deviceCmds} -> deviceCmds)
lift $ unless (vkGetPipelineExecutableInternalRepresentationsKHRPtr /= nullFunPtr) $
throwIO $ IOError Nothing InvalidArgument "" "The function pointer for vkGetPipelineExecutableInternalRepresentationsKHR is null" Nothing Nothing
let vkGetPipelineExecutableInternalRepresentationsKHR' = mkVkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHRPtr
let device' = deviceHandle (device)
pExecutableInfo <- ContT $ withCStruct (executableInfo)
pPInternalRepresentationCount <- ContT $ bracket (callocBytes @Word32 4) free
r <- lift $ traceAroundEvent "vkGetPipelineExecutableInternalRepresentationsKHR" (vkGetPipelineExecutableInternalRepresentationsKHR' device' pExecutableInfo (pPInternalRepresentationCount) (nullPtr))
lift $ when (r < SUCCESS) (throwIO (VulkanException r))
pInternalRepresentationCount <- lift $ peek @Word32 pPInternalRepresentationCount
pPInternalRepresentations <- ContT $ bracket (callocBytes @PipelineExecutableInternalRepresentationKHR ((fromIntegral (pInternalRepresentationCount)) * 552)) free
_ <- traverse (\i -> ContT $ pokeZeroCStruct (pPInternalRepresentations `advancePtrBytes` (i * 552) :: Ptr PipelineExecutableInternalRepresentationKHR) . ($ ())) [0..(fromIntegral (pInternalRepresentationCount)) - 1]
r' <- lift $ traceAroundEvent "vkGetPipelineExecutableInternalRepresentationsKHR" (vkGetPipelineExecutableInternalRepresentationsKHR' device' pExecutableInfo (pPInternalRepresentationCount) ((pPInternalRepresentations)))
lift $ when (r' < SUCCESS) (throwIO (VulkanException r'))
pInternalRepresentationCount' <- lift $ peek @Word32 pPInternalRepresentationCount
pInternalRepresentations' <- lift $ generateM (fromIntegral (pInternalRepresentationCount')) (\i -> peekCStruct @PipelineExecutableInternalRepresentationKHR (((pPInternalRepresentations) `advancePtrBytes` (552 * (i)) :: Ptr PipelineExecutableInternalRepresentationKHR)))
pure $ ((r'), pInternalRepresentations')
-- | VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR - Structure
-- describing whether pipeline executable properties are available
--
-- = Members
--
-- This structure describes the following feature:
--
-- = Description
--
-- If the 'PhysicalDevicePipelineExecutablePropertiesFeaturesKHR' structure
-- is included in the @pNext@ chain of the
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2'
-- structure passed to
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceFeatures2',
-- it is filled in to indicate whether each corresponding feature is
-- supported. 'PhysicalDevicePipelineExecutablePropertiesFeaturesKHR' /can/
-- also be used in the @pNext@ chain of
-- 'Vulkan.Core10.Device.DeviceCreateInfo' to selectively enable these
-- features.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_pipeline_executable_properties VK_KHR_pipeline_executable_properties>,
-- 'Vulkan.Core10.FundamentalTypes.Bool32',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data PhysicalDevicePipelineExecutablePropertiesFeaturesKHR = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
{ -- | #features-pipelineExecutableInfo# @pipelineExecutableInfo@ indicates
-- that the implementation supports reporting properties and statistics
-- about the pipeline executables associated with a compiled pipeline.
pipelineExecutableInfo :: Bool }
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDevicePipelineExecutablePropertiesFeaturesKHR)
#endif
deriving instance Show PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
instance ToCStruct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR where
withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PhysicalDevicePipelineExecutablePropertiesFeaturesKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (pipelineExecutableInfo))
f
cStructSize = 24
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (zero))
f
instance FromCStruct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR where
peekCStruct p = do
pipelineExecutableInfo <- peek @Bool32 ((p `plusPtr` 16 :: Ptr Bool32))
pure $ PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
(bool32ToBool pipelineExecutableInfo)
instance Storable PhysicalDevicePipelineExecutablePropertiesFeaturesKHR where
sizeOf ~_ = 24
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PhysicalDevicePipelineExecutablePropertiesFeaturesKHR where
zero = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
zero
-- | VkPipelineInfoKHR - Structure describing a pipeline
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_pipeline_executable_properties VK_KHR_pipeline_executable_properties>,
-- 'Vulkan.Core10.Handles.Pipeline',
-- 'Vulkan.Core10.Enums.StructureType.StructureType',
-- 'getPipelineExecutablePropertiesKHR'
data PipelineInfoKHR = PipelineInfoKHR
{ -- | @pipeline@ is a 'Vulkan.Core10.Handles.Pipeline' handle.
--
-- #VUID-VkPipelineInfoKHR-pipeline-parameter# @pipeline@ /must/ be a valid
-- 'Vulkan.Core10.Handles.Pipeline' handle
pipeline :: Pipeline }
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PipelineInfoKHR)
#endif
deriving instance Show PipelineInfoKHR
instance ToCStruct PipelineInfoKHR where
withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PipelineInfoKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PIPELINE_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Pipeline)) (pipeline)
f
cStructSize = 24
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PIPELINE_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Pipeline)) (zero)
f
instance FromCStruct PipelineInfoKHR where
peekCStruct p = do
pipeline <- peek @Pipeline ((p `plusPtr` 16 :: Ptr Pipeline))
pure $ PipelineInfoKHR
pipeline
instance Storable PipelineInfoKHR where
sizeOf ~_ = 24
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PipelineInfoKHR where
zero = PipelineInfoKHR
zero
-- | VkPipelineExecutablePropertiesKHR - Structure describing a pipeline
-- executable
--
-- = Description
--
-- Not all implementations have a 1:1 mapping between shader stages and
-- pipeline executables and some implementations /may/ reduce a given
-- shader stage to fixed function hardware programming such that no
-- pipeline executable is available. No guarantees are provided about the
-- mapping between shader stages and pipeline executables and @stages@
-- /should/ be considered a best effort hint. Because the application
-- /cannot/ rely on the @stages@ field to provide an exact description,
-- @name@ and @description@ provide a human readable name and description
-- which more accurately describes the given pipeline executable.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_pipeline_executable_properties VK_KHR_pipeline_executable_properties>,
-- 'Vulkan.Core10.Enums.ShaderStageFlagBits.ShaderStageFlags',
-- 'Vulkan.Core10.Enums.StructureType.StructureType',
-- 'getPipelineExecutablePropertiesKHR'
data PipelineExecutablePropertiesKHR = PipelineExecutablePropertiesKHR
{ -- | @stages@ is a bitmask of zero or more
-- 'Vulkan.Core10.Enums.ShaderStageFlagBits.ShaderStageFlagBits' indicating
-- which shader stages (if any) were principally used as inputs to compile
-- this pipeline executable.
stages :: ShaderStageFlags
, -- | @name@ is an array of 'Vulkan.Core10.APIConstants.MAX_DESCRIPTION_SIZE'
-- @char@ containing a null-terminated UTF-8 string which is a short human
-- readable name for this pipeline executable.
name :: ByteString
, -- | @description@ is an array of
-- 'Vulkan.Core10.APIConstants.MAX_DESCRIPTION_SIZE' @char@ containing a
-- null-terminated UTF-8 string which is a human readable description for
-- this pipeline executable.
description :: ByteString
, -- | @subgroupSize@ is the subgroup size with which this pipeline executable
-- is dispatched.
subgroupSize :: Word32
}
deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PipelineExecutablePropertiesKHR)
#endif
deriving instance Show PipelineExecutablePropertiesKHR
instance ToCStruct PipelineExecutablePropertiesKHR where
withCStruct x f = allocaBytes 536 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PipelineExecutablePropertiesKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr ShaderStageFlags)) (stages)
pokeFixedLengthNullTerminatedByteString ((p `plusPtr` 20 :: Ptr (FixedArray MAX_DESCRIPTION_SIZE CChar))) (name)
pokeFixedLengthNullTerminatedByteString ((p `plusPtr` 276 :: Ptr (FixedArray MAX_DESCRIPTION_SIZE CChar))) (description)
poke ((p `plusPtr` 532 :: Ptr Word32)) (subgroupSize)
f
cStructSize = 536
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr ShaderStageFlags)) (zero)
pokeFixedLengthNullTerminatedByteString ((p `plusPtr` 20 :: Ptr (FixedArray MAX_DESCRIPTION_SIZE CChar))) (mempty)
pokeFixedLengthNullTerminatedByteString ((p `plusPtr` 276 :: Ptr (FixedArray MAX_DESCRIPTION_SIZE CChar))) (mempty)
poke ((p `plusPtr` 532 :: Ptr Word32)) (zero)
f
instance FromCStruct PipelineExecutablePropertiesKHR where
peekCStruct p = do
stages <- peek @ShaderStageFlags ((p `plusPtr` 16 :: Ptr ShaderStageFlags))
name <- packCString (lowerArrayPtr ((p `plusPtr` 20 :: Ptr (FixedArray MAX_DESCRIPTION_SIZE CChar))))
description <- packCString (lowerArrayPtr ((p `plusPtr` 276 :: Ptr (FixedArray MAX_DESCRIPTION_SIZE CChar))))
subgroupSize <- peek @Word32 ((p `plusPtr` 532 :: Ptr Word32))
pure $ PipelineExecutablePropertiesKHR
stages name description subgroupSize
instance Storable PipelineExecutablePropertiesKHR where
sizeOf ~_ = 536
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PipelineExecutablePropertiesKHR where
zero = PipelineExecutablePropertiesKHR
zero
mempty
mempty
zero
-- | VkPipelineExecutableInfoKHR - Structure describing a pipeline executable
-- to query for associated statistics or internal representations
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_pipeline_executable_properties VK_KHR_pipeline_executable_properties>,
-- 'Vulkan.Core10.Handles.Pipeline',
-- 'Vulkan.Core10.Enums.StructureType.StructureType',
-- 'getPipelineExecutableInternalRepresentationsKHR',
-- 'getPipelineExecutableStatisticsKHR'
data PipelineExecutableInfoKHR = PipelineExecutableInfoKHR
{ -- | @pipeline@ is the pipeline to query.
--
-- #VUID-VkPipelineExecutableInfoKHR-pipeline-parameter# @pipeline@ /must/
-- be a valid 'Vulkan.Core10.Handles.Pipeline' handle
pipeline :: Pipeline
, -- | @executableIndex@ is the index of the pipeline executable to query in
-- the array of executable properties returned by
-- 'getPipelineExecutablePropertiesKHR'.
--
-- #VUID-VkPipelineExecutableInfoKHR-executableIndex-03275#
-- @executableIndex@ /must/ be less than the number of pipeline executables
-- associated with @pipeline@ as returned in the @pExecutableCount@
-- parameter of 'getPipelineExecutablePropertiesKHR'
executableIndex :: Word32
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PipelineExecutableInfoKHR)
#endif
deriving instance Show PipelineExecutableInfoKHR
instance ToCStruct PipelineExecutableInfoKHR where
withCStruct x f = allocaBytes 32 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PipelineExecutableInfoKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Pipeline)) (pipeline)
poke ((p `plusPtr` 24 :: Ptr Word32)) (executableIndex)
f
cStructSize = 32
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Pipeline)) (zero)
poke ((p `plusPtr` 24 :: Ptr Word32)) (zero)
f
instance FromCStruct PipelineExecutableInfoKHR where
peekCStruct p = do
pipeline <- peek @Pipeline ((p `plusPtr` 16 :: Ptr Pipeline))
executableIndex <- peek @Word32 ((p `plusPtr` 24 :: Ptr Word32))
pure $ PipelineExecutableInfoKHR
pipeline executableIndex
instance Storable PipelineExecutableInfoKHR where
sizeOf ~_ = 32
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PipelineExecutableInfoKHR where
zero = PipelineExecutableInfoKHR
zero
zero
-- | VkPipelineExecutableStatisticKHR - Structure describing a compile-time
-- pipeline executable statistic
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_pipeline_executable_properties VK_KHR_pipeline_executable_properties>,
-- 'PipelineExecutableStatisticFormatKHR',
-- 'PipelineExecutableStatisticValueKHR',
-- 'Vulkan.Core10.Enums.StructureType.StructureType',
-- 'getPipelineExecutableStatisticsKHR'
data PipelineExecutableStatisticKHR = PipelineExecutableStatisticKHR
{ -- | @name@ is an array of 'Vulkan.Core10.APIConstants.MAX_DESCRIPTION_SIZE'
-- @char@ containing a null-terminated UTF-8 string which is a short human
-- readable name for this statistic.
name :: ByteString
, -- | @description@ is an array of
-- 'Vulkan.Core10.APIConstants.MAX_DESCRIPTION_SIZE' @char@ containing a
-- null-terminated UTF-8 string which is a human readable description for
-- this statistic.
description :: ByteString
, -- | @format@ is a 'PipelineExecutableStatisticFormatKHR' value specifying
-- the format of the data found in @value@.
format :: PipelineExecutableStatisticFormatKHR
, -- | @value@ is the value of this statistic.
value :: PipelineExecutableStatisticValueKHR
}
deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PipelineExecutableStatisticKHR)
#endif
deriving instance Show PipelineExecutableStatisticKHR
instance ToCStruct PipelineExecutableStatisticKHR where
withCStruct x f = allocaBytes 544 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PipelineExecutableStatisticKHR{..} f = evalContT $ do
lift $ poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR)
lift $ poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
lift $ pokeFixedLengthNullTerminatedByteString ((p `plusPtr` 16 :: Ptr (FixedArray MAX_DESCRIPTION_SIZE CChar))) (name)
lift $ pokeFixedLengthNullTerminatedByteString ((p `plusPtr` 272 :: Ptr (FixedArray MAX_DESCRIPTION_SIZE CChar))) (description)
lift $ poke ((p `plusPtr` 528 :: Ptr PipelineExecutableStatisticFormatKHR)) (format)
ContT $ pokeCStruct ((p `plusPtr` 536 :: Ptr PipelineExecutableStatisticValueKHR)) (value) . ($ ())
lift $ f
cStructSize = 544
cStructAlignment = 8
pokeZeroCStruct p f = evalContT $ do
lift $ poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR)
lift $ poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
lift $ pokeFixedLengthNullTerminatedByteString ((p `plusPtr` 16 :: Ptr (FixedArray MAX_DESCRIPTION_SIZE CChar))) (mempty)
lift $ pokeFixedLengthNullTerminatedByteString ((p `plusPtr` 272 :: Ptr (FixedArray MAX_DESCRIPTION_SIZE CChar))) (mempty)
lift $ poke ((p `plusPtr` 528 :: Ptr PipelineExecutableStatisticFormatKHR)) (zero)
ContT $ pokeCStruct ((p `plusPtr` 536 :: Ptr PipelineExecutableStatisticValueKHR)) (zero) . ($ ())
lift $ f
instance FromCStruct PipelineExecutableStatisticKHR where
peekCStruct p = do
name <- packCString (lowerArrayPtr ((p `plusPtr` 16 :: Ptr (FixedArray MAX_DESCRIPTION_SIZE CChar))))
description <- packCString (lowerArrayPtr ((p `plusPtr` 272 :: Ptr (FixedArray MAX_DESCRIPTION_SIZE CChar))))
format <- peek @PipelineExecutableStatisticFormatKHR ((p `plusPtr` 528 :: Ptr PipelineExecutableStatisticFormatKHR))
value <- peekPipelineExecutableStatisticValueKHR format ((p `plusPtr` 536 :: Ptr PipelineExecutableStatisticValueKHR))
pure $ PipelineExecutableStatisticKHR
name description format value
instance Zero PipelineExecutableStatisticKHR where
zero = PipelineExecutableStatisticKHR
mempty
mempty
zero
zero
-- | VkPipelineExecutableInternalRepresentationKHR - Structure describing the
-- textual form of a pipeline executable internal representation
--
-- = Description
--
-- If @pData@ is @NULL@, then the size, in bytes, of the internal
-- representation data is returned in @dataSize@. Otherwise, @dataSize@
-- must be the size of the buffer, in bytes, pointed to by @pData@ and on
-- return @dataSize@ is overwritten with the number of bytes of data
-- actually written to @pData@ including any trailing null character. If
-- @dataSize@ is less than the size, in bytes, of the internal
-- representation’s data, at most @dataSize@ bytes of data will be written
-- to @pData@, and 'Vulkan.Core10.Enums.Result.INCOMPLETE' will be returned
-- instead of 'Vulkan.Core10.Enums.Result.SUCCESS', to indicate that not
-- all the available representation was returned.
--
-- If @isText@ is 'Vulkan.Core10.FundamentalTypes.TRUE' and @pData@ is not
-- @NULL@ and @dataSize@ is not zero, the last byte written to @pData@ will
-- be a null character.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_pipeline_executable_properties VK_KHR_pipeline_executable_properties>,
-- 'Vulkan.Core10.FundamentalTypes.Bool32',
-- 'Vulkan.Core10.Enums.StructureType.StructureType',
-- 'getPipelineExecutableInternalRepresentationsKHR'
data PipelineExecutableInternalRepresentationKHR = PipelineExecutableInternalRepresentationKHR
{ -- | @name@ is an array of 'Vulkan.Core10.APIConstants.MAX_DESCRIPTION_SIZE'
-- @char@ containing a null-terminated UTF-8 string which is a short human
-- readable name for this internal representation.
name :: ByteString
, -- | @description@ is an array of
-- 'Vulkan.Core10.APIConstants.MAX_DESCRIPTION_SIZE' @char@ containing a
-- null-terminated UTF-8 string which is a human readable description for
-- this internal representation.
description :: ByteString
, -- | @isText@ specifies whether the returned data is text or opaque data. If
-- @isText@ is 'Vulkan.Core10.FundamentalTypes.TRUE' then the data returned
-- in @pData@ is text and is guaranteed to be a null-terminated UTF-8
-- string.
isText :: Bool
, -- | @dataSize@ is an integer related to the size, in bytes, of the internal
-- representation’s data, as described below.
dataSize :: Word64
, -- | @pData@ is either @NULL@ or a pointer to a block of data into which the
-- implementation will write the internal representation.
data' :: Ptr ()
}
deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PipelineExecutableInternalRepresentationKHR)
#endif
deriving instance Show PipelineExecutableInternalRepresentationKHR
instance ToCStruct PipelineExecutableInternalRepresentationKHR where
withCStruct x f = allocaBytes 552 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PipelineExecutableInternalRepresentationKHR{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
pokeFixedLengthNullTerminatedByteString ((p `plusPtr` 16 :: Ptr (FixedArray MAX_DESCRIPTION_SIZE CChar))) (name)
pokeFixedLengthNullTerminatedByteString ((p `plusPtr` 272 :: Ptr (FixedArray MAX_DESCRIPTION_SIZE CChar))) (description)
poke ((p `plusPtr` 528 :: Ptr Bool32)) (boolToBool32 (isText))
poke ((p `plusPtr` 536 :: Ptr CSize)) (CSize (dataSize))
poke ((p `plusPtr` 544 :: Ptr (Ptr ()))) (data')
f
cStructSize = 552
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
pokeFixedLengthNullTerminatedByteString ((p `plusPtr` 16 :: Ptr (FixedArray MAX_DESCRIPTION_SIZE CChar))) (mempty)
pokeFixedLengthNullTerminatedByteString ((p `plusPtr` 272 :: Ptr (FixedArray MAX_DESCRIPTION_SIZE CChar))) (mempty)
poke ((p `plusPtr` 528 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 536 :: Ptr CSize)) (CSize (zero))
f
instance FromCStruct PipelineExecutableInternalRepresentationKHR where
peekCStruct p = do
name <- packCString (lowerArrayPtr ((p `plusPtr` 16 :: Ptr (FixedArray MAX_DESCRIPTION_SIZE CChar))))
description <- packCString (lowerArrayPtr ((p `plusPtr` 272 :: Ptr (FixedArray MAX_DESCRIPTION_SIZE CChar))))
isText <- peek @Bool32 ((p `plusPtr` 528 :: Ptr Bool32))
dataSize <- peek @CSize ((p `plusPtr` 536 :: Ptr CSize))
pData <- peek @(Ptr ()) ((p `plusPtr` 544 :: Ptr (Ptr ())))
pure $ PipelineExecutableInternalRepresentationKHR
name description (bool32ToBool isText) (coerce @CSize @Word64 dataSize) pData
instance Storable PipelineExecutableInternalRepresentationKHR where
sizeOf ~_ = 552
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PipelineExecutableInternalRepresentationKHR where
zero = PipelineExecutableInternalRepresentationKHR
mempty
mempty
zero
zero
zero
data PipelineExecutableStatisticValueKHR
= B32 Bool
| I64 Int64
| U64 Word64
| F64 Double
deriving (Show)
instance ToCStruct PipelineExecutableStatisticValueKHR where
withCStruct x f = allocaBytes 8 $ \p -> pokeCStruct p x (f p)
pokeCStruct :: Ptr PipelineExecutableStatisticValueKHR -> PipelineExecutableStatisticValueKHR -> IO a -> IO a
pokeCStruct p = (. const) . runContT . \case
B32 v -> lift $ poke (castPtr @_ @Bool32 p) (boolToBool32 (v))
I64 v -> lift $ poke (castPtr @_ @Int64 p) (v)
U64 v -> lift $ poke (castPtr @_ @Word64 p) (v)
F64 v -> lift $ poke (castPtr @_ @CDouble p) (CDouble (v))
pokeZeroCStruct :: Ptr PipelineExecutableStatisticValueKHR -> IO b -> IO b
pokeZeroCStruct _ f = f
cStructSize = 8
cStructAlignment = 8
instance Zero PipelineExecutableStatisticValueKHR where
zero = I64 zero
peekPipelineExecutableStatisticValueKHR :: PipelineExecutableStatisticFormatKHR -> Ptr PipelineExecutableStatisticValueKHR -> IO PipelineExecutableStatisticValueKHR
peekPipelineExecutableStatisticValueKHR tag p = case tag of
PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR -> B32 <$> (do
b32 <- peek @Bool32 (castPtr @_ @Bool32 p)
pure $ bool32ToBool b32)
PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR -> I64 <$> (peek @Int64 (castPtr @_ @Int64 p))
PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR -> U64 <$> (peek @Word64 (castPtr @_ @Word64 p))
PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR -> F64 <$> (do
f64 <- peek @CDouble (castPtr @_ @CDouble p)
pure $ coerce @CDouble @Double f64)
-- | VkPipelineExecutableStatisticFormatKHR - Enum describing a pipeline
-- executable statistic
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_KHR_pipeline_executable_properties VK_KHR_pipeline_executable_properties>,
-- 'PipelineExecutableStatisticKHR'
newtype PipelineExecutableStatisticFormatKHR = PipelineExecutableStatisticFormatKHR Int32
deriving newtype (Eq, Ord, Storable, Zero)
-- | 'PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR' specifies that the
-- statistic is returned as a 32-bit boolean value which /must/ be either
-- 'Vulkan.Core10.FundamentalTypes.TRUE' or
-- 'Vulkan.Core10.FundamentalTypes.FALSE' and /should/ be read from the
-- @b32@ field of 'PipelineExecutableStatisticValueKHR'.
pattern PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR = PipelineExecutableStatisticFormatKHR 0
-- | 'PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR' specifies that the
-- statistic is returned as a signed 64-bit integer and /should/ be read
-- from the @i64@ field of 'PipelineExecutableStatisticValueKHR'.
pattern PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR = PipelineExecutableStatisticFormatKHR 1
-- | 'PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR' specifies that the
-- statistic is returned as an unsigned 64-bit integer and /should/ be read
-- from the @u64@ field of 'PipelineExecutableStatisticValueKHR'.
pattern PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR = PipelineExecutableStatisticFormatKHR 2
-- | 'PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR' specifies that the
-- statistic is returned as a 64-bit floating-point value and /should/ be
-- read from the @f64@ field of 'PipelineExecutableStatisticValueKHR'.
pattern PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR = PipelineExecutableStatisticFormatKHR 3
{-# complete PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR,
PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR,
PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR,
PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR :: PipelineExecutableStatisticFormatKHR #-}
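-- Note (illustration only, not part of the bindings): the format tag is used
-- by 'peekPipelineExecutableStatisticValueKHR' above to pick the matching
-- constructor, so on the Haskell side a consumer can simply pattern match on
-- 'PipelineExecutableStatisticValueKHR':
--
-- > renderStatisticValue :: PipelineExecutableStatisticValueKHR -> String
-- > renderStatisticValue v = case v of
-- >   B32 b -> show b
-- >   I64 i -> show i
-- >   U64 u -> show u
-- >   F64 d -> show d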
conNamePipelineExecutableStatisticFormatKHR :: String
conNamePipelineExecutableStatisticFormatKHR = "PipelineExecutableStatisticFormatKHR"
enumPrefixPipelineExecutableStatisticFormatKHR :: String
enumPrefixPipelineExecutableStatisticFormatKHR = "PIPELINE_EXECUTABLE_STATISTIC_FORMAT_"
showTablePipelineExecutableStatisticFormatKHR :: [(PipelineExecutableStatisticFormatKHR, String)]
showTablePipelineExecutableStatisticFormatKHR =
[ (PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR , "BOOL32_KHR")
, (PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR , "INT64_KHR")
, (PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR , "UINT64_KHR")
, (PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR, "FLOAT64_KHR")
]
instance Show PipelineExecutableStatisticFormatKHR where
showsPrec = enumShowsPrec enumPrefixPipelineExecutableStatisticFormatKHR
showTablePipelineExecutableStatisticFormatKHR
conNamePipelineExecutableStatisticFormatKHR
(\(PipelineExecutableStatisticFormatKHR x) -> x)
(showsPrec 11)
instance Read PipelineExecutableStatisticFormatKHR where
readPrec = enumReadPrec enumPrefixPipelineExecutableStatisticFormatKHR
showTablePipelineExecutableStatisticFormatKHR
conNamePipelineExecutableStatisticFormatKHR
PipelineExecutableStatisticFormatKHR
type KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION = 1
-- No documentation found for TopLevel "VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION"
pattern KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION :: forall a . Integral a => a
pattern KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION = 1
type KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME = "VK_KHR_pipeline_executable_properties"
-- No documentation found for TopLevel "VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME"
pattern KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME = "VK_KHR_pipeline_executable_properties"
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_KHR_pipeline_executable_properties.hs | bsd-3-clause | 57,437 | 331 | 19 | 9,737 | 5,842 | 4,121 | 1,721 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module OpenRTB.Types.Enum.VideoQualitySpec where
import Control.Applicative
import Data.Aeson
import Data.Aeson.TH
import Test.Hspec
import Test.QuickCheck
import Test.Instances
import OpenRTB.Types.Enum.VideoQuality
data Mock = Mock { vct :: VideoQuality } deriving (Eq, Show)
$(deriveJSON defaultOptions ''Mock)
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "VideoQuality" $ do
context "JSON" $ do
it "should convert back and forth" $ property $ do
\m -> (decode . encode) m == Just (m :: Mock)
instance Arbitrary Mock where
arbitrary = Mock <$> arbitrary
| ankhers/openRTB-hs | spec/OpenRTB/Types/Enum/VideoQualitySpec.hs | bsd-3-clause | 662 | 0 | 19 | 112 | 196 | 106 | 90 | 21 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module OpenRTB.Types.Enum.VASTCompanionTypeSpec where
import Control.Applicative
import Data.Aeson
import Data.Aeson.TH
import Test.Hspec
import Test.QuickCheck
import Test.Instances
import OpenRTB.Types.Enum.VASTCompanionType
data Mock = Mock { vct :: VASTCompanionType } deriving (Eq, Show)
$(deriveJSON defaultOptions ''Mock)
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "VASTCompanionType" $ do
context "JSON" $ do
it "should convert back and forth" $ property $ do
\m -> (decode . encode) m == Just (m :: Mock)
instance Arbitrary Mock where
arbitrary = Mock <$> arbitrary
| ankhers/openRTB-hs | spec/OpenRTB/Types/Enum/VASTCompanionTypeSpec.hs | bsd-3-clause | 682 | 0 | 19 | 112 | 197 | 106 | 91 | 21 | 1 |
import Watch
import Watch.Options
main :: IO ()
main = execParser optsI >>= \ args -> watch (wTarget args) args
where
optsI = info (helper <*> opts)
(fullDesc
<> progDesc "Watch a source directory for changes"
<> header "src-watch" )
| pikajude/src-watch | src/Main.hs | bsd-3-clause | 291 | 0 | 11 | 97 | 83 | 42 | 41 | 8 | 1 |
-- | This module is a linter for the input and output of our programs. This
-- means it checks properties that, if they fail, indicate a bug somewhere in
-- the program.
--
-- It should not be possible to trigger the linter via the usual interface (but
-- easily so via the debugging interface).
--
-- The existence of this module means that Haskell’s type system is not perfect...
--
-- The name "linter" comes from GHC, which also has one.
module Lint where
import qualified Data.Map as M
import Text.Printf
import Data.Monoid
import Data.Tagged
import Types
type Lints = [String]
lint :: Context -> Task -> Proof -> Lints
lint logic _task proof = mconcat
[ wrongLocalHyps, missingRule, wrongBlock, wrongPort, wrongSourceType, wrongTargetType, nonUniqueBlockNums, nonPositiveBlockNums ]
where
wrongLocalHyps =
[ printf "local hypothesis \"%s\" of rule \"%s\" has an invalid consumedBy field \"%s\""
(untag portKey) (untag ruleKey) (untag consumedBy)
| (ruleKey, rule) <- M.toList (ctxtRules logic)
, (portKey, Port (PTLocalHyp consumedBy) _ _) <- M.toList (ports rule)
, maybe True (not.isAssumption) $ M.lookup consumedBy (ports rule)
]
where isAssumption (Port PTAssumption _ _) = True
isAssumption _ = False
missingRule =
[ printf "Block \"%s\" references unknown rule \"%s\"" (untag blockKey) (untag ruleKey)
| (blockKey, Block _ ruleKey) <- M.toList (blocks proof)
, ruleKey `M.notMember` ctxtRules logic
]
wrongBlock =
[ printf "Connection \"%s\" references unknown block \"%s\""
(untag connKey) (untag blockKey)
| (connKey, conn) <- M.toList (connections proof)
, BlockPort blockKey _ <- [connFrom conn, connTo conn]
, blockKey `M.notMember` blocks proof
]
wrongPort =
[ printf "Connection \"%s\" references unknown port \"%s\" of block \"%s\", rule \"%s\""
(untag connKey) (untag portKey) (untag blockKey) (untag ruleKey)
| (connKey, conn) <- M.toList (connections proof)
, BlockPort blockKey portKey <- [connFrom conn, connTo conn]
, Just (Block _ ruleKey) <- return $ M.lookup blockKey (blocks proof)
, Just rule <- return $ M.lookup ruleKey (ctxtRules logic)
, portKey `M.notMember` ports rule
]
wrongSourceType =
[ printf "Connection \"%s\" begins in port \"%s\" of block \"%s\", which is not a conclusion or local hypothesis"
(untag connKey) (untag portKey) (untag blockKey)
| (connKey, conn) <- M.toList (connections proof)
, BlockPort blockKey portKey <- return $ connFrom conn
, Just (Block _ ruleKey) <- return $ M.lookup blockKey (blocks proof)
, Just rule <- return $ M.lookup ruleKey (ctxtRules logic)
, Just port <- return $ M.lookup portKey (ports rule)
, not $ isOk port
] ++
[ printf "Connection \"%s\" begins in conclusion %d"
(untag connKey) n
| (connKey, conn) <- M.toList (connections proof)
, ConclusionPort n <- return $ connFrom conn
]
where isOk (Port (PTLocalHyp _) _ _) = True
isOk (Port PTConclusion _ _) = True
isOk _ = False
wrongTargetType =
[ printf "Connection \"%s\" ends in port \"%s\" of block \"%s\", which is not an assumption."
(untag connKey) (untag portKey) (untag blockKey)
| (connKey, conn) <- M.toList (connections proof)
, BlockPort blockKey portKey <- return $ connTo conn
, Just (Block _ ruleKey) <- return $ M.lookup blockKey (blocks proof)
, Just rule <- return $ M.lookup ruleKey (ctxtRules logic)
, Just port <- return $ M.lookup portKey (ports rule)
, not $ isOk port
] ++
[ printf "Connection \"%s\" begins in global assumption %d"
(untag connKey) n
| (connKey, conn) <- M.toList (connections proof)
, AssumptionPort n <- return $ connTo conn
]
where isOk (Port PTAssumption _ _) = True
isOk _ = False
nonPositiveBlockNums
= [ printf "Block number %d of block %s is not positive" n (untag bk)
| (bk, b) <- M.toList (blocks proof)
, let n = blockNum b
, n <= 0
]
nonUniqueBlockNums
= [ printf "Block number %d assigned %d times." n c | (n,c) <- M.toList bad ]
where
bad = M.filter (>1) $
M.fromListWith (+) $
[ (blockNum b ,(1::Int)) | b <- M.elems (blocks proof) ]
lintsToEither :: Lints -> Either String ()
lintsToEither [] = Right ()
lintsToEither ls = Left (unlines ls)
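-- A sketch of how a caller might wire the linter in (assuming it already has
-- suitable 'Context', 'Task' and 'Proof' values in scope as @logic@, @task@
-- and @proof@):
--
--   case lintsToEither (lint logic task proof) of
--     Left problems -> error ("lint failed (this is a bug):\n" ++ problems)
--     Right ()      -> return ()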
| psibi/incredible | logic/Lint.hs | mit | 4,678 | 0 | 15 | 1,306 | 1,389 | 707 | 682 | 82 | 5 |
module Foundation where
import Import.NoFoundation
import Database.Persist.Sql (ConnectionPool, runSqlPool)
import Text.Hamlet (hamletFile)
import Text.Jasmine (minifym)
import Yesod.Auth.BrowserId (authBrowserId)
import Yesod.Auth.Message (AuthMessage (InvalidLogin))
import Yesod.Default.Util (addStaticContentExternal)
import Yesod.Core.Types (Logger)
import qualified Yesod.Core.Unsafe as Unsafe
-- | The foundation datatype for your application. This can be a good place to
-- keep settings and values requiring initialization before your application
-- starts running, such as database connections. Every handler will have
-- access to the data present here.
data App = App
{ appSettings :: AppSettings
, appStatic :: Static -- ^ Settings for static file serving.
, appConnPool :: ConnectionPool -- ^ Database connection pool.
, appHttpManager :: Manager
, appLogger :: Logger
}
instance HasHttpManager App where
getHttpManager = appHttpManager
-- This is where we define all of the routes in our application. For a full
-- explanation of the syntax, please see:
-- http://www.yesodweb.com/book/routing-and-handlers
--
-- Note that this is really half the story; in Application.hs, mkYesodDispatch
-- generates the rest of the code. Please see the linked documentation for an
-- explanation for this split.
--
-- This function also generates the following type synonyms:
-- type Handler = HandlerT App IO
-- type Widget = WidgetT App IO ()
mkYesodData "App" $(parseRoutesFile "config/routes")
-- | A convenient synonym for creating forms.
type Form x = Html -> MForm (HandlerT App IO) (FormResult x, Widget)
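-- For example (a sketch only; @displayNameForm@ is a hypothetical form, and
-- @renderDivs@, @areq@ and @textField@ come from yesod-form):
--
-- > displayNameForm :: Form Text
-- > displayNameForm = renderDivs $ areq textField "Display name" Nothing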
-- Please see the documentation for the Yesod typeclass. There are a number
-- of settings which can be configured by overriding methods here.
instance Yesod App where
-- Controls the base of generated URLs. For more information on modifying,
-- see: https://github.com/yesodweb/yesod/wiki/Overriding-approot
approot = ApprootMaster $ appRoot . appSettings
    -- Store session data on the client in encrypted cookies;
    -- the default session idle timeout is 120 minutes.
makeSessionBackend _ = Just <$> defaultClientSessionBackend
120 -- timeout in minutes
"config/client_session_key.aes"
defaultLayout widget = do
master <- getYesod
mmsg <- getMessage
currentRoute <- getCurrentRoute
-- We break up the default layout into two components:
-- default-layout is the contents of the body tag, and
-- default-layout-wrapper is the entire page. Since the final
-- value passed to hamletToRepHtml cannot be a widget, this allows
-- you to use normal widget features in default-layout.
pc <- widgetToPageContent $ do
addStylesheet $ StaticR css_bootstrap_css
addStylesheet $ StaticR css_haskell_font_css
addScriptRemote "https://ajax.googleapis.com/ajax/libs/jquery/1.11.3/jquery.min.js"
addScript $ StaticR js_bootstrap_min_js
addScript $ StaticR js_ie10_viewport_bug_workaround_js
$(widgetFile "default-layout")
withUrlRenderer $(hamletFile "templates/default-layout-wrapper.hamlet")
-- The page to be redirected to when authentication is required.
authRoute _ = Just $ AuthR LoginR
-- Routes not requiring authentication.
isAuthorized (AuthR _) _ = return Authorized
isAuthorized FaviconR _ = return Authorized
isAuthorized RobotsR _ = return Authorized
-- Default to Authorized for now.
isAuthorized _ _ = return Authorized
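    -- If a route later needs to be restricted, the usual pattern looks like
    -- this (a sketch; ProfileR is a hypothetical route):
    --
    --   isAuthorized (ProfileR _) _ = do
    --       mu <- maybeAuthId
    --       return $ case mu of
    --           Nothing -> AuthenticationRequired
    --           Just _  -> Authorized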
-- This function creates static content files in the static folder
-- and names them based on a hash of their content. This allows
-- expiration dates to be set far in the future without worry of
-- users receiving stale content.
addStaticContent ext mime content = do
master <- getYesod
let staticDir = appStaticDir $ appSettings master
addStaticContentExternal
minifym
genFileName
staticDir
(StaticR . flip StaticRoute [])
ext
mime
content
where
-- Generate a unique filename based on the content itself
genFileName lbs = "autogen-" ++ base64md5 lbs
-- What messages should be logged. The following includes all messages when
-- in development, and warnings and errors in production.
shouldLog app _source level =
appShouldLogAll (appSettings app)
|| level == LevelWarn
|| level == LevelError
makeLogger = return . appLogger
-- How to run database actions.
instance YesodPersist App where
type YesodPersistBackend App = SqlBackend
runDB action = do
master <- getYesod
runSqlPool action $ appConnPool master
instance YesodPersistRunner App where
getDBRunner = defaultGetDBRunner appConnPool
instance YesodAuth App where
type AuthId App = UserId
-- Where to send a user after successful login
loginDest _ = HomeR
-- Where to send a user after logout
logoutDest _ = HomeR
-- Override the above two destinations when a Referer: header is present
redirectToReferer _ = True
authenticate creds = runDB $ do
x <- getBy $ UniqueUser $ credsIdent creds
return $ case x of
Just (Entity uid _) -> Authenticated uid
Nothing -> UserError InvalidLogin
-- You can add other plugins like BrowserID, email or OAuth here
authPlugins _ = [authBrowserId def]
authHttpManager = getHttpManager
instance YesodAuthPersist App
-- This instance is required to use forms. You can modify renderMessage to
-- achieve customized and internationalized form validation messages.
instance RenderMessage App FormMessage where
renderMessage _ _ = defaultFormMessage
unsafeHandler :: App -> Handler a -> IO a
unsafeHandler = Unsafe.fakeHandlerGetLogger appLogger
-- Note: Some functionality previously present in the scaffolding has been
-- moved to documentation in the Wiki. Following are some hopefully helpful
-- links:
--
-- https://github.com/yesodweb/yesod/wiki/Sending-email
-- https://github.com/yesodweb/yesod/wiki/Serve-static-files-from-a-separate-domain
-- https://github.com/yesodweb/yesod/wiki/i18n-messages-in-the-scaffolding
| rcook/seattlehaskell-org | Foundation.hs | mit | 6,374 | 0 | 15 | 1,434 | 851 | 454 | 397 | -1 | -1 |
module Main (main) where
import Pos.Util.Log.LoggerConfig (LoggerConfig)
import Pos.Infra.Network.Yaml (Topology)
import Data.List.Split
import System.Environment
import Data.Yaml
import Control.Monad.Catch
import Data.Monoid
main :: IO ()
main = do
runTest "logConfigs" checkLogConfig
runTest "topologyConfigs" checkTopology
outpath <- getEnv "out"
writeFile outpath "done"
runTest :: FromJSON a => String -> (String -> IO a) -> IO ()
runTest var func = do
paths <- getEnv var
let
pathList = splitOn " " paths
doTest path = do
putStrLn $ "testing: " <> path
func path
mapM_ doTest pathList
checkLogConfig :: String -> IO LoggerConfig
checkLogConfig path = do
either throwM return =<< decodeFileEither path
checkTopology :: String -> IO Topology
checkTopology path = either throwM return =<< decodeFileEither path
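-- Example invocation (a sketch; the YAML paths and executable name are
-- placeholders). Each environment variable named in 'main' holds a
-- space-separated list of files to validate, and "out" (as provided by e.g. a
-- Nix build) names the file written once all checks pass:
--
--   logConfigs="log-configs/a.yaml log-configs/b.yaml" \
--   topologyConfigs="topology/t.yaml" \
--   out=$PWD/result ./yaml-validation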
| input-output-hk/pos-haskell-prototype | yaml-validation/Main.hs | mit | 856 | 0 | 13 | 158 | 278 | 137 | 141 | 28 | 1 |
{-# LANGUAGE PackageImports #-}
import Control.Monad
import Data.Pipe
import Data.Char
import System.IO
import "monads-tf" Control.Monad.Trans
main :: IO ()
main = do
_ <- runPipe $
readFileP "sample.txt"
=$= takeP 3
=$= convert (map toUpper)
=$= writeString
return ()
readFileP :: FilePath -> Pipe () String IO ()
readFileP fp = bracket (openFile fp ReadMode) hClose hRead
hRead :: Handle -> Pipe () String IO ()
hRead h = do
eof <- lift $ hIsEOF h
unless eof $ do
l <- lift $ hGetLine h
yield l
hRead h
writeString :: Pipe String () IO ()
writeString = do
ms <- await
case ms of
Just s -> lift (putStrLn s) >> writeString
_ -> return ()
convert :: Monad m => (a -> b) -> Pipe a b m ()
convert f = do
mx <- await
case mx of
Just x -> yield (f x) >> convert f
_ -> return ()
takeP :: Monad m => Int -> Pipe a a m ()
takeP 0 = return ()
takeP n = do
mx <- await
case mx of
Just x -> yield x >> takeP (n - 1)
_ -> return ()
| YoshikuniJujo/forest | subprojects/pipeline/examples/upperFile.hs | bsd-3-clause | 968 | 2 | 13 | 242 | 478 | 226 | 252 | 42 | 2 |
{-
Teak synthesiser for the Balsa language
Copyright (C) 2007-2010 The University of Manchester
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Andrew Bardsley <[email protected]> (and others, see AUTHORS)
School of Computer Science, The University of Manchester
Oxford Road, MANCHESTER, M13 9PL, UK
-}
{-# LANGUAGE RankNTypes #-}
module Gui (
gui,
GuiOption (..),
GuiDisplayMode (..),
guiOptionUsage
)
where
import System.Exit
import System.FilePath
import Control.Monad
import Control.Monad.Trans
import Data.Maybe
import Data.List
import System.Posix
import Data.IORef
import Prelude hiding (catch)
import Control.Exception
import qualified Data.IntMap as IM
import Data.Array
import Data.Char
import qualified Data.Text as Text
import Misc
import NetParts
import GuiSupport
import Layout
import Optim
import Plot
import Rule
import Report
import Dot
import Gen
import Context
import ParseTree
import ToolOptions
import Monitor
import Latch
import Graph
import Options
import Balsa
import Teak
-- import Graphics.UI.Gtk hiding ({- fill, -} toLayout)
import qualified Graphics.UI.Gtk as Gtk
import qualified Graphics.UI.Gtk.Gdk.Events as Events
import System.Glib
gui :: (Read network, NetworkIF network) => ToolOptions network -> [Part network] -> IO [Part network]
data LeadView network = LeadView {
leadViewIndex :: Int,
leadViewSummary :: Maybe String,
leadViewCompType :: String,
leadViewLead :: Lead network }
data {- NetworkIF network => -} FlagView network = FlagView {
flagViewEnabled :: Bool,
flagViewOpt :: Maybe (Optim network),
flagViewName :: String,
flagViewEffect :: String }
data {- NetworkIF network => -} GuiInfo network = GuiInfo {
guiHistory :: IORef (GuiHistory network),
guiPartLayout :: IORef (GuiLayout network PartGraphical),
guiFromLayout :: IORef (GuiLayout network PartGraphical),
guiToLayout :: IORef (GuiLayout network PartGraphical),
guiOLayout :: IORef (GuiLayout network OGraphical),
guiParts :: [Part network],
guiLeads :: [Lead network],
guiUpdateLeads :: IO (),
guiGetLeadView :: Int -> Maybe (LeadView network),
guiPartList :: Gtk.TreeView,
guiLeadsList :: Gtk.TreeView,
guiSetFlagList :: IO (),
guiPartListModel :: Gtk.ListStore String,
guiDiffWindow :: Gtk.Window,
guiTimeWindow :: Gtk.Window,
guiOWindow :: Gtk.Window,
guiTimeLimits :: (Integer, Integer),
guiTime :: Integer,
guiStateColours :: Array MonitorState Colour,
guiSetPartSelectedSensitive :: Bool -> IO (),
guiSetTimeLimits :: (Integer, Integer) -> IO (),
guiSetBaseName :: Maybe String -> IO (),
guiLastBalsaFile :: Maybe FilePath,
guiLastNetworkFile :: Maybe FilePath,
guiLastOptimRulesFile :: Maybe FilePath,
guiToolOptions :: ToolOptions network }
data {- NetworkIF network => -} GuiHistory network = GuiHistory {
guiHistoryVerbose :: Bool,
guiHistoryPosition :: Int, -- index of 'current' state. Undo will come from +1, redo from -1
guiHistoryMaxLength :: Int,
guiHistoryCheckpoints :: [GuiCheckpoint network],
guiHistorySetSensitive :: IORef (GuiHistory network) -> IO () }
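-- Illustration: with guiHistoryCheckpoints = [c0, c1, c2] and
-- guiHistoryPosition = 0, the screen shows the state saved in c0; 'undo'
-- moves the position to 1 and restores c1, a following 'redo' moves it back
-- to 0 and restores c0, and 'pushHistory' always prepends a new checkpoint
-- (discarding any pending redo states) and resets the position to 0.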
data {- NetworkIF network => -} GuiCheckpoint network = GuiCheckpoint {
guiCheckpointInfo :: GuiInfo network,
guiCheckpointPartLayout :: GuiLayout network PartGraphical,
guiCheckpointFromLayout :: GuiLayout network PartGraphical,
guiCheckpointToLayout :: GuiLayout network PartGraphical,
guiCheckpointOLayout :: GuiLayout network OGraphical }
historySetSensitive :: NetworkIF network => Gtk.Action -> Gtk.Action -> IORef (GuiHistory network) -> IO ()
historySetSensitive undoAction redoAction historyRef = do
history <- readIORef historyRef
let
position = guiHistoryPosition history
checkpoints = guiHistoryCheckpoints history
currentLength = length checkpoints
Gtk.actionSetSensitive undoAction (position + 1 < currentLength)
Gtk.actionSetSensitive redoAction (position - 1 >= 0)
loseHistory :: NetworkIF network => IORef (GuiInfo network) -> IO ()
loseHistory infoRef = do
info <- readIORef infoRef
let historyRef = guiHistory info
history <- readIORef historyRef
writeIORef historyRef $ history { guiHistoryPosition = 0, guiHistoryCheckpoints = [] }
pushHistory :: NetworkIF network => IORef (GuiInfo network) -> IO ()
pushHistory infoRef = do
info <- readIORef infoRef
partLayout <- readIORef $ guiPartLayout info
fromLayout <- readIORef $ guiFromLayout info
toLayout <- readIORef $ guiToLayout info
oLayout <- readIORef $ guiOLayout info
let historyRef = guiHistory info
history <- readIORef historyRef
let
position = guiHistoryPosition history
maxLength = guiHistoryMaxLength history
checkpoints = guiHistoryCheckpoints history
checkpoint = GuiCheckpoint info partLayout fromLayout toLayout oLayout
currentLength = length checkpoints
checkpoints'
| position > 0 = checkpoint : drop position checkpoints
| currentLength >= maxLength = checkpoint : take (maxLength - 1) checkpoints
| otherwise = checkpoint : checkpoints
when (guiHistoryVerbose history) $ do
putStrLn $ "PUSH"
putStrLn $ "position: " ++ show position
putStrLn $ "currentLength: " ++ show currentLength
writeIORef historyRef $ history { guiHistoryPosition = 0, guiHistoryCheckpoints = checkpoints' }
guiHistorySetSensitive history historyRef
restoreFromCheckpoint :: NetworkIF network => IORef (GuiInfo network) -> GuiCheckpoint network -> IO ()
restoreFromCheckpoint infoRef checkpoint = do
let info' = guiCheckpointInfo checkpoint
writeIORef infoRef info'
writeIORef (guiPartLayout info') $ guiCheckpointPartLayout checkpoint
writeIORef (guiFromLayout info') $ guiCheckpointFromLayout checkpoint
writeIORef (guiToLayout info') $ guiCheckpointToLayout checkpoint
writeIORef (guiOLayout info') $ guiCheckpointOLayout checkpoint
redrawAfterUndo :: NetworkIF network => IORef (GuiInfo network) -> IO ()
redrawAfterUndo infoRef = do
info <- readIORef infoRef
exposeLayout $ guiFromLayout info
exposeLayout $ guiToLayout info
exposeLayout $ guiOLayout info
partName <- liftM (maybe "" networkName) $ guiPart infoRef
updatePartSelection infoRef partName
updateLeads infoRef
exposeLayout $ guiPartLayout info
undo :: NetworkIF network => IORef (GuiInfo network) -> IO ()
undo infoRef = do
info <- readIORef infoRef
let historyRef = guiHistory info
history <- readIORef historyRef
let
position = guiHistoryPosition history
checkpoints = guiHistoryCheckpoints history
currentLength = length checkpoints
position' = position + 1
checkpoint = checkpoints !! position'
when (guiHistoryVerbose history) $ do
putStrLn $ "UNDO"
putStrLn $ "position: " ++ show position
putStrLn $ "currentLength: " ++ show currentLength
if position' >= currentLength
then print "*** can't undo any further"
else do
restoreFromCheckpoint infoRef checkpoint
writeIORef historyRef $ history { guiHistoryPosition = position' }
redrawAfterUndo infoRef
guiHistorySetSensitive history historyRef
redo :: NetworkIF network => IORef (GuiInfo network) -> IO ()
redo infoRef = do
info <- readIORef infoRef
let historyRef = guiHistory info
history <- readIORef historyRef
let
position = guiHistoryPosition history
checkpoints = guiHistoryCheckpoints history
currentLength = length checkpoints
position' = position - 1
checkpoint = checkpoints !! position'
when (guiHistoryVerbose history) $ do
putStrLn $ "REDO"
putStrLn $ "position: " ++ show position
putStrLn $ "currentLength: " ++ show currentLength
if position' < 0
then print "*** can't redo any further"
else do
restoreFromCheckpoint infoRef checkpoint
writeIORef historyRef $ history { guiHistoryPosition = position' }
redrawAfterUndo infoRef
guiHistorySetSensitive history historyRef
initLeadsList :: NetworkIF network => IORef (GuiInfo network) -> Gtk.TreeView -> IO ()
initLeadsList infoRef tree = do
model <- Gtk.listStoreNew []
renderer <- Gtk.cellRendererTextNew
compNoColumn <- Gtk.treeViewColumnNew
Gtk.treeViewColumnSetTitle compNoColumn "No."
Gtk.treeViewColumnSetResizable compNoColumn True
Gtk.treeViewColumnPackStart compNoColumn renderer True
compTypeColumn <- Gtk.treeViewColumnNew
Gtk.treeViewColumnSetTitle compTypeColumn ""
Gtk.treeViewColumnSetResizable compTypeColumn True
Gtk.treeViewColumnPackStart compTypeColumn renderer True
nameColumn <- Gtk.treeViewColumnNew
Gtk.treeViewColumnSetTitle nameColumn "Optim."
Gtk.treeViewColumnSetResizable nameColumn True
Gtk.treeViewColumnPackStart nameColumn renderer True
summaryColumn <- Gtk.treeViewColumnNew
Gtk.treeViewColumnSetTitle summaryColumn "Summary"
Gtk.treeViewColumnSetResizable summaryColumn True
Gtk.treeViewColumnPackStart summaryColumn renderer True
let
showComp leadView = [ Gtk.cellText Gtk.:= show (nwComp $ leadCompNo $ leadViewLead leadView) ]
showCompType leadView = [ Gtk.cellText Gtk.:= leadViewCompType leadView ]
showName leadView = [ Gtk.cellText Gtk.:= optimName (leadOptim (leadViewLead leadView)) ]
showSummary (LeadView { leadViewSummary = Nothing }) = [ Gtk.cellText Gtk.:= "" ]
showSummary (LeadView { leadViewSummary = Just summary }) = [ Gtk.cellText Gtk.:= ("x " ++ summary) ]
Gtk.cellLayoutSetAttributes compNoColumn renderer model showComp
Gtk.cellLayoutSetAttributes compTypeColumn renderer model showCompType
Gtk.cellLayoutSetAttributes nameColumn renderer model showName
Gtk.cellLayoutSetAttributes summaryColumn renderer model showSummary
Gtk.treeViewAppendColumn tree compNoColumn
Gtk.treeViewAppendColumn tree compTypeColumn
Gtk.treeViewAppendColumn tree nameColumn
Gtk.treeViewAppendColumn tree summaryColumn
Gtk.treeViewSetModel tree model
let
updateLeads = do
info <- readIORef infoRef
maybePart <- guiPart infoRef
let
toolOpts = guiToolOptions info
Just part = maybePart
leads = guiLeads info
-- leadsModel = guiLeadsModel info
context = OptimContext { optimContextParts = guiParts info }
summaryMaxLength = 40 :: Int
formatSummary Nothing = Nothing
formatSummary (Just str)
| listAtLeastLength summaryMaxLength str' = Just (take summaryMaxLength str' ++ " ...")
| otherwise = Just str'
where
str' = concatMap escChar str
escChar '\n' = " "
escChar c = [c]
showOnlyPassing = findBoolSubOption (optGui toolOpts) GuiOnlyPassingLeads
addLead (i, lead) = do
let
compXsummary = do
-- Remove leads with no component, where optimTestEnabled is false,
-- and for which optimTestComp no longer holds
leadComp <- tryPart part $ nwGetComp $ leadCompNo lead
-- liftIf $ optimTestEnabled (leadOptim lead) flags
when (not (optimTestComp (leadOptim lead) leadComp)) $ fail ""
let leadSummary = formatSummary $ optimPassesTest context part lead
return (leadComp, leadSummary)
Just (comp, summary) = compXsummary
if isJust compXsummary && (not showOnlyPassing || isJust summary)
then do
let view = LeadView i summary (nwCompShortName comp) lead
Gtk.listStoreAppend model view
return $ Just view
else return Nothing
Gtk.listStoreClear model
getLead <- if isJust maybePart
then do
displayLeadViews <- liftM catMaybes $ mapM addLead $ zip [0..] leads
return $ Just . (displayLeadViews !!)
else return $ \_ -> Nothing
modifyIORef infoRef $ \info -> info { guiGetLeadView = getLead }
modifyIORef infoRef $ \info -> info { guiUpdateLeads = updateLeads }
return ()
makePartList :: NetworkIF network => Gtk.TreeView -> IORef (GuiInfo network) -> IO (Gtk.ListStore String)
makePartList tree infoRef = do
model <- Gtk.listStoreNew []
renderer <- Gtk.cellRendererTextNew
partColumn <- Gtk.treeViewColumnNew
Gtk.treeViewColumnSetTitle partColumn "Part"
Gtk.treeViewColumnSetResizable partColumn True
Gtk.treeViewColumnPackStart partColumn renderer True
Gtk.cellLayoutSetAttributes partColumn renderer model $ \row -> [ Gtk.cellText Gtk.:= row ]
Gtk.treeViewAppendColumn tree partColumn
selection <- Gtk.treeViewGetSelection tree
Gtk.onSelectionChanged selection $ do
rows <- Gtk.treeSelectionGetSelectedRows selection
case rows of
[[rowNo]] -> do
-- print $ "Row select " ++ show rowNo
info <- readIORef infoRef
let layout = guiPartLayout info
let part = guiParts info !! rowNo
setPart infoRef $ Just part
setLeads infoRef []
updatePartGraphical infoRef
fitLayout layout
exposeLayout layout
partLeads infoRef
updateLeads infoRef
setLayoutButtonsSensitive (guiPartLayout info) True
guiSetPartSelectedSensitive info True
_ -> do
-- print "Other row select"
info <- readIORef infoRef
let layout = guiPartLayout info
oldPart <- guiPart infoRef
setPart infoRef Nothing
setLeads infoRef []
-- You get this signal really early on, before the drawing area is set
-- check we're unselecting a part, rather than initialising
when (isJust oldPart) $ do
updateLeads infoRef
clearLayout layout
setLayoutButtonsSensitive (guiPartLayout info) False
guiSetPartSelectedSensitive info False
loseHistory infoRef
pushHistory infoRef
Gtk.treeViewSetModel tree model
return model
setParts :: NetworkIF network => IORef (GuiInfo network) -> [Part network] -> IO ()
setParts infoRef parts = modifyIORef infoRef $ \info -> info { guiParts = parts }
setPart :: NetworkIF network => IORef (GuiInfo network) -> Maybe (Part network) -> IO ()
setPart infoRef part = do
info <- readIORef infoRef
setLayoutPart (guiPartLayout info) part
setLeads :: NetworkIF network => IORef (GuiInfo network) -> [Lead network] -> IO ()
setLeads infoRef leads = modifyIORef infoRef $ \info -> info { guiLeads = leads }
updateLeads :: NetworkIF network => IORef (GuiInfo network) -> IO ()
updateLeads infoRef = readIORef infoRef >>= guiUpdateLeads
updatePartList :: NetworkIF network => IORef (GuiInfo network) -> IO ()
updatePartList infoRef = do
info <- readIORef infoRef
let partsListModel = guiPartListModel info
Gtk.listStoreClear partsListModel
mapM_ (Gtk.listStoreAppend partsListModel . networkName) $ guiParts info
guiPart :: NetworkIF network => IORef (GuiInfo network) -> IO (Maybe (Part network))
guiPart infoRef = do
info <- readIORef infoRef
liftM layoutPart $ readIORef $ guiPartLayout info
makePartGraphical :: NetworkIF network =>
[GuiOption] -> [PlotOption] -> Part network -> IO PartGraphical
makePartGraphical guiOpts plotOpts part = do
graphical <- case displayMode of
GuiSpring -> partGraphicalSpringInfo part'
GuiDot -> partGraphicalInfo False plotOpts name part'
return $ graphical { partShowLinkColours = showLinkWidthColours }
where
part' = trimPartForPartialPlot plotOpts part
name = networkName part
showLinkWidthColours = findBoolSubOption guiOpts GuiShowLinkWidthColours
GuiDisplayMode displayMode = getSubOption guiOpts $ GuiDisplayMode GuiDot
updatePartGraphical :: NetworkIF network => IORef (GuiInfo network) -> IO ()
updatePartGraphical infoRef = do
info <- readIORef infoRef
maybePart <- guiPart infoRef
let
Just part = maybePart
plotOpts = optPlot $ guiToolOptions info
when (isJust maybePart) $ do
graphical <- makePartGraphical
(optGui $ guiToolOptions info)
-- (optPlot $ guiToolOptions info)
([{- PlotRatio 1, -} PlotSize (250, 250), PlotOLength 40]
++ mapMaybe (findSubOption plotOpts) [PlotBreakVariables, PlotPartial undefined,
PlotShowUnconnected])
part
let
showLinkWidthColours = findBoolSubOption (optGui $ guiToolOptions info) GuiShowLinkWidthColours
graphical' = graphical { partShowLinkColours = showLinkWidthColours }
setLayoutMoving (guiPartLayout info) Nothing
setLayoutGraphical (guiPartLayout info) $ Just graphical'
readIORef (guiPartLayout info) >>= layoutUpdateScrollbars
redrawPart :: NetworkIF network => IORef (GuiInfo network) -> IO ()
redrawPart infoRef = do
maybePart <- guiPart infoRef
when (isJust maybePart) $ do
do
info <- readIORef infoRef
let layout = guiPartLayout info
propViewOrigin <- getLayoutProportionalOrigin layout
updatePartGraphical infoRef
setLayoutProportionalOrigin layout propViewOrigin
info <- readIORef infoRef
let layout = guiPartLayout info
exposeLayout layout
redrawPartAfterOptionChange :: NetworkIF network => IORef (GuiInfo network) -> IO ()
redrawPartAfterOptionChange infoRef = do
redrawPart infoRef
updatePartSelection :: NetworkIF network => IORef (GuiInfo network) -> String -> IO ()
updatePartSelection infoRef name = do
info <- readIORef infoRef
let
parts = guiParts info
maybeI = nwFindPartIndex parts name
Just i = maybeI
selection <- Gtk.treeViewGetSelection $ guiPartList info
Gtk.treeSelectionSelectPath selection $ if isJust maybeI then [i] else []
partLeads :: NetworkIF network => IORef (GuiInfo network) -> IO ()
partLeads infoRef = do
info <- readIORef infoRef
maybePart <- guiPart infoRef
let Just part = maybePart
when (isJust maybePart) $ do
setLeads infoRef $ makePartLeads part $ optEnabledOptims $ guiToolOptions info
-- skipToPassingLead : skip leads in the given list until one passes. Returns (skipped leads, tail of leads list
-- starting with first passing lead)
skipToPassingLead :: NetworkIF network => OptimContext network -> Part network -> [Lead network] ->
([Lead network], [Lead network])
skipToPassingLead context part leads = body [] leads
where
body skipped [] = (reverse skipped, [])
body skipped (lead:leads)
| isJust (optimPassesTest context part lead) = (reverse skipped, lead:leads)
| otherwise = body (lead:skipped) leads
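-- For instance (illustration only): if only the third of four leads passes
-- the optimisation test, skipToPassingLead context part [l1, l2, l3, l4]
-- gives ([l1, l2], [l3, l4]); if none passes it gives ([l1, l2, l3, l4], []).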
modifyToolOptions :: NetworkIF network => IORef (GuiInfo network) ->
(ToolOptions network -> ToolOptions network) -> IO ()
modifyToolOptions infoRef f = modifyIORef infoRef $ \info -> info { guiToolOptions = f (guiToolOptions info) }
compileBalsaToParts :: NetworkIF network => IORef (GuiInfo network) -> String ->
IO (Context Decl, [Part network], [Report])
compileBalsaToParts infoRef filename = do
balsaSearchPath <- liftM (optBalsaSearchPath . guiToolOptions) $ readIORef infoRef
let
splitFilename = splitWith "/" filename
dirName = joinWith "/" $ init splitFilename
baseName = dirName ++ "/" ++ (joinWith "." $ init $ splitWith "." $ last splitFilename)
otherPaths
| dirName == "" = []
| otherwise = [dirName]
parseTree <- runWhyT $ compileBalsaFile (otherPaths ++ balsaSearchPath) filename
let
connect = defaultConnectWhy []
unalias partsIn = map ((flip runPart_) networkRemoveAliases) partsIn
Why r parts = parseTree `connect`
(\context -> do
parts <- teak defaultTeakOptions context
return $ unalias parts)
Why _ context = parseTree
case r of
Incomplete -> return (context, [], [Report (PosFile filename (ImportFile filename)) "incomplete"])
Complete -> do
info <- readIORef infoRef
guiSetBaseName info $ Just baseName
return (context, parts, [])
Wrong reports -> return (context, [], reports)
updateAll :: NetworkIF network => IORef (GuiInfo network) -> IO ()
updateAll infoRef = do
updatePartList infoRef
maybePart <- guiPart infoRef
info <- readIORef infoRef
if isJust maybePart
then do
partLeads infoRef
redrawPart infoRef
else do
let layout = guiPartLayout info
clearLayout layout
updateLeads infoRef
catchAndRestore :: NetworkIF network => IORef (GuiInfo network) -> IO () -> IO ()
catchAndRestore infoRef m = do
info <- readIORef infoRef
part <- guiPart infoRef
catch m $ \(ErrorCall str) -> do
writeIORef infoRef info
setPart infoRef part
putStrLn $ "ERROR: " ++ str
updateAll infoRef
when (isJust part) $ updatePartSelection infoRef $ networkName $ fromJust part
newParts :: NetworkIF network => IORef (GuiInfo network) -> [Part network] -> IO ()
newParts _ [] = return ()
newParts infoRef parts = do
info <- readIORef infoRef
setParts infoRef parts
setPart infoRef Nothing
setLeads infoRef []
setLayoutButtonsSensitive (guiPartLayout info) False
guiSetPartSelectedSensitive info False
updateAll infoRef
replacePart :: NetworkIF network => IORef (GuiInfo network) -> Part network -> IO ()
replacePart infoRef newPart = do
info <- readIORef infoRef
oldPart <- guiPart infoRef
let
parts = guiParts info
Just i = nwFindPartIndex parts $ networkName newPart
parts' = replaceAt parts i newPart
setParts infoRef parts'
setPart infoRef $ Just newPart
-- redraw if we were previously looking at this part
when (isJust oldPart && networkName (fromJust oldPart) == networkName newPart) $ redrawPart infoRef
return ()
showNetworkDiff :: NetworkIF network => IORef (GuiInfo network) -> Part network -> Part network -> IO ()
showNetworkDiff infoRef fromPart toPart = do
showDiff <- isSetGuiOption infoRef GuiDiffWhenStepping
when showDiff $ do
info <- readIORef infoRef
setLayoutPart (guiFromLayout info) $ Just fromPart
setLayoutPart (guiToLayout info) $ Just toPart
redrawNetworkDiff infoRef
type GetSet whole elem = (whole -> elem, whole -> elem -> whole)
guiGetSet :: NetworkIF network => GetSet (ToolOptions network) [GuiOption]
guiGetSet = (optGui, \toolOpts opts -> toolOpts { optGui = opts })
plotGetSet :: NetworkIF network => GetSet (ToolOptions network) [PlotOption]
plotGetSet = (optPlot, \toolOpts opts -> toolOpts { optPlot = opts })
type ModifyM network subOption = IORef (GuiInfo network) -> subOption ->
(Maybe subOption -> [subOption] -> [subOption]) -> IO ()
type SetM network subOption = IORef (GuiInfo network) -> subOption -> IO ()
type FindM network subOption = IORef (GuiInfo network) -> subOption -> IO (Maybe subOption)
type GetM network subOption = IORef (GuiInfo network) -> subOption -> IO subOption
type ClearM network subOption = IORef (GuiInfo network) -> subOption -> IO ()
type IsSetM network subOption = IORef (GuiInfo network) -> subOption -> IO Bool
-- modifySubOptionM : modify the sub-option of ToolOptions matching the prototype option. Apply the
-- function f to `Maybe' the matching option (if found in the given list) and to the
-- original option list without that option, to give a new option list. That is, to *keep* an option
-- you need to reinsert it in the list.
-- `get' and `set' are functions to extract and insert the sub-options into ToolOptions
modifySubOptionM :: (SubOption subOption, NetworkIF network) =>
GetSet (ToolOptions network) [subOption] -> ModifyM network subOption
modifySubOptionM (get, set) infoRef prototypeOpt f = modifyToolOptions infoRef $ \toolOpts ->
let
opts = get toolOpts
opt = findSubOption opts prototypeOpt
optsWOOpt = removeSubOption opts prototypeOpt
opts' = f opt optsWOOpt
in
set toolOpts opts'
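-- For example (as used for the diff-depth spin button later in this file):
--	modifyGuiOption infoRef (GuiDiffDepth 1) $ \_ opts -> GuiDiffDepth depth : opts
-- drops any previous GuiDiffDepth from the option list and conses the fresh value back on.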
setSubOptionM :: (SubOption subOption, NetworkIF network) =>
GetSet (ToolOptions network) [subOption] -> SetM network subOption
setSubOptionM getSet infoRef opt = modifySubOptionM getSet infoRef opt (const (opt:))
clearSubOptionM :: (SubOption subOption, NetworkIF network) =>
GetSet (ToolOptions network) [subOption] -> ClearM network subOption
clearSubOptionM getSet infoRef opt = modifySubOptionM getSet infoRef opt (const id)
findSubOptionM :: (SubOption subOption, NetworkIF network) =>
GetSet (ToolOptions network) [subOption] -> FindM network subOption
findSubOptionM (get, _) infoRef opt = do
info <- readIORef infoRef
return $ findSubOption (get $ guiToolOptions info) opt
getSubOptionM :: (SubOption subOption, NetworkIF network) =>
GetSet (ToolOptions network) [subOption] -> GetM network subOption
getSubOptionM (get, _) infoRef opt = do
info <- readIORef infoRef
return $ getSubOption (get $ guiToolOptions info) opt
isSetSubOptionM :: (SubOption subOption, NetworkIF network) =>
GetSet (ToolOptions network) [subOption] -> IsSetM network subOption
isSetSubOptionM getSet infoRef opt = do
option <- findSubOptionM getSet infoRef opt
return $ isJust option
modifyGuiOption :: NetworkIF network => ModifyM network GuiOption
modifyGuiOption = modifySubOptionM guiGetSet
-- findGuiOption :: NetworkIF network => FindM network GuiOption
-- findGuiOption = findSubOptionM guiGetSet
getGuiOption :: NetworkIF network => GetM network GuiOption
getGuiOption = getSubOptionM guiGetSet
setGuiOption :: NetworkIF network => SetM network GuiOption
setGuiOption = setSubOptionM guiGetSet
-- clearGuiOption :: NetworkIF network => ClearM network GuiOption
-- clearGuiOption = clearSubOptionM guiGetSet
isSetGuiOption :: NetworkIF network => IsSetM network GuiOption
isSetGuiOption = isSetSubOptionM guiGetSet
tableBoolOption :: (SubOption subOption, NetworkIF network) =>
GetSet (ToolOptions network) [subOption] -> IO () ->
IORef (GuiInfo network) -> String -> subOption -> TableOption
tableBoolOption getSet afterSetAction infoRef desc opt =
TableOption desc TableOptionTypeBool Nothing getOpt setOpt
where
getOpt = do
value <- isSetSubOptionM getSet infoRef opt
return $ TableOptionValueBool value
setOpt (TableOptionValueBool value) = do
(if value then setSubOptionM getSet else clearSubOptionM getSet) infoRef opt
afterSetAction
setOpt _ = error "tableBoolOption: not a boolean value"
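-- For example (as in the preferences window below):
--	tableBoolOption guiGetSet redraw infoRef "Show link widths as colours" GuiShowLinkWidthColours
-- gives a TableOption whose checkbox reports isSetSubOptionM and which sets/clears that option
-- (running the after-set redraw action) when toggled.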
redrawNetworkDiff :: NetworkIF network => IORef (GuiInfo network) -> IO ()
redrawNetworkDiff infoRef = do
info <- readIORef infoRef
fromLayout <- readIORef $ guiFromLayout info
toLayout <- readIORef $ guiToLayout info
GuiDiffDepth depth <- getGuiOption infoRef (GuiDiffDepth 1)
when (isJust (layoutPart fromLayout) && isJust (layoutPart toLayout)) $ do
let
Just fromPart = layoutPart fromLayout
Just toPart = layoutPart toLayout
(from, to) <- diffParts depth fromPart toPart
setLayoutGraphical (guiFromLayout info) $ Just from
setLayoutGraphical (guiToLayout info) $ Just to
presentWindow $ guiDiffWindow info
info2 <- readIORef infoRef
fitLayout (guiFromLayout info2)
fitLayout (guiToLayout info2)
exposeLayout (guiFromLayout info2)
exposeLayout (guiToLayout info2)
return ()
setRules :: NetworkIF network => IORef (GuiInfo network) -> RuleSet network -> IO ()
setRules infoRef ruleSet = do
modifyToolOptions infoRef $ \opts -> opts { optRules = ruleSet }
info <- readIORef infoRef
let
toolOpts = guiToolOptions info
prevEnabledOptimNames = map optimName $ optEnabledOptims toolOpts
prevAllOptimNames = map optimName $ optAllOptims toolOpts
optims = map ruleToOptim $ ruleSetRules ruleSet
-- use the current on/off optim. status for existing (re-read?) optims.
enableOptim optim = optimName optim `elem` prevEnabledOptimNames ||
(optimName optim `notElem` prevAllOptimNames && optimOnByDefault optim)
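	-- e.g. an optim. the user previously switched off stays off across a rules re-read,
	-- while an optim. name not seen before follows its optimOnByDefault setting.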
modifyToolOptions infoRef $ \opts -> opts {
optAllOptims = optims, optEnabledOptims = filter enableOptim optims }
setLeads infoRef []
updateLeads infoRef
guiSetFlagList info
layoutClicked :: NetworkIF network => IORef (GuiInfo network) -> IORef (GuiLayout network PartGraphical) ->
DPoint -> Events.Event -> IO ()
layoutClicked infoRef layoutRef point event = do
(maybePart, maybeGraphical, drawingArea) <- do
layout <- readIORef layoutRef
return (layoutPart layout, layoutGraphical layout, layoutDrawingArea layout)
when (isJust maybeGraphical && isJust maybePart) $ do
let
Just graphical = maybeGraphical
Just part = maybePart
comps = map Comp $ pointInBoundingBoxs (map compNodeBoundingBox) (partCompNodes graphical) point
getComp = fromJust . tryPart part . nwGetComp
firstComp = getComp (head comps)
compStr = tryPart part $ do
compBodys <- liftM (map fromJust) $ mapM nwGetComp comps
return $ joinWith "\n" $ map show compBodys
case (Events.eventClick event, length comps, firstComp) of
(Events.DoubleClick, 1, _) | isInstanceComp firstComp -> do
updatePartSelection infoRef $ nwPartName firstComp
(Events.DoubleClick, 0, _) -> do
info <- readIORef infoRef
let
parts = guiParts info
parentPartAndComp part2 = catMaybes $ tryPart part2 $ nwMapComps $ \comp ->
if isInstanceComp comp && nwPartName comp == networkName part
then return $ Just (part2, refComp comp)
else return Nothing
parent = concatMap parentPartAndComp parts
case parent of
[(parentPart, compInParent)] -> do
updatePartSelection infoRef $ networkName parentPart
partLayoutSetHighlightComps layoutRef [compInParent]
let compNode = IM.lookup (nwComp compInParent) $ partCompNodes graphical
when (isJust compNode) $ do
let Just (node:_) = compNode
(width, height) <- Gtk.widgetGetSize drawingArea
let
viewMinorDimension = fromIntegral $ min width height
compMajorDimension = max (compNodeWidth node) (compNodeHeight node)
scale = (viewMinorDimension / compMajorDimension) * 0.2
-- FIXME, check this is working properly
print node
print scale
setLayoutViewOrigin layoutRef $ compNodeOrigin node
setLayoutViewScale layoutRef scale
exposeLayout layoutRef
_ -> putStrLn $ "Can't find parent for part " ++ networkName part
(Events.SingleClick, _, _) -> do
partLayoutSetHighlightComps layoutRef comps
exposeLayout layoutRef
when (not $ null comps) $ do
info <- readIORef infoRef
let
compRef = head comps
leads = guiLeads info
leadNos = findIndices ((== compRef) . leadCompNo) leads
comp = getComp compRef
let oLayout = guiOLayout info
{- when (null leadNos) $ -}
putStrLn compStr
let showPortLinkInfo = True
when showPortLinkInfo $ do
let
portInfo
| isInstanceComp comp = map networkPortToPortInfo $ nwCompPorts comp
| otherwise = teakCompPortInfo $ nwTeakCompInfo $ nwTeakType comp
portInfoXlinks = zip portInfo (map flattenSome $ nwCompLinks comp)
forM_ portInfoXlinks $ \(info, links) -> do
let
sense = networkPortSense info
portName = networkPortName info
arrayed = networkPortIsArrayed info
forM (zip [(0::Int)..] links) $ \(i, link) -> do
let width = tryPart part $ nwGetLinkWidth link
putStr $ portName ++ (if arrayed then "[" ++ show i ++ "]"
else "") ++ " " ++ show link ++ " (width = " ++ show width ++ ") "
case sense of
Passive -> do
let prevs = tryPart part $ prevLinks [] link
putStrLn $ "prev: " ++ show prevs
Active -> do
let nexts = tryPart part $ nextLinks False [] link
putStrLn $ "next (->): " ++ show nexts
let nexts = tryPart part $ nextLinks True [] link
putStrLn $ "next (-|->): " ++ show nexts
if isTeakO comp
then do
let
TeakO terms = nwTeakType comp
[One inp, _] = nwCompLinks comp
inputWidth = tryPart part $ nwGetLinkWidth inp
setLayoutPart oLayout maybePart
oGraphical <- oGraphicalInfo inputWidth terms
setLayoutGraphical oLayout $ Just oGraphical
presentWindow $ guiOWindow info
fitLayout oLayout
exposeLayout oLayout
setLayoutButtonsSensitive oLayout True
return ()
else do
-- Gtk.widgetHide $ guiOWindow info
{-
layout <- readIORef oLayout
when (isJust $ layoutGraphical layout) $ do
clearLayout oLayout
setLayoutGraphical oLayout Nothing
setLayoutButtonsSensitive oLayout False
-}
return ()
-- FIXME, multiple selection
leadSelection <- Gtk.treeViewGetSelection $ guiLeadsList info
Gtk.treeSelectionSelectPath leadSelection $ take 1 leadNos
_ -> return ()
makeGates :: NetworkIF network => Gtk.Window -> IORef (GuiInfo network) -> IO ()
makeGates parent infoRef = do
info <- readIORef infoRef
let
baseName = optBaseName $ guiToolOptions info
defaultFilename = fromMaybe "netlist.v" $ do
base <- baseName
return $ base ++ ".v"
ok <- Gtk.buttonNewFromStock Gtk.stockOk
close <- Gtk.buttonNewFromStock Gtk.stockClose
entry <- Gtk.entryNew
Gtk.entrySetText entry defaultFilename
techCombo <- Gtk.comboBoxNewText
mappingFileEntry <- Gtk.entryNew
techsModel <- Gtk.comboBoxGetModelText techCombo
table <- Gtk.tableNew 3 2 False
tableAddNameValue table 0 0 "Netlist File" entry
tableAddNameValue table 0 1 "Technology" techCombo
mappingLabel <- tableAddNameValue table 0 2 "Mapping File" mappingFileEntry
let
setMappingEntrySensitive = do
activeRow <- Gtk.comboBoxGetActive techCombo
let usingMappingFile = activeRow == 0
Gtk.widgetSetSensitivity mappingFileEntry usingMappingFile
Gtk.widgetSetSensitivity mappingLabel usingMappingFile
setEntryTech = do
let toolOpts = guiToolOptions info
techs <- teakFindTechs toolOpts
Gtk.listStoreClear techsModel
mapM_ (Gtk.listStoreAppend techsModel) $ map Text.pack ("From Mapping File":techs)
info <- readIORef infoRef
let
tech = optTech $ guiToolOptions info
index = maybe 0 (+1) $ findIndex (== tech) techs
usingMappingFile = index == 0
Gtk.comboBoxSetActive techCombo index
Gtk.entrySetText mappingFileEntry $ if usingMappingFile then tech else ""
setMappingEntrySensitive
getEntryTech = do
activeRow <- Gtk.comboBoxGetActive techCombo
case activeRow of
0 -> Gtk.entryGetText mappingFileEntry
_ -> Gtk.comboBoxGetActiveText techCombo >>= return . (fromMaybe (Text.pack ""))
setEntryTech
Gtk.on techCombo Gtk.changed setMappingEntrySensitive
window <- makeDialogue parent "Make Gate-Level Netlist" [ok, close] table -- vBox
Gtk.widgetSetSizeRequest window 350 200
Gtk.onClicked close $ Gtk.widgetDestroy window
Gtk.onClicked ok $ do
info2 <- readIORef infoRef
filename <- Gtk.entryGetText entry
let toolOpts = guiToolOptions info
tech <- getEntryTech
putStrLn $ "Tech: " ++ (Text.unpack tech)
Why comp techMapping <- runWhyT $ teakFindTechMapping toolOpts (Text.unpack tech)
case comp of
Complete -> genMakeGatesFile True [] -- FIXME
"by teak gui" -- FIXME
techMapping
filename
(guiParts info2)
_ -> do
printCompleteness noPosContext comp
return ()
Gtk.widgetDestroy window
presentWindow window
makeFlagsWindow :: NetworkIF network => Gtk.Window -> IORef (GuiInfo network) -> IO Gtk.Window
makeFlagsWindow parent infoRef = do
scrolledWindow <- Gtk.scrolledWindowNew Nothing Nothing
flagList <- Gtk.treeViewNew
Gtk.set flagList [ Gtk.treeViewHeadersVisible Gtk.:= False ]
Gtk.containerAdd scrolledWindow flagList
Gtk.set scrolledWindow [ Gtk.scrolledWindowHscrollbarPolicy Gtk.:= Gtk.PolicyAutomatic,
Gtk.scrolledWindowVscrollbarPolicy Gtk.:= Gtk.PolicyAlways ]
close <- Gtk.buttonNewFromStock Gtk.stockClose
window <- makeDialogue parent "Optimisation Flags" [close] scrolledWindow
Gtk.widgetSetSizeRequest window 500 500
Gtk.onClicked close $ Gtk.widgetHide window
Gtk.on window Gtk.deleteEvent $ lift $ Gtk.widgetHide window >> return True
model <- Gtk.listStoreNew []
textRenderer <- Gtk.cellRendererTextNew
toggleRenderer <- Gtk.cellRendererToggleNew
enableColumn <- Gtk.treeViewColumnNew
Gtk.treeViewColumnSetTitle enableColumn ""
Gtk.treeViewColumnSetResizable enableColumn True
Gtk.treeViewColumnPackStart enableColumn toggleRenderer True
nameColumn <- Gtk.treeViewColumnNew
Gtk.treeViewColumnSetTitle nameColumn "Name"
Gtk.treeViewColumnSetResizable nameColumn True
Gtk.treeViewColumnPackStart nameColumn textRenderer True
effectColumn <- Gtk.treeViewColumnNew
Gtk.treeViewColumnSetTitle effectColumn "Effect"
Gtk.treeViewColumnSetResizable effectColumn True
Gtk.treeViewColumnPackStart effectColumn textRenderer True
let
showEnable row = [ Gtk.cellToggleActive Gtk.:= flagViewEnabled row, Gtk.cellToggleActivatable Gtk.:= True ]
showName row = [ Gtk.cellText Gtk.:= flagViewName row ]
showEffect row = [ Gtk.cellText Gtk.:= flagViewEffect row ]
updateFlags = do
flagViews <- Gtk.listStoreToList model
modifyToolOptions infoRef $ \opts -> opts { optEnabledOptims =
mapMaybe flagViewOpt $ filter flagViewEnabled flagViews }
setRowEnable enableF rowNo = do
row <- Gtk.listStoreGetValue model rowNo
Gtk.listStoreSetValue model rowNo $ row { flagViewEnabled = enableF (flagViewEnabled row) }
Gtk.cellLayoutSetAttributes enableColumn toggleRenderer model showEnable
Gtk.cellLayoutSetAttributes nameColumn textRenderer model showName
Gtk.cellLayoutSetAttributes effectColumn textRenderer model showEffect
Gtk.treeViewAppendColumn flagList enableColumn
Gtk.treeViewAppendColumn flagList nameColumn
Gtk.treeViewAppendColumn flagList effectColumn
Gtk.on toggleRenderer Gtk.cellToggled $ \s -> do
let
rowNo :: Int
rowNo = read s
case rowNo of
0 -> do
size <- Gtk.listStoreGetSize model
allRow <- Gtk.listStoreGetValue model 0
mapM_ (setRowEnable (const (not (flagViewEnabled allRow)))) [0..size-1]
_ -> do
setRowEnable not rowNo
setRowEnable (const False) 0
updateFlags
updateLeads infoRef
Gtk.treeViewSetModel flagList model
let
setFlagList = do
info <- readIORef infoRef
let optims = optEnabledOptims $ guiToolOptions info
let
setFlagViewElem optim =
FlagView (isJust (find (isOptim name) optims)) (Just optim) name effect
where
name = optimName optim
effect = optimDescription optim
allFlagView = FlagView False Nothing "all" "set/reset all flags"
Gtk.listStoreClear model
mapM_ (Gtk.listStoreAppend model) $
allFlagView : (map setFlagViewElem (optAllOptims $ guiToolOptions info))
modifyIORef infoRef $ \info -> info { guiSetFlagList = setFlagList }
setFlagList
return window
makePreferencesWindow :: NetworkIF network => Gtk.Window -> IORef (GuiInfo network) -> IO Gtk.Window
makePreferencesWindow parent infoRef = do
table <- Gtk.tableNew 2 2 False
let
redraw = redrawPartAfterOptionChange infoRef
-- FIXME, improve redraw options
guiBoolOpt desc opt = tableBoolOption guiGetSet redraw infoRef desc opt
plotBoolOpt desc opt = tableBoolOption plotGetSet redraw infoRef desc opt
stateColourOpt state desc = TableOption desc TableOptionTypeColour Nothing get set
where
get = do
info <- readIORef infoRef
return $ TableOptionValueColour $ guiStateColours info ! state
set (TableOptionValueColour colour) = do
modifyIORef infoRef $ \info -> info {
guiStateColours = (guiStateColours info) // [(state, colour)] }
info <- readIORef infoRef
exposeLayout $ guiPartLayout info
set _ = error "stateColourOpt: bad value"
options = [
TableOption "Layout mode"
(TableOptionTypeEnum ["dot", "spring"])
Nothing
(do
GuiDisplayMode displayMode <- getGuiOption infoRef (GuiDisplayMode GuiDot)
return $ TableOptionValueEnum $ fromEnum displayMode)
(\(TableOptionValueEnum i) -> do
setGuiOption infoRef $ GuiDisplayMode $ toEnum i
redrawPartAfterOptionChange infoRef
return ()
),
guiBoolOpt "Show link widths as colours" GuiShowLinkWidthColours,
guiBoolOpt "Show only passing leads" GuiOnlyPassingLeads,
guiBoolOpt "Show network diff. when stepping" GuiDiffWhenStepping,
guiBoolOpt "Show monitor events on network" GuiShowMonitorEvents,
plotBoolOpt "Break Vs into read/write portions" PlotBreakVariables,
TableOptionSpacer "Handshake State Colours" 2,
stateColourOpt HS_SPACER "spacer (SPACER)",
stateColourOpt HS_r "req. rising, incomplete (r)",
stateColourOpt HS_R "req. up, complete (R)",
stateColourOpt HS_RA "req up, ack. up (RA)",
stateColourOpt HS_rA "req falling, ack up (rA)",
stateColourOpt HS_A "just ack up (A)"
]
mapM (\(row, opt) -> tableOptionMakeTableEntry table 0 row opt) $ zip [0..] options
close <- Gtk.buttonNewFromStock Gtk.stockClose
scrolledWindow <- makeAlignedScrolledWindow table
window <- makeDialogue parent "Preferences" [close] scrolledWindow
Gtk.widgetSetSizeRequest window 350 400
Gtk.onClicked close $ Gtk.widgetHide window
Gtk.on window Gtk.deleteEvent $ lift $ Gtk.widgetHide window >> return True
return window
plot :: NetworkIF network => Gtk.Window -> IORef (GuiInfo network) -> IO ()
plot parent infoRef = do
info <- readIORef infoRef
let
toolOpts = guiToolOptions info
PlotLanguage language0 = getSubOption (optPlot toolOpts) (PlotLanguage PlotPS)
baseName = optBaseName toolOpts
defaultFilename = do
fromMaybe "plot.ps" $ do
base <- baseName
return $ base ++ ".ps"
table <- Gtk.tableNew 2 2 False
plotAll <- newIORef True
plotFilename <- newIORef defaultFilename
let
boolOpt desc opt = tableBoolOption plotGetSet (return ()) infoRef desc opt
languages = [PlotPS, PlotPDF, PlotSVG]
options = [
TableOption "Output file" TableOptionTypeString Nothing
(readIORef plotFilename >>= return . TableOptionValueString)
(\(TableOptionValueString value) -> writeIORef plotFilename value),
boolOpt "Show part titles" PlotShowTitle,
TableOption "Parts to plot" (TableOptionTypeEnum ["All", "Selected"]) Nothing
(readIORef plotAll >>= return . TableOptionValueEnum . (\v -> if v then 0 else 1))
(\(TableOptionValueEnum value) -> writeIORef plotAll $ value == 0),
TableOption "Language" (TableOptionTypeEnum (map (map toUpper . show) languages)) Nothing
(do
info <- readIORef infoRef
let
opts = optPlot $ guiToolOptions info
PlotLanguage lang = getSubOption opts $ PlotLanguage language0
return $ TableOptionValueEnum $ fromJust $ findIndex (== lang) languages
)
(\(TableOptionValueEnum value) -> do
modifyToolOptions infoRef $ \opts -> opts { optPlot =
replaceSubOption (optPlot opts) $ PlotLanguage $ languages !! value })
]
mapM (\(row, opt) -> tableOptionMakeTableEntry table 0 row opt) $ zip [0..] options
close <- Gtk.buttonNewFromStock Gtk.stockClose
ok <- Gtk.buttonNewFromStock Gtk.stockOk
scrolledWindow <- makeAlignedScrolledWindow table
window <- makeDialogue parent "Plot Parts" [ok, close] scrolledWindow
Gtk.widgetSetSizeRequest window 350 400
Gtk.onClicked close $ Gtk.widgetHide window
Gtk.on window Gtk.deleteEvent $ lift $ Gtk.widgetHide window >> return True
Gtk.onClicked ok $ do
info <- readIORef infoRef
all <- readIORef plotAll
filename <- readIORef plotFilename
parts <- if all
then return $ guiParts info
else liftM maybeToList $ guiPart infoRef
let
guiOpts = optGui $ guiToolOptions info
opts = replaceSubOption (optPlot $ guiToolOptions info) $ PlotOnlyParts $ map networkName parts
pre
| findBoolSubOption guiOpts GuiShowMonitorEvents = Just $ \part graphical -> renderMonitorEvents
(guiStateColours info) guiOpts (optPartMonitorEvents toolOpts)
(guiTime info) part graphical
| otherwise = Nothing
plotParts filename False opts False (makePartGraphical guiOpts) pre $ guiParts info
return ()
Gtk.widgetDestroy window
presentWindow window
makeInsertLatchesWindow :: NetworkIF network => Gtk.Window -> IORef (GuiInfo network) -> IO Gtk.Window
makeInsertLatchesWindow parent infoRef = do
table <- Gtk.tableNew 2 10 False
let
eqStrategy strategy opt@(LatchOption {}) = strategy == teakLatchStrategy opt
eqStrategy _ _ = False
updateDepth strategy depth opts = fromMaybe (newOpt:opts) $ do
i <- findIndex (eqStrategy strategy) opts
return $ replaceAt opts i newOpt
where newOpt = LatchOption strategy depth
getDepth strategy = do
info <- readIORef infoRef
let
depth = fromMaybe 0 $ do
oldOpt <- find (eqStrategy strategy) $ optLatch $ guiToolOptions info
return $ teakLatchDepth oldOpt
return depth
get strategy = do
depth <- getDepth strategy
return $ TableOptionValueInt depth
set opt (TableOptionValueInt value) = do
modifyToolOptions infoRef $ \opts -> opts { optLatch = updateDepth opt value (optLatch opts) }
set _ _ = error "set: not an integer"
latchOpt (_, subOption) = case sampleValue of
-- Only add `simple' rules
([LatchOption strategy _], []) -> do
apply <- Gtk.buttonNewFromStock Gtk.stockApply
Gtk.onClicked apply $ do
TableOptionValueInt depth <- get strategy
insertLatches [LatchOption strategy depth]
return $ Just $ TableOption (capitalise desc) (TableOptionTypeIntSpin 0 20)
(Just (Gtk.castToWidget apply)) (get strategy) (set strategy)
_ -> return Nothing
where
desc = subOptionDescription subOption
sampleValue = subOptionParseValue subOption $ subOptionSampleValue subOption
insertLatches opts = do
maybePart <- guiPart infoRef
when (isJust maybePart) $ do
let
Just part = maybePart
opts' = filter (\opt -> isLatchOption opt && teakLatchDepth opt > 0) opts
Why comp part' = runWhyTPart_ part $ nwInsertLatches opts'
bad <- printCompleteness noPosContext comp
when (not (null opts') && not bad) $ do
replacePart infoRef part'
setLeads infoRef []
updateLeads infoRef
pushHistory infoRef
readIORef infoRef >>= fitLayout . guiPartLayout
doApplyAll = do
info <- readIORef infoRef
let opts = optLatch $ guiToolOptions info
insertLatches opts
options <- liftM catMaybes $ mapM latchOpt $ subOptionUsages latchOptionUsage
mapM (\(row, opt) -> tableOptionMakeTableEntry table 0 row opt) $ zip [0..] options
ok <- Gtk.buttonNewFromStock Gtk.stockOk
apply <- Gtk.buttonNewFromStock Gtk.stockApply
close <- Gtk.buttonNewFromStock Gtk.stockClose
scrolledWindow <- makeAlignedScrolledWindow table
window <- makeDialogue parent "Insert Latches" [apply, ok, close] scrolledWindow
Gtk.widgetSetSizeRequest window 500 450
Gtk.onClicked close $ Gtk.widgetHide window
Gtk.onClicked ok $ doApplyAll >> Gtk.widgetHide window
Gtk.onClicked apply doApplyAll
Gtk.on window Gtk.deleteEvent $ lift $ Gtk.widgetHide window >> return True
return window
makeDiffWindow :: NetworkIF network => Gtk.Window -> IORef (GuiInfo network) -> Bool -> Int ->
IO (Gtk.Window, IORef (GuiLayout network PartGraphical), IORef (GuiLayout network PartGraphical))
makeDiffWindow parent infoRef showDepth diffDepth = do
(fromLayoutRef, fromLayoutWidget) <- makeLayoutTable (Nothing :: Maybe PartGraphical) False False Nothing
setLayoutSizeRequest fromLayoutRef 200 200
(toLayoutRef, toLayoutWidget) <- makeLayoutTable (Nothing :: Maybe PartGraphical) False False Nothing
setLayoutSizeRequest toLayoutRef 200 200
diffPanes <- makeHPaned fromLayoutWidget toLayoutWidget
close <- Gtk.buttonNewFromStock Gtk.stockClose
diffWindow <- if showDepth
then do
diffVBox <- Gtk.vBoxNew False 2
Gtk.boxPackStart diffVBox diffPanes Gtk.PackGrow 2
spinHBox <- Gtk.hBoxNew False 2
diffSpin <- Gtk.spinButtonNewWithRange 1 10 1
Gtk.spinButtonSetValue diffSpin (fromIntegral diffDepth)
Gtk.onValueSpinned diffSpin $ do
-- FIXME, don't know why this is invoked more than once for each click
depth <- liftM floor $ Gtk.spinButtonGetValue diffSpin
modifyGuiOption infoRef (GuiDiffDepth 1) $ \_ opts -> GuiDiffDepth depth : opts
redrawNetworkDiff infoRef
spinLabel <- Gtk.labelNewWithMnemonic "Depth"
Gtk.boxPackStart spinHBox spinLabel Gtk.PackNatural 2
Gtk.boxPackStart spinHBox diffSpin Gtk.PackGrow 2
Gtk.boxPackStart diffVBox spinHBox Gtk.PackNatural 2
makeDialogue parent "Network difference" [close] diffVBox
else makeDialogue parent "Network difference" [close] diffPanes
Gtk.onClicked close $ Gtk.widgetHide diffWindow
Gtk.on diffWindow Gtk.deleteEvent $ lift $ Gtk.widgetHide diffWindow >> return True
return (diffWindow, fromLayoutRef, toLayoutRef)
xmlTag :: String -> [(String, String)] -> Bool -> String
xmlTag tag nvps close = "<" ++ tag ++ nvps' ++ (if close then "/" else "") ++ ">"
where
nvps'
| null nvps = ""
| otherwise = " " ++ (joinWith " " $ map (\(name, value) -> name ++ "=\"" ++ value ++ "\"") nvps)
xmlCloseTag :: String -> String
xmlCloseTag tag = "</" ++ tag ++ ">"
xmlFormat :: String -> [(String, String)] -> [String] -> String
xmlFormat tag nvps bodies = xmlTag tag nvps False ++ "\n" ++
joinWith "\n" (map (indent tab) bodies) ++ "\n" ++ xmlCloseTag tag
where tab = "\t"
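-- For example:
--	xmlTag "menuitem" [("action", "Quit")] True  ==  "<menuitem action=\"Quit\"/>"
--	xmlTag "separator" [] True  ==  "<separator/>"
-- and xmlFormat wraps its bodies, each indented by one tab, between the open and close tags.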
mainMenuUI :: String
mainMenuUI = xmlFormat "ui" [] [
xmlFormat "menubar" [("name", "mainMenubar")] menus,
xmlFormat "toolbar" [("name", "leadsToolbar")] leadsToolitems,
xmlFormat "toolbar" [("name", "timeToolbar")] timeToolitems]
where
menus = [
menu "File" [
menuitem "Compile Balsa...", menuitem "Open Network...",
menuitem "Save Network", menuitem "Save Network As...",
separator,
menuitem "Make Gate-Level Netlist...",
menuitem "Plot...",
separator,
menuitem "Read Optimisation Rules...",
menuitem "Read Monitor Events...",
separator,
menuitem "Quit"],
menu "Edit" [
menuitem "Undo", menuitem "Redo",
separator,
menuitem "Optimisation Flags...",
menuitem "Preferences..."],
menu "Selected Part" [
menuitem "Set As Top Level",
menuitem "Insert Latches...",
menuitem "Remove All Latches",
menuitem "Experimental" ],
menu "Windows" [
menuitem "Time Window",
menuitem "Network Diff. Window",
menuitem "O Component Window"
]]
leadsToolitems = [
toolitem "New Leads",
toolitem "(Leads) Apply All",
toolitem "(Leads) Step" ]
timeToolitems = [
toolitem "(Time) Play",
toolitem "(Time) Stop",
toolitem "(Time) Step Backward",
toolitem "(Time) Step Forward" ]
menu name items = xmlFormat "menu" [("action", name)] items
menuitem action = xmlTag "menuitem" [("action", action)] True
toolitem action = xmlTag "toolitem" [("action", action)] True
separator = xmlTag "separator" [] True
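-- The generated description has the shape Gtk.uiManagerAddUiFromString expects (abridged):
--	<ui>
--		<menubar name="mainMenubar">
--			<menu action="File"> <menuitem action="Compile Balsa..."/> ... </menu>
--			...
--		</menubar>
--		<toolbar name="leadsToolbar"> <toolitem action="New Leads"/> ... </toolbar>
--		<toolbar name="timeToolbar"> ... </toolbar>
--	</ui>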
makeTimeWindow :: NetworkIF network => IORef (GuiInfo network) -> Gtk.Window -> Gtk.UIManager ->
(Integer, Integer) -> Integer -> IO (Gtk.Window, (Integer, Integer) -> IO ())
makeTimeWindow infoRef parent ui timeLimits0 stepSize0 = do
-- Note that makeTimeWindow *must not* use infoRef in setting up the window, only once events start
-- to be received
close <- Gtk.buttonNewFromStock Gtk.stockClose
let (low, high) = timeLimits0
Just playAction <- Gtk.uiManagerGetAction ui "/ui/timeToolbar/(Time) Play"
Just stopAction <- Gtk.uiManagerGetAction ui "/ui/timeToolbar/(Time) Stop"
Just stepForwardAction <- Gtk.uiManagerGetAction ui "/ui/timeToolbar/(Time) Step Forward"
Just stepBackwardAction <- Gtk.uiManagerGetAction ui "/ui/timeToolbar/(Time) Step Backward"
toolbar <- liftM (Gtk.castToToolbar . fromJust) $ Gtk.uiManagerGetWidget ui "/ui/timeToolbar"
toolbarSetup toolbar Gtk.OrientationHorizontal Gtk.ToolbarIcons
vBox <- Gtk.vBoxNew False 2
table <- Gtk.tableNew 3 2 False
scrolledWindow <- makeAlignedScrolledWindow table
Gtk.boxPackStart vBox toolbar Gtk.PackNatural 0
Gtk.boxPackStart vBox scrolledWindow Gtk.PackGrow 0
timeScale <- Gtk.hScaleNewWithRange (fromInteger low) (fromInteger high) 1
Gtk.scaleSetDigits timeScale 0
stepSpin <- Gtk.spinButtonNewWithRange 1 1000000 $ fromInteger stepSize0
Gtk.spinButtonSetValue stepSpin $ fromInteger stepSize0
tableAddNameValue table 0 1 "Time" timeScale
tableAddNameValue table 0 2 "Play Step" stepSpin
playHandler <- newIORef (Nothing :: Maybe Gtk.HandlerId)
let
setTimeLimits (low, high)
| high <= low = setTimeLimits (low, low + 1)
| otherwise = do
info <- readIORef infoRef
let
time = guiTime info
time'
| low > time = low
| high < time = high
| otherwise = time
Gtk.rangeSetRange timeScale (fromInteger low) (fromInteger high)
setTime time'
modifyIORef infoRef $ \info -> info { guiTimeLimits = (low, high) }
setTime :: Integer -> IO ()
setTime time = do
Gtk.rangeSetValue timeScale (fromInteger time)
modifyIORef infoRef $ \info -> info { guiTime = time }
getStepSize = do
GuiTimeStepSize stepSize <- getGuiOption infoRef (GuiTimeStepSize 100)
return stepSize
atEndOfTrace = do
info <- readIORef infoRef
let
(_, high) = guiTimeLimits info
time = guiTime info
stepSize <- getStepSize
return $ time + stepSize > high
stopPlaying = do
handler <- readIORef playHandler
when (isJust handler) $ Gtk.timeoutRemove $ fromJust handler
writeIORef playHandler Nothing
return ()
step stepMult = do
atEnd <- atEndOfTrace
when (not atEnd) $ do
info <- readIORef infoRef
stepSize <- getStepSize
let time' = guiTime info + stepMult * stepSize
setTime time'
return atEnd
playStep = do
playing <- readIORef playHandler
when (isJust playing) $ do
atEnd <- step 1
when atEnd stopPlaying
liftM isJust $ readIORef playHandler
Gtk.onValueSpinned stepSpin $ do
stepSize <- Gtk.spinButtonGetValue stepSpin
setGuiOption infoRef $ GuiTimeStepSize $ floor stepSize
Gtk.onRangeValueChanged timeScale $ do
newTime <- liftM floor $ Gtk.rangeGetValue timeScale
modifyIORef infoRef $ \info -> info { guiTime = newTime }
info <- readIORef infoRef
exposeLayout $ guiPartLayout info
Gtk.on playAction Gtk.actionActivated $ do
playing <- readIORef playHandler
when (isNothing playing) $ do
atEnd <- atEndOfTrace
when atEnd $ do
(low, _) <- liftM guiTimeLimits $ readIORef infoRef
setTime low
handler <- Gtk.timeoutAddFull playStep Gtk.priorityLow 50
writeIORef playHandler $ Just handler
return ()
Gtk.on stopAction Gtk.actionActivated stopPlaying
Gtk.on stepForwardAction Gtk.actionActivated $ step 1 >> return ()
Gtk.on stepBackwardAction Gtk.actionActivated $ step (-1) >> return ()
window <- makeDialogue parent "Time" [close] vBox
let closeWindow = stopPlaying >> Gtk.widgetHide window
Gtk.onClicked close closeWindow
Gtk.on window Gtk.deleteEvent $ do
lift $ closeWindow
return True
Gtk.widgetSetSizeRequest window 350 200
return (window, setTimeLimits)
-- gui : start GUI
-- gui :: (Read network, NetworkIF network) => ToolOptions network -> [Part network] -> IO [Part network]
gui toolOpts0 parts0 = do
Gtk.initGUI
infoRef <- newIORef (undefined :: GuiInfo network)
actions <- Gtk.actionGroupNew "Actions"
-- Get some of the initial toolOpts/guiOpts that are needed to build windows etc.
let
GuiDisplayMode displayMode0 = getSubOption (optGui toolOpts0) $ GuiDisplayMode GuiDot
GuiTimeStepSize timeStepSize0 = getSubOption (optGui toolOpts0) $ GuiTimeStepSize 100
GuiDiffDepth diffDepth0 = getSubOption (optGui toolOpts0) $ GuiDiffDepth 1
newAction name displayedName maybeTooltip maybeIcon maybeAccel = do
action <- Gtk.actionNew name displayedName maybeTooltip maybeIcon
Gtk.actionGroupAddActionWithAccel actions action (maybeAccel :: Maybe String)
return action
newAction "File" "_File" Nothing Nothing Nothing
newAction "Edit" "_Edit" Nothing Nothing Nothing
partAction <- newAction "Selected Part" "Selected _Part" Nothing Nothing Nothing
newAction "Windows" "_Windows" Nothing Nothing Nothing
compileBalsaAction <- newAction "Compile Balsa..." "Compile Balsa..."
Nothing (Just Gtk.stockOpen) (Just "<Control>b")
openNetworkAction <- newAction "Open Network..." "Open Network..."
Nothing (Just Gtk.stockOpen) (Just "<Control>o")
saveNetworkAction <- newAction "Save Network" "Save Network" Nothing (Just Gtk.stockSave) (Just "<Control>s")
saveAsNetworkAction <- newAction "Save Network As..." "Save Network As..."
Nothing (Just Gtk.stockSaveAs) Nothing
readRulesAction <- newAction "Read Optimisation Rules..." "Read Optimisation Rules..."
Nothing (Just Gtk.stockOpen) (Just "")
readMonitorEventsAction <- newAction "Read Monitor Events..." "Read Monitor Events..."
Nothing (Just Gtk.stockOpen) (Just "")
quitAction <- newAction "Quit" "Quit" Nothing (Just Gtk.stockQuit) (Just "<Control>q")
undoAction <- newAction "Undo" "Undo" Nothing (Just Gtk.stockUndo) (Just "<Control>z")
redoAction <- newAction "Redo" "Redo" Nothing (Just Gtk.stockRedo) (Just "<Control>y")
flagsAction <- newAction "Optimisation Flags..." "Optimisation Flags..."
Nothing (Just Gtk.stockPreferences) Nothing
guiOptionsAction <- newAction "Preferences..." "Preferences..." Nothing (Just Gtk.stockPreferences) Nothing
makeGatesAction <- newAction "Make Gate-Level Netlist..." "Make Gate-Level Netlist..."
Nothing (Just Gtk.stockExecute) Nothing
plotAction <- newAction "Plot..." "Plot..." Nothing (Just Gtk.stockPrint) Nothing
topLevelAction <- newAction "Set As Top Level" "Set As Top Level" Nothing (Just Gtk.stockExecute) Nothing
insertLatchesAction <- newAction "Insert Latches..." "Insert Latches..."
Nothing (Just Gtk.stockExecute) Nothing
removeLatchesAction <- newAction "Remove All Latches" "Remove All Latches"
Nothing (Just Gtk.stockExecute) Nothing
experimentalAction <- newAction "Experimental" "Experimental" Nothing (Just Gtk.stockExecute) Nothing
showTimeWindowAction <- newAction "Time Window" "Time Window" Nothing Nothing Nothing
showNetworkDiffWindowAction <- newAction "Network Diff. Window" "Network Diff. Window" Nothing Nothing Nothing
showOComponentWindowAction <- newAction "O Component Window" "O Component Window" Nothing Nothing Nothing
leadsAction <- newAction "New Leads" "New Leads" Nothing (Just Gtk.stockRefresh) Nothing
optAction <- newAction "(Leads) Apply All" "Apply All" Nothing (Just Gtk.stockApply) Nothing
stepAction <- newAction "(Leads) Step" "Step" Nothing (Just Gtk.stockGoForward) Nothing
do
newAction "(Time) Play" "Play" (Just "Play") (Just Gtk.stockMediaPlay) Nothing
newAction "(Time) Stop" "Stop" (Just "Stop") (Just Gtk.stockMediaStop) Nothing
newAction "(Time) Step Forward" "Step Forward" (Just "Step Forward") (Just Gtk.stockGoForward) Nothing
newAction "(Time) Step Backward" "Step Backward" (Just "Step Backward") (Just Gtk.stockGoBack) Nothing
ui <- Gtk.uiManagerNew
Gtk.uiManagerAddUiFromString ui mainMenuUI
Gtk.uiManagerInsertActionGroup ui actions 0
Just mainMenubar <- Gtk.uiManagerGetWidget ui "/ui/mainMenubar"
-- partToolbar <- liftM (Gtk.castToToolbar . fromJust) $ Gtk.uiManagerGetWidget ui "/ui/partToolbar"
-- toolbarSetup partToolbar Gtk.OrientationHorizontal Gtk.ToolbarText
leadsToolbar <- liftM (Gtk.castToToolbar . fromJust) $ Gtk.uiManagerGetWidget ui "/ui/leadsToolbar"
toolbarSetup leadsToolbar Gtk.OrientationHorizontal Gtk.ToolbarBothHoriz
(partLayoutRef, partWidget) <- makeLayoutTable (Nothing :: Maybe PartGraphical) True
(displayMode0 == GuiSpring) (Just (layoutClicked infoRef))
partFrame <- makeFrame "Selected Part" partWidget
partsScrolledWindow <- Gtk.scrolledWindowNew Nothing Nothing
partsList <- Gtk.treeViewNew
Gtk.set partsList [ Gtk.treeViewHeadersVisible Gtk.:= False ]
Gtk.containerAdd partsScrolledWindow partsList
Gtk.set partsScrolledWindow [
Gtk.scrolledWindowHscrollbarPolicy Gtk.:= Gtk.PolicyNever,
Gtk.scrolledWindowVscrollbarPolicy Gtk.:= Gtk.PolicyAutomatic ]
let partsVBox = partsScrolledWindow
partsFrame <- makeFrame "Parts" partsVBox
leadsScrolledWindow <- Gtk.scrolledWindowNew Nothing Nothing
leadsList <- Gtk.treeViewNew
Gtk.containerAdd leadsScrolledWindow leadsList
Gtk.set leadsScrolledWindow [ Gtk.scrolledWindowHscrollbarPolicy Gtk.:= Gtk.PolicyAutomatic,
Gtk.scrolledWindowVscrollbarPolicy Gtk.:= Gtk.PolicyAlways ]
leadsVBox <- Gtk.vBoxNew False 2
Gtk.boxPackStart leadsVBox leadsToolbar Gtk.PackNatural 2
Gtk.boxPackStart leadsVBox leadsScrolledWindow Gtk.PackGrow 2
leadsFrame <- makeFrame "Optimisation Leads" leadsVBox
partsAndLeadsPaned <- makeVPaned partsFrame leadsFrame
Gtk.set partsAndLeadsPaned [ Gtk.panedPosition Gtk.:= 200 ]
mainHPaned <- makeHPaned partFrame partsAndLeadsPaned
Gtk.set mainHPaned [ Gtk.panedPosition Gtk.:= 400 ]
status <- Gtk.statusbarNew
vBox <- Gtk.vBoxNew False 2
Gtk.boxPackStart vBox mainMenubar Gtk.PackNatural 2
Gtk.boxPackStart vBox mainHPaned Gtk.PackGrow 2
Gtk.boxPackStart vBox status Gtk.PackNatural 2
mainWindow <- Gtk.windowNew
Gtk.set mainWindow [ Gtk.windowTitle Gtk.:= "Teak", Gtk.windowAllowShrink Gtk.:= True ]
Gtk.containerAdd mainWindow vBox
Gtk.widgetSetSizeRequest mainWindow 800 600
(diffWindow, fromLayoutRef, toLayoutRef) <- makeDiffWindow mainWindow infoRef True diffDepth0
(oLayoutRef, oLayoutWidget) <- makeLayoutTable (Nothing :: Maybe OGraphical) True False Nothing
oWindow <- do
close <- Gtk.buttonNewFromStock Gtk.stockClose
window <- makeDialogue mainWindow "Operator" [close] oLayoutWidget
setLayoutSizeRequest oLayoutRef 400 300
Gtk.onClicked close $ Gtk.widgetHide window
Gtk.on window Gtk.deleteEvent $ lift $ Gtk.widgetHide window >> return True
return window
(timeWindow, setTimeLimits) <- makeTimeWindow infoRef mainWindow ui (0, 1) timeStepSize0
historyRef <- newIORef (GuiHistory False 0 10 [] (historySetSensitive undoAction redoAction))
optimPath <- teakOptimPath
let
partSensitiveActions = [optAction, leadsAction, stepAction, partAction, topLevelAction,
insertLatchesAction, removeLatchesAction, experimentalAction]
partSetSensitive s = forM_ partSensitiveActions $ \a -> Gtk.actionSetSensitive a s
setBaseName baseName = do
modifyToolOptions infoRef $ \opts -> opts { optBaseName = baseName }
when (isJust baseName) $ do
modifyIORef infoRef $ \info -> info { guiLastNetworkFile = Just $ case guiLastNetworkFile info of
Nothing -> fromJust baseName <.> "teak"
Just oldName -> replaceBaseName oldName (fromJust baseName) }
Gtk.actionSetSensitive saveNetworkAction $ isJust baseName
defaultRulesFile = optimPath </> "default" <.> "rules"
writeIORef infoRef $ GuiInfo {
guiHistory = historyRef,
guiPartLayout = partLayoutRef,
guiFromLayout = fromLayoutRef,
guiToLayout = toLayoutRef,
guiOLayout = oLayoutRef,
guiParts = parts0,
guiLeads = [],
guiUpdateLeads = return (),
guiGetLeadView = const Nothing,
guiPartList = partsList,
guiLeadsList = leadsList,
guiSetFlagList = return (),
guiPartListModel = undefined,
guiDiffWindow = diffWindow,
guiTimeWindow = timeWindow,
guiOWindow = oWindow,
guiTimeLimits = (0, 1),
guiTime = 0,
guiStateColours = defaultMonitorStateColours,
guiSetPartSelectedSensitive = partSetSensitive,
guiSetTimeLimits = setTimeLimits,
guiSetBaseName = setBaseName,
guiLastBalsaFile = Nothing,
guiLastNetworkFile = Nothing,
guiLastOptimRulesFile = Just defaultRulesFile,
guiToolOptions = toolOpts0
}
setTimeLimits $ partMonitorEventsTimeLimits $ optPartMonitorEvents toolOpts0
setBaseName $ optBaseName toolOpts0
initLeadsList infoRef leadsList
flagsWindow <- makeFlagsWindow mainWindow infoRef
guiOptionsWindow <- makePreferencesWindow mainWindow infoRef
insertLatchesWindow <- makeInsertLatchesWindow mainWindow infoRef
let makeSaveAsNetworkFileChooser = do
info <- readIORef infoRef
makeFileChooser (Just mainWindow) "Save Network" (guiLastNetworkFile info) Gtk.FileChooserActionSave
"gtk-save" $ \filename -> do
modifyIORef infoRef $ \info -> info { guiLastNetworkFile = Just filename }
info <- readIORef infoRef
putStrLn $ "saving network to file `" ++ filename ++ "'"
writeNetworkFile False "" filename $ guiParts info
return True
return ()
Gtk.on compileBalsaAction Gtk.actionActivated $ do
info <- readIORef infoRef
chooser <- makeFileChooser (Just mainWindow) "Compile Balsa" (guiLastBalsaFile info)
Gtk.FileChooserActionOpen
"gtk-open" $ \filename -> do
modifyIORef infoRef $ \info -> info { guiLastBalsaFile = Just filename }
(context, parts, reports) <- compileBalsaToParts infoRef filename
case (parts, reports) of
([], []) -> do
dialogue <- Gtk.messageDialogNew Nothing [Gtk.DialogModal] Gtk.MessageWarning
Gtk.ButtonsYesNo $ "File `" ++ filename ++ "' contains no procedures\n\nLoad anyway?"
response <- Gtk.dialogRun dialogue
Gtk.widgetDestroy dialogue
case response of
Gtk.ResponseYes -> do
newParts infoRef []
return True
_ -> return False
(_, _:_) -> do
dialogue <- Gtk.messageDialogNew Nothing [Gtk.DialogModal] Gtk.MessageError
Gtk.ButtonsYesNo $ "File `" ++ filename ++ "' has errors\n\nView errors?"
response <- Gtk.dialogRun dialogue
Gtk.widgetDestroy dialogue
case response of
Gtk.ResponseYes -> do
makeSourceErrorViewWindow mainWindow "Source Errors"
(Just context) filename reports
return False -- set to True if we want the file chooser to go away here
_ -> return False
(_:_, []) -> do
newParts infoRef parts
return True
fileChooserAddFilters chooser [("*", "All files"), ("*.balsa", "Balsa files")]
Gtk.on saveNetworkAction Gtk.actionActivated $ do
info <- readIORef infoRef
let baseName = optBaseName $ guiToolOptions info
if isJust baseName
then do
let fileName = fromJust baseName ++ ".teak"
writeNetworkFile False "" fileName $ guiParts info
else makeSaveAsNetworkFileChooser
Gtk.on saveAsNetworkAction Gtk.actionActivated $ makeSaveAsNetworkFileChooser
Gtk.on openNetworkAction Gtk.actionActivated $ do
info <- readIORef infoRef
chooser <- makeFileChooser (Just mainWindow) "Open Network" (guiLastNetworkFile info)
Gtk.FileChooserActionOpen
"gtk-open" $ \filename -> do
modifyIORef infoRef $ \info -> info { guiLastNetworkFile = Just filename }
Why comp parts <- runWhyT $ readNetworkFile filename
case comp of
Complete -> do
newParts infoRef parts
pushHistory infoRef
return True
_ -> return False
fileChooserAddFilters chooser [("*", "All files"), ("*.teak", "Teak network files")]
return ()
Gtk.on readRulesAction Gtk.actionActivated $ do
info <- readIORef infoRef
chooser <- makeFileChooser (Just mainWindow) "Read Optimisation Rules" (guiLastOptimRulesFile info)
Gtk.FileChooserActionOpen
"gtk-open" $ \filename -> do
modifyIORef infoRef $ \info -> info { guiLastOptimRulesFile = Just filename }
contents <- readFile filename
let
filePos = PosLC (PosFile filename (ImportFile filename)) 1 1
Why rulesComp ruleSet = parseRules filePos contents
case rulesComp of
Complete -> do
putStrLn $ "*** read " ++ summariseRuleSet ruleSet (" from file " ++ filename)
setRules infoRef ruleSet
pushHistory infoRef
return True
Wrong reports -> do
dialogue <- Gtk.messageDialogNew Nothing [Gtk.DialogModal] Gtk.MessageError
Gtk.ButtonsYesNo $ "File `" ++ filename ++ "' has errors\n\nView errors?"
response <- Gtk.dialogRun dialogue
Gtk.widgetDestroy dialogue
case response of
Gtk.ResponseYes -> do
makeSourceErrorViewWindow mainWindow "Rule Errors"
(Nothing :: Maybe (Context Decl)) filename reports
return False -- set to True if we want the file chooser to go away here
_ -> return False
Incomplete -> return False
fileChooserAddFilters chooser [("*", "All files"), ("*.rules", "Optim. rules files")]
return ()
Gtk.on readMonitorEventsAction Gtk.actionActivated $ do
chooser <- makeFileChooser (Just mainWindow) "Read Monitor Events Rules" Nothing Gtk.FileChooserActionOpen
"gtk-open" $ \filename -> do
info <- readIORef infoRef
Why comp monitorEvents <- runWhyT $ readMonitorFile (guiParts info) filename
case comp of
Complete -> do
putStrLn $ "*** read events from file " ++ filename
modifyToolOptions infoRef $ \opts -> opts { optPartMonitorEvents = monitorEvents }
info <- readIORef infoRef
guiSetTimeLimits info $ partMonitorEventsTimeLimits monitorEvents
return True
Wrong reports -> do
dialogue <- Gtk.messageDialogNew Nothing [Gtk.DialogModal] Gtk.MessageError
Gtk.ButtonsYesNo $ "File `" ++ filename ++ "' has errors\n\nView errors?"
response <- Gtk.dialogRun dialogue
Gtk.widgetDestroy dialogue
case response of
Gtk.ResponseYes -> do
makeSourceErrorViewWindow mainWindow "Monitor Event Errors"
(Nothing :: Maybe (Context Decl)) filename reports
return False -- set to True if we want the file chooser to go away here
_ -> return False
Incomplete -> return False
fileChooserAddFilters chooser [("*", "All files"), ("*.report", "Monitor state files")]
return ()
Gtk.on quitAction Gtk.actionActivated Gtk.mainQuit
Gtk.onDestroy mainWindow Gtk.mainQuit
	{- Populate structures, choose initial part and set up TreeView widgets -}
partsListModel <- makePartList partsList infoRef
modifyIORef infoRef $ \info -> info { guiPartListModel = partsListModel }
updateLeads infoRef
Gtk.on topLevelAction Gtk.actionActivated $ do
catchAndRestore infoRef $ do
info <- readIORef infoRef
Just part <- guiPart infoRef
let
parts = guiParts info
topLevelName = networkName part
parts' = uniquifyPart parts topLevelName
parts'' = removeGo parts' topLevelName
let Just part' = nwFindPart parts'' topLevelName
setParts infoRef parts''
setPart infoRef (Just part')
updateAll infoRef
updatePartSelection infoRef $ networkName part'
Gtk.on insertLatchesAction Gtk.actionActivated $ Gtk.widgetShowAll insertLatchesWindow
Gtk.on removeLatchesAction Gtk.actionActivated $ do
maybePart <- guiPart infoRef
when (isJust maybePart) $ do
let part' = runPart_ (fromJust maybePart) nwRemoveLatches
replacePart infoRef part'
setLeads infoRef []
updateLeads infoRef
pushHistory infoRef
readIORef infoRef >>= fitLayout . guiPartLayout
Gtk.on experimentalAction Gtk.actionActivated $ do
catchAndRestore infoRef $ do
Just part <- guiPart infoRef
putStrLn "*** findLinkDeps"
let linkDeps = tryPart part findLinkDeps
mapM_ print linkDeps
putStrLn "*** rootComponents"
let root = rootComponents linkDeps
mapM_ print root
putStrLn "*** makeEdges"
let edges = makeEdges linkDeps
mapM_ print edges
putStrLn "*** loop"
let loop = loopLinks edges root
mapM_ print loop
-- let backLinks = tryPart part $ findBackLinks 10
-- print backLinks
-- backLinks <- findBackLinks2 10
-- print backLinks
info <- readIORef infoRef
let builtinInfo = findBuiltins $ guiParts info
print $ fbsPartStates builtinInfo
setLayoutPreRender partLayoutRef $ \layout -> do
info <- readIORef infoRef
let
toolOpts = guiToolOptions info
guiOpts = optGui toolOpts
if findBoolSubOption guiOpts GuiShowMonitorEvents
then return $ fromMaybe (const (return ())) $ do
part <- layoutPart layout
graphical <- layoutGraphical layout
return $ renderMonitorEvents (guiStateColours info) guiOpts (optPartMonitorEvents toolOpts)
(guiTime info) part graphical
else return (const (return ()))
-- Gtk.onClicked flagsButton $ Gtk.widgetShowAll flagsWindow
Gtk.on flagsAction Gtk.actionActivated $ presentWindow flagsWindow
Gtk.on guiOptionsAction Gtk.actionActivated $ presentWindow guiOptionsWindow
Gtk.on leadsAction Gtk.actionActivated $ do
-- Gtk.onToolButtonClicked leadsButton $ do
pushHistory infoRef
info <- readIORef infoRef
maybePart <- guiPart infoRef
when (isJust maybePart) $ do
setLeads infoRef $ makePartLeads (fromJust maybePart) $ optEnabledOptims $ guiToolOptions info
updateLeads infoRef
Gtk.on optAction Gtk.actionActivated $ do
-- Gtk.onToolButtonClicked optButton $ do
info <- readIORef infoRef
maybePart <- guiPart infoRef
let Just part = maybePart
when (isJust maybePart) $ do
let
parts = guiParts info
Just i = nwFindPartIndex parts $ networkName part
leads = guiLeads info
context = OptimContext { optimContextParts = parts }
Why comp part' <- runWhyT $ applyLeads context part noOptimLog
(optEnabledOptims $ guiToolOptions info) leads
let parts' = replaceAt parts i part'
stop <- printCompleteness noPosContext comp
when (not stop) $ do
showNetworkDiff infoRef part part'
-- Check
printCompleteness noPosContext $ gatherCompleteness $ map checkPart parts'
setParts infoRef parts'
setPart infoRef $ Just part'
setLeads infoRef $ makePartLeads part' $ optEnabledOptims $ guiToolOptions info
-- setLeads infoRef []
redrawPart infoRef
updateLeads infoRef
pushHistory infoRef
Gtk.on stepAction Gtk.actionActivated $ do
-- Gtk.onToolButtonClicked stepButton $ do
-- FIXME, need to remove leads (or indicate when stepping) whose components have disappeared
info <- readIORef infoRef
maybePart <- guiPart infoRef
let Just part = maybePart
when (isJust maybePart) $ do
			-- Stepping when a line is selected applies that lead (or the next one that actually passes)
selection <- Gtk.treeViewGetSelection leadsList
selectedLeads <- liftM ((map leadViewIndex) . (mapMaybe (guiGetLeadView info . head))) $
Gtk.treeSelectionGetSelectedRows selection
let
parts = guiParts info
Just i = nwFindPartIndex parts $ networkName part
context = OptimContext { optimContextParts = parts }
allLeads = guiLeads info
(skippedLeads, tryLeads) = case selectedLeads of
[selectedLeadIndex] -> (firstLeads ++ nonPassingLeads, tryLeads1)
where
(firstLeads, tryLeads0) = splitAt selectedLeadIndex allLeads
(nonPassingLeads, tryLeads1) = skipToPassingLead context part tryLeads0
_ -> skipToPassingLead context part allLeads
(newLeads, oldLeads, part') <- if null tryLeads
then return ([], [], part)
else do
let
apply2 (newLeads1, part1) = return
(newLeadsToLeads (optEnabledOptims $ guiToolOptions info) newLeads1, part2)
where part2 = runPart_ part1 nwRemoveUnusedLinks
Why errors (newLeads, part') <- runWhyT $ defaultConnectWhyT ([], part)
(applyLead context part noOptimLog (head tryLeads)) apply2
-- writeNetworkFile False "" "afterStep" [part']
stop <- printCompleteness noPosContext errors
when (not stop) $ showNetworkDiff infoRef part part'
return (newLeads, tail tryLeads, part')
-- Unselect leads before messing with the lead list
Gtk.treeSelectionUnselectAll selection
let parts' = replaceAt parts i part'
-- Check
printCompleteness noPosContext $ gatherCompleteness $ map checkPart parts'
setParts infoRef parts'
setPart infoRef $ Just part'
setLeads infoRef $ newLeads ++ skippedLeads ++ oldLeads
updateLeads infoRef
redrawPart infoRef
pushHistory infoRef
leadSelection <- Gtk.treeViewGetSelection leadsList
Gtk.onSelectionChanged leadSelection $ do
info <- readIORef infoRef
rows <- Gtk.treeSelectionGetSelectedRows leadSelection
-- print rows
let
selectedLeads = map leadViewLead $ mapMaybe (guiGetLeadView info) $ concat rows
comps = map leadCompNo selectedLeads
-- print compNos
maybePart <- guiPart infoRef
let Just part = maybePart
when (isJust maybePart) $ do
let layoutRef = guiPartLayout info
partLayoutSetHighlightComps layoutRef comps
exposeLayout layoutRef
let context = OptimContext { optimContextParts = guiParts info }
mapM_ (\lead -> do
let summary = optimPassesTest context part lead
when (isJust summary) $ putStrLn $ fromJust summary) selectedLeads
Gtk.on undoAction Gtk.actionActivated $ undo infoRef
Gtk.on redoAction Gtk.actionActivated $ redo infoRef
Gtk.on makeGatesAction Gtk.actionActivated $ makeGates mainWindow infoRef
Gtk.on plotAction Gtk.actionActivated $ plot mainWindow infoRef
Gtk.on showTimeWindowAction Gtk.actionActivated $ presentWindow timeWindow
Gtk.on showNetworkDiffWindowAction Gtk.actionActivated $ presentWindow diffWindow
Gtk.on showOComponentWindowAction Gtk.actionActivated $ presentWindow oWindow
{- Go live -}
Gtk.widgetShowAll mainWindow
updatePartList infoRef
-- Just to get the rest of the windows realised
{- FIXME, check to see that it's OK to not do this. Seems to be a history problem -}
{-
Gtk.widgetShowAll oWindow
Gtk.widgetHide oWindow
Gtk.widgetShowAll diffWindow
Gtk.widgetHide diffWindow
-}
when (findBoolSubOption (optGui toolOpts0) GuiShowTimeWindow) $ presentWindow timeWindow
when (findBoolSubOption (optGui toolOpts0) GuiShowNetworkDiffWindow) $ presentWindow diffWindow
when (findBoolSubOption (optGui toolOpts0) GuiShowOWindow) $ presentWindow oWindow
installHandler sigINT (Catch $ exitWith (ExitFailure 1)) Nothing
Gtk.mainGUI
finalParts <- liftM guiParts $ readIORef infoRef
return finalParts
| Mahdi89/eTeak | src/Gui.hs | bsd-3-clause | 96,034 | 647 | 40 | 33,304 | 16,183 | 8,527 | 7,656 | -1 | -1 |
module SerializeText () where
import Control.Applicative
import Data.Binary
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
-- FIXME: Why isn't there already an instance?
instance Binary T.Text where
put = put . TE.encodeUtf8
get = TE.decodeUtf8 <$> get
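-- Round-trip sketch: decode (encode ("hello" :: T.Text)) == ("hello" :: T.Text),
-- with the Text serialised as its UTF-8 encoding.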
| bgamari/bayes-stack | network-topic-models/SerializeText.hs | bsd-3-clause | 312 | 0 | 7 | 72 | 67 | 42 | 25 | 8 | 0 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TemplateHaskell #-}
module T10891 where
import Language.Haskell.TH
import System.IO
class C a where
f :: a -> Int
class C' a where
type F a :: *
type F a = a
f' :: a -> Int
class C'' a where
data Fd a :: *
instance C' Int where
type F Int = Bool
f' = id
instance C'' Int where
data Fd Int = B Bool | C Char
$(return [])
test :: ()
test =
$(let
display :: Name -> Q ()
display q = do
i <- reify q
runIO (hPutStrLn stderr (pprint i) >> hFlush stderr)
in do
display ''C
display ''C'
display ''C''
[| () |])
| mpickering/ghc-exactprint | tests/examples/ghc8/T10891.hs | bsd-3-clause | 630 | 0 | 18 | 201 | 256 | 130 | 126 | 31 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-missing-fields #-}
-- | A Shakespearean module for Roy, introducing type-safe,
-- compile-time variable and url interpolation. It is exactly the same as
-- "Text.Julius", except that the template is first compiled to Javascript with
-- the system tool @roy@.
--
-- To use this module, @roy@ must be installed on your system.
--
-- If you interpolate variables,
-- the template is first wrapped with a function containing javascript variables representing shakespeare variables,
-- then compiled with @roy@,
-- and then the values of the variables are applied to the function.
-- This means that in production the template can be compiled
-- once at compile time and there will be no dependency in your production
-- system on @roy@.
--
-- Your code:
--
-- > let b = 1
-- > console.log(#{a} + b)
--
-- Final Result:
--
-- > ;(function(shakespeare_var_a){
-- > var b = 1;
-- > console.log(shakespeare_var_a + b);
-- > })(#{a});
--
-- Further reading:
--
-- 1. Shakespearean templates: <http://www.yesodweb.com/book/templates>
--
-- 2. Roy: <http://roy.brianmckenna.org/>
module Text.Roy
( -- * Functions
-- ** Template-Reading Functions
-- | These QuasiQuoter and Template Haskell methods return values of
-- type @'JavascriptUrl' url@. See the Yesod book for details.
roy
, royFile
, royFileReload
#ifdef TEST_EXPORT
, roySettings
#endif
) where
import Language.Haskell.TH.Quote (QuasiQuoter (..))
import Language.Haskell.TH.Syntax
import Text.Shakespeare
import Text.Julius
-- | The Roy language compiles down to Javascript.
-- We do this compilation once at compile time to avoid needing to do it during the request.
-- We call this a preConversion because other shakespeare modules like Lucius use Haskell to compile during the request rather than making a system call.
roySettings :: Q ShakespeareSettings
roySettings = do
jsettings <- javascriptSettings
return $ jsettings { varChar = '#'
, preConversion = Just PreConvert {
preConvert = ReadProcess "roy" ["--stdio", "--browser"]
, preEscapeIgnoreBalanced = "'\""
, preEscapeIgnoreLine = "//"
, wrapInsertion = Just WrapInsertion {
wrapInsertionIndent = Just " "
, wrapInsertionStartBegin = "(\\"
, wrapInsertionSeparator = " "
, wrapInsertionStartClose = " ->\n"
, wrapInsertionEnd = ")"
, wrapInsertionAddParens = True
}
}
}
-- | Read inline, quasiquoted Roy.
roy :: QuasiQuoter
roy = QuasiQuoter { quoteExp = \s -> do
rs <- roySettings
quoteExp (shakespeare rs) s
}
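-- A usage sketch, mirroring the example from the module header (the Haskell
-- variable @a@ is assumed to be in scope at the splice site):
--
-- > [roy|
-- >   let b = 1
-- >   console.log(#{a} + b)
-- > |]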
-- | Read in a Roy template file. This function reads the file once, at
-- compile time.
royFile :: FilePath -> Q Exp
royFile fp = do
rs <- roySettings
shakespeareFile rs fp
-- | Read in a Roy template file. This impure function uses
-- unsafePerformIO to re-read the file on every call, allowing for rapid
-- iteration.
royFileReload :: FilePath -> Q Exp
royFileReload fp = do
rs <- roySettings
shakespeareFileReload rs fp
| fgaray/shakespeare | Text/Roy.hs | mit | 3,132 | 1 | 17 | 626 | 344 | 214 | 130 | 40 | 1 |
module T14285a where
import qualified Data.Foldable as F
import qualified Data.IntMap as IM
import qualified Data.IntSet as IS
import Prelude hiding (null)
data Set k = Set IS.IntSet
empty = Set IS.empty
null (Set a) = IS.null a
sfromList :: (Enum a, Foldable c) => c a -> Set a
sfromList xs = Set $ IS.fromList $ Prelude.map fromEnum $ F.toList xs
{-# inlineable fromList #-}
fromList :: Enum k => [(k,v)] -> Map k v
fromList kvs =
Map $ IM.fromList $ Prelude.map (\(k,v) -> (fromEnum k, v)) kvs
newtype Map k v = Map { unMap :: (IM.IntMap v) } deriving (Eq, Ord)
{-# inlineable findWithDefault #-}
findWithDefault d k (Map m) = IM.findWithDefault d (fromEnum k) m
data Rel a b = Rel !(Map a (Set b)) !(Map b (Set a))
{-# INLINEABLE images #-}
images x (Rel f b) = findWithDefault empty x f
{-# INLINEABLE pre_images #-}
pre_images x rel = images x $ mirrorRel rel
{-# INLINEABLE mirrorRel #-}
mirrorRel :: Rel a b -> Rel b a
mirrorRel (Rel f g) = Rel g f
| ezyang/ghc | testsuite/tests/stranal/should_run/T14285a.hs | bsd-3-clause | 971 | 0 | 11 | 196 | 418 | 224 | 194 | 24 | 1 |
{-# LANGUAGE RebindableSyntax, OverloadedStrings #-}
module T12688 where
import Prelude (String,Show(..))
newtype Text = Text String
fromString :: String -> Text
fromString = Text
x :: Text
x = "x"
newtype Foo = Foo ()
deriving (Show)
| olsner/ghc | testsuite/tests/deriving/should_compile/T12688.hs | bsd-3-clause | 242 | 0 | 6 | 44 | 69 | 43 | 26 | 10 | 1 |
import Control.Monad (when)
import Data.Maybe (isJust)
import System.Environment (lookupEnv)
main :: IO ()
main = do
term <- lookupEnv "PATH"
when (isJust term) $ putStrLn "Got PATH"
fish <- lookupEnv "One fish, two fish, red fish, blue fish"
print fish
| urbanslug/ghc | libraries/base/tests/System/T5930.hs | bsd-3-clause | 271 | 0 | 10 | 57 | 91 | 44 | 47 | 9 | 1 |
import System.Environment
main :: IO ()
main = do (expr:_) <- getArgs
putStrLn $ show $ calcRPN expr
calcRPN :: String -> Float
calcRPN e | length finalStack == 1 = head $ finalStack
| otherwise = error "Unbalanced expression"
where finalStack = foldl evalCurrent [] $ words e
evalCurrent acc next
| next == "+" = execOp2 (+)
| next == "-" = execOp2 (-)
| next == "*" = execOp2 (*)
| next == "/" = execOp2 (/)
| next == "^" = execOp2 (**)
| next == "ln" = execOp1 (log)
| next == "s" = execOpA (sum)
| otherwise = (read next:acc)
          where execOp2 op = let (b:a:rest) = acc in (a `op` b) : rest
                execOp1 op = (op $ head acc) : drop 1 acc
execOpA op = [op acc]
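-- Sanity checks, assuming standard RPN semantics (the value on top of the
-- stack is the *second* operand of -, / and ^):
--   calcRPN "3 4 -"             should be -1.0
--   calcRPN "5 1 2 + 4 * + 3 -" should be 14.0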
| fredmorcos/attic | snippets/haskell/RPNCalc.hs | isc | 839 | 0 | 13 | 310 | 363 | 177 | 186 | 20 | 1 |
module GHCJS.DOM.StorageEvent (
) where
| manyoo/ghcjs-dom | ghcjs-dom-webkit/src/GHCJS/DOM/StorageEvent.hs | mit | 42 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Unison.Test.UriParser where
import EasyTest
import Unison.Codebase.Editor.RemoteRepo (ReadRepo(..))
import Unison.Codebase.Path (Path(..))
import qualified Unison.Codebase.Path as Path
import qualified Text.Megaparsec as P
import qualified Unison.Codebase.Editor.UriParser as UriParser
import qualified Data.Sequence as Seq
import Unison.Codebase.ShortBranchHash (ShortBranchHash(..))
import Data.Text (Text)
import Unison.NameSegment (NameSegment(..))
import qualified Data.Text as Text
test :: Test ()
test = scope "uriparser" . tests $ [ testAugmented ]
testAugmented:: Test ()
testAugmented = scope "augmented" . tests $
-- Local Protocol
-- $ git clone /srv/git/project.git
-- $ git clone /srv/git/project.git[:treeish][:[#hash][.path]]
[ scope "local-protocol" . tests . map parseAugmented $
[ ("/srv/git/project.git",
(ReadGitRepo "/srv/git/project.git", Nothing, Path.empty))
-- , ("/srv/git/project.git:abc:#def.hij.klm",
-- (ReadGitRepo "/srv/git/project.git" (Just "abc"), sbh "def", path ["hij", "klm"]))
, ("srv/git/project.git",
(ReadGitRepo "srv/git/project.git", Nothing, Path.empty))
-- , ("srv/git/project.git:abc:#def.hij.klm",
-- (ReadGitRepo "srv/git/project.git" (Just "abc"), sbh "def", path ["hij", "klm"]))
],
-- File Protocol
-- $ git clone file:///srv/git/project.git[:treeish][:[#hash][.path]] <- imagined
scope "file-protocol" . tests . map parseAugmented $
[ ("file:///srv/git/project.git",
(ReadGitRepo "file:///srv/git/project.git", Nothing, Path.empty))
-- , ("file:///srv/git/project.git:abc:#def.hij.klm",
-- (ReadGitRepo "file:///srv/git/project.git" (Just "abc"), sbh "def", path ["hij", "klm"]))
, ("file://srv/git/project.git",
(ReadGitRepo "file://srv/git/project.git", Nothing, Path.empty))
-- , ("file://srv/git/project.git:abc:#def.hij.klm",
-- (ReadGitRepo "file://srv/git/project.git" (Just "abc"), sbh "def", path ["hij", "klm"]))
],
-- Smart / Dumb HTTP protocol
-- $ git clone https://example.com/gitproject.git[:treeish][:[#hash][.path]] <- imagined
scope "http-protocol" . tests . map parseAugmented $
[ ("https://example.com/git/project.git",
(ReadGitRepo "https://example.com/git/project.git", Nothing, Path.empty))
-- , ("https://[email protected]/git/project.git:abc:#def.hij.klm]",
-- (ReadGitRepo "https://[email protected]/git/project.git" (Just "abc"), sbh "def", path ["hij", "klm"]))
],
-- SSH Protocol
-- $ git clone ssh://[user@]server/project.git[:treeish][:[#hash][.path]]
scope "ssh-protocol" . tests . map parseAugmented $
[ ("ssh://[email protected]:222/user/project.git",
(ReadGitRepo "ssh://[email protected]:222/user/project.git", Nothing, Path.empty))
-- , ("ssh://[email protected]/user/project.git:abc:#def.hij.klm",
-- (ReadGitRepo "ssh://[email protected]/user/project.git" (Just "abc"), sbh "def", path ["hij", "klm"]))
],
-- $ git clone [user@]server:project.git[:treeish][:[#hash][.path]]
scope "scp-protocol" . tests . map parseAugmented $
[ ("[email protected]:user/project.git",
(ReadGitRepo "[email protected]:user/project.git", Nothing, Path.empty))
, ("github.com:user/project.git",
(ReadGitRepo "github.com:user/project.git", Nothing, Path.empty))
-- , ("[email protected]:user/project.git:abc:#def.hij.klm",
-- (ReadGitRepo "[email protected]:user/project.git" (Just "abc"), sbh "def", path ["hij", "klm"]))
]
]
parseAugmented :: (Text, (ReadRepo, Maybe ShortBranchHash, Path)) -> Test ()
parseAugmented (s, r) = scope (Text.unpack s) $
case P.parse UriParser.repoPath "test case" s of
Left x -> crash $ show x
Right x -> expectEqual x r
path :: [Text] -> Path
path = Path . Seq.fromList . fmap NameSegment
sbh :: Text -> Maybe ShortBranchHash
sbh = Just . ShortBranchHash
| unisonweb/platform | parser-typechecker/tests/Unison/Test/UriParser.hs | mit | 3,882 | 0 | 11 | 583 | 649 | 381 | 268 | 47 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternSynonyms #-}
-- | This module is the primary interface to the Unison typechecker
module Unison.Typechecker (admissibleTypeAt, check, check', checkAdmissible', equals, locals, subtype, isSubtype, synthesize, synthesize', typeAt, wellTyped) where
import Control.Monad
import Unison.Note (Note,Noted)
import Unison.Paths (Path)
import Unison.Term (Term)
import Unison.Type (Type)
import Unison.Var (Var)
import qualified Unison.ABT as ABT
import qualified Unison.Note as Note
import qualified Unison.Paths as Paths
import qualified Unison.Term as Term
import qualified Unison.Type as Type
import qualified Unison.TypeVar as TypeVar
import qualified Unison.Typechecker.Context as Context
-- import Debug.Trace
-- watch msg a = trace (msg ++ show a) a
invalid :: (Show a1, Show a) => a -> a1 -> String
invalid loc ctx = "invalid path " ++ show loc ++ " in:\n" ++ show ctx
-- | Compute the allowed type of a replacement for a given subterm.
-- Example, in @\g -> map g [1,2,3]@, @g@ has an admissible type of
-- @Int -> r@, where @r@ is an unbound universal type variable, which
-- means that an @Int -> Bool@, an @Int -> String@, etc could all be
-- substituted for @g@.
--
-- Algorithm works by replacing the subterm, @e@ with
-- @(f e)@, where @f@ is a fresh function parameter. We then
-- read off the type of @e@ from the inferred result type of @f@.
admissibleTypeAt :: (Monad f, Var v)
=> Type.Env f v
-> Path
-> Term v
-> Noted f (Type v)
admissibleTypeAt synth loc t = Note.scoped ("admissibleTypeAt@" ++ show loc ++ " " ++ show t) $
let
f = ABT.v' "f"
shake (Type.Arrow' (Type.Arrow' _ tsub) _) = Type.generalize tsub
shake (Type.ForallNamed' _ t) = shake t
shake _ = error "impossible, f had better be a function"
in case Term.lam f <$> Paths.modifyTerm (\t -> Term.app (Term.var (ABT.Free f)) (Term.wrapV t)) loc t of
Nothing -> Note.failure $ invalid loc t
Just t -> shake <$> synthesize synth t
-- | Compute the type of the given subterm.
typeAt :: (Monad f, Var v) => Type.Env f v -> Path -> Term v -> Noted f (Type v)
typeAt synth [] t = Note.scoped ("typeAt: " ++ show t) $ synthesize synth t
typeAt synth loc t = Note.scoped ("typeAt@"++show loc ++ " " ++ show t) $
let
f = ABT.v' "f"
remember e = Term.var (ABT.Free f) `Term.app` Term.wrapV e
shake (Type.Arrow' (Type.Arrow' tsub _) _) = Type.generalize tsub
shake (Type.ForallNamed' _ t) = shake t
shake _ = error "impossible, f had better be a function"
in case Term.lam f <$> Paths.modifyTerm remember loc t of
Nothing -> Note.failure $ invalid loc t
Just t -> shake <$> synthesize synth t
-- | Return the type of all local variables in scope at the given location
locals :: (Show v, Monad f, Var v) => Type.Env f v -> Path -> Term v -> Noted f [(v, Type v)]
locals synth path ctx | ABT.isClosed ctx =
Note.scoped ("locals@"++show path ++ " " ++ show ctx)
((zip (map ABT.unvar vars)) <$> types)
where
-- replace focus, x, with `let saved = f v1 v2 v3 ... vn in x`,
  -- where `f` is a fresh variable, then infer the type of `f`, read off the
-- types of `v1`, `v2`, ...
vars = map ABT.Bound (Paths.inScopeAtTerm path ctx)
f = ABT.v' "f"
saved = ABT.v' "saved"
remember e = Term.let1 [(saved, Term.var (ABT.Free f) `Term.apps` map Term.var vars)] (Term.wrapV e)
usingAllLocals = Term.lam f (Paths.modifyTerm' remember path ctx)
types = if null vars then pure []
else extract <$> typeAt synth [] usingAllLocals
extract (Type.Arrow' i _) = extract1 i
extract (Type.ForallNamed' _ t) = extract t
extract t = error $ "expected function type, got: " ++ show t
extract1 (Type.Arrow' i o) = i : extract1 o
extract1 _ = []
locals _ _ ctx =
Note.failure $ "Term.locals: term contains free variables - " ++ show ctx
-- | Infer the type of a 'Unison.Syntax.Term', using
-- a function to resolve the type of @Ref@ constructors
-- contained in that term.
synthesize :: (Monad f, Var v) => Type.Env f v -> Term v -> Noted f (Type v)
synthesize env t = ABT.vmap TypeVar.underlying <$> Context.synthesizeClosed env t
-- | Infer the type of a 'Unison.Syntax.Term', assumed
-- not to contain any @Ref@ constructors
synthesize' :: Var v => Term v -> Either Note (Type v)
synthesize' term = join . Note.unnote $ synthesize missing term
where missing h = Note.failure $ "unexpected ref: " ++ show h
-- | Check whether a term matches a type, using a
-- function to resolve the type of @Ref@ constructors
-- contained in the term. Returns @typ@ if successful,
-- and a note about typechecking failure otherwise.
check :: (Monad f, Var v) => Type.Env f v -> Term v -> Type v -> Noted f (Type v)
check env term typ = synthesize env (Term.ann term typ)
-- | Check whether a term, assumed to contain no @Ref@ constructors,
-- matches a given type. Return @Left@ if any references exist, or
-- if typechecking fails.
check' :: Var v => Term v -> Type v -> Either Note (Type v)
check' term typ = join . Note.unnote $ check missing term typ
where missing h = Note.failure $ "unexpected ref: " ++ show h
-- | `checkAdmissible' e t` tests that `(f : t -> r) e` is well-typed.
-- If `t` has quantifiers, these are moved outside, so if `t : forall a . a`,
-- this will check that `(f : forall a . a -> a) e` is well typed.
checkAdmissible' :: Var v => Term v -> Type v -> Either Note (Type v)
checkAdmissible' term typ =
synthesize' (Term.blank `Term.ann` tweak typ `Term.app` term)
where
tweak (Type.ForallNamed' v body) = Type.forall v (tweak body)
tweak t = t `Type.arrow` t
-- | Returns `True` if the expression is well-typed, `False` otherwise
wellTyped :: (Monad f, Var v) => Type.Env f v -> Term v -> Noted f Bool
wellTyped synth term = (const True <$> synthesize synth term) `Note.orElse` pure False
-- | @subtype a b@ is @Right b@ iff @f x@ is well-typed given
-- @x : a@ and @f : b -> t@. That is, if a value of type `a`
-- can be passed to a function expecting a `b`, then `subtype a b`
-- returns `Right b`. This function returns @Left note@ with information
-- about the reason for subtyping failure otherwise.
--
-- Example: @subtype (forall a. a -> a) (Int -> Int)@ returns @Right (Int -> Int)@.
subtype :: Var v => Type v -> Type v -> Either Note (Type v)
subtype t1 t2 =
let (t1', t2') = (ABT.vmap TypeVar.Universal t1, ABT.vmap TypeVar.Universal t2)
in case Context.runM (Context.subtype t1' t2') Context.env0 of
Left e -> Left e
Right _ -> Right t2
-- | Returns true if @subtype t1 t2@ returns @Right@, false otherwise
isSubtype :: Var v => Type v -> Type v -> Bool
isSubtype t1 t2 = case subtype t1 t2 of
Left _ -> False
Right _ -> True
-- | Returns true if the two type are equal, up to alpha equivalence and
-- order of quantifier introduction. Note that alpha equivalence considers:
-- `forall b a . a -> b -> a` and
-- `forall a b . a -> b -> a` to be different types
equals :: Var v => Type v -> Type v -> Bool
equals t1 t2 = isSubtype t1 t2 && isSubtype t2 t1
| nightscape/platform | shared/src/Unison/Typechecker.hs | mit | 7,128 | 0 | 18 | 1,533 | 2,035 | 1,036 | 999 | 91 | 5 |
module Feature.RpcSpec where
import Test.Hspec
import Test.Hspec.Wai
import Test.Hspec.Wai.JSON
import Network.HTTP.Types
import Network.Wai.Test (SResponse(simpleStatus, simpleBody))
import qualified Data.ByteString.Lazy as BL (empty)
import SpecHelper
import Text.Heredoc
import Network.Wai (Application)
import Protolude hiding (get)
spec :: SpecWith Application
spec =
describe "remote procedure call" $ do
context "a proc that returns a set" $ do
it "returns paginated results" $ do
request methodPost "/rpc/getitemrange"
(rangeHdrs (ByteRangeFromTo 0 0)) [json| { "min": 2, "max": 4 } |]
`shouldRespondWith` [json| [{"id":3}] |]
{ matchStatus = 200
, matchHeaders = ["Content-Range" <:> "0-0/*"]
}
request methodGet "/rpc/getitemrange?min=2&max=4"
(rangeHdrs (ByteRangeFromTo 0 0)) ""
`shouldRespondWith` [json| [{"id":3}] |]
{ matchStatus = 200
, matchHeaders = ["Content-Range" <:> "0-0/*"]
}
it "includes total count if requested" $ do
request methodPost "/rpc/getitemrange"
(rangeHdrsWithCount (ByteRangeFromTo 0 0))
[json| { "min": 2, "max": 4 } |]
`shouldRespondWith` [json| [{"id":3}] |]
{ matchStatus = 206 -- it now knows the response is partial
, matchHeaders = ["Content-Range" <:> "0-0/2"]
}
request methodGet "/rpc/getitemrange?min=2&max=4"
(rangeHdrsWithCount (ByteRangeFromTo 0 0)) ""
`shouldRespondWith` [json| [{"id":3}] |]
{ matchStatus = 206
, matchHeaders = ["Content-Range" <:> "0-0/2"]
}
it "returns proper json" $ do
post "/rpc/getitemrange" [json| { "min": 2, "max": 4 } |] `shouldRespondWith`
[json| [ {"id": 3}, {"id":4} ] |]
{ matchHeaders = [matchContentTypeJson] }
get "/rpc/getitemrange?min=2&max=4" `shouldRespondWith`
[json| [ {"id": 3}, {"id":4} ] |]
{ matchHeaders = [matchContentTypeJson] }
it "returns CSV" $ do
request methodPost "/rpc/getitemrange"
(acceptHdrs "text/csv")
[json| { "min": 2, "max": 4 } |]
`shouldRespondWith` "id\n3\n4"
{ matchStatus = 200
, matchHeaders = ["Content-Type" <:> "text/csv; charset=utf-8"]
}
request methodGet "/rpc/getitemrange?min=2&max=4"
(acceptHdrs "text/csv") ""
`shouldRespondWith` "id\n3\n4"
{ matchStatus = 200
, matchHeaders = ["Content-Type" <:> "text/csv; charset=utf-8"]
}
context "unknown function" $ do
it "returns 404" $
post "/rpc/fakefunc" [json| {} |] `shouldRespondWith` 404
it "should fail with 404 on unknown proc name" $
get "/rpc/fake" `shouldRespondWith` 404
it "should fail with 404 on unknown proc args" $ do
get "/rpc/sayhello" `shouldRespondWith` 404
get "/rpc/sayhello?any_arg=value" `shouldRespondWith` 404
it "works when having uppercase identifiers" $ do
get "/rpc/quotedFunction?user=mscott&fullName=Michael Scott&SSN=401-32-XXXX" `shouldRespondWith`
[json|{"user": "mscott", "fullName": "Michael Scott", "SSN": "401-32-XXXX"}|]
{ matchHeaders = [matchContentTypeJson] }
post "/rpc/quotedFunction"
[json|{"user": "dschrute", "fullName": "Dwight Schrute", "SSN": "030-18-XXXX"}|]
`shouldRespondWith`
[json|{"user": "dschrute", "fullName": "Dwight Schrute", "SSN": "030-18-XXXX"}|]
{ matchHeaders = [matchContentTypeJson] }
context "shaping the response returned by a proc" $ do
it "returns a project" $ do
post "/rpc/getproject" [json| { "id": 1} |] `shouldRespondWith`
[str|[{"id":1,"name":"Windows 7","client_id":1}]|]
get "/rpc/getproject?id=1" `shouldRespondWith`
[str|[{"id":1,"name":"Windows 7","client_id":1}]|]
it "can filter proc results" $ do
post "/rpc/getallprojects?id=gt.1&id=lt.5&select=id" [json| {} |] `shouldRespondWith`
[json|[{"id":2},{"id":3},{"id":4}]|]
{ matchHeaders = [matchContentTypeJson] }
get "/rpc/getallprojects?id=gt.1&id=lt.5&select=id" `shouldRespondWith`
[json|[{"id":2},{"id":3},{"id":4}]|]
{ matchHeaders = [matchContentTypeJson] }
it "can limit proc results" $ do
post "/rpc/getallprojects?id=gt.1&id=lt.5&select=id?limit=2&offset=1" [json| {} |]
`shouldRespondWith` [json|[{"id":3},{"id":4}]|]
{ matchStatus = 200
, matchHeaders = ["Content-Range" <:> "1-2/*"] }
get "/rpc/getallprojects?id=gt.1&id=lt.5&select=id?limit=2&offset=1"
`shouldRespondWith` [json|[{"id":3},{"id":4}]|]
{ matchStatus = 200
, matchHeaders = ["Content-Range" <:> "1-2/*"] }
it "select works on the first level" $ do
post "/rpc/getproject?select=id,name" [json| { "id": 1} |] `shouldRespondWith`
[str|[{"id":1,"name":"Windows 7"}]|]
get "/rpc/getproject?id=1&select=id,name" `shouldRespondWith`
[str|[{"id":1,"name":"Windows 7"}]|]
context "foreign entities embedding" $ do
it "can embed if related tables are in the exposed schema" $ do
post "/rpc/getproject?select=id,name,client(id),tasks(id)" [json| { "id": 1} |] `shouldRespondWith`
[json|[{"id":1,"name":"Windows 7","client":{"id":1},"tasks":[{"id":1},{"id":2}]}]|]
{ matchHeaders = [matchContentTypeJson] }
get "/rpc/getproject?id=1&select=id,name,client(id),tasks(id)" `shouldRespondWith`
[json|[{"id":1,"name":"Windows 7","client":{"id":1},"tasks":[{"id":1},{"id":2}]}]|]
{ matchHeaders = [matchContentTypeJson] }
it "cannot embed if the related table is not in the exposed schema" $ do
post "/rpc/single_article?select=*,article_stars(*)" [json|{ "id": 1}|]
`shouldRespondWith` 400
get "/rpc/single_article?id=1&select=*,article_stars(*)"
`shouldRespondWith` 400
it "can embed if the related tables are in a hidden schema but exposed as views" $ do
post "/rpc/single_article?select=id,articleStars(userId)" [json|{ "id": 2}|]
`shouldRespondWith` [json|[{"id": 2, "articleStars": [{"userId": 3}]}]|]
{ matchHeaders = [matchContentTypeJson] }
get "/rpc/single_article?id=2&select=id,articleStars(userId)"
`shouldRespondWith` [json|[{"id": 2, "articleStars": [{"userId": 3}]}]|]
{ matchHeaders = [matchContentTypeJson] }
context "a proc that returns an empty rowset" $
it "returns empty json array" $ do
post "/rpc/test_empty_rowset" [json| {} |] `shouldRespondWith`
[json| [] |]
{ matchHeaders = [matchContentTypeJson] }
get "/rpc/test_empty_rowset" `shouldRespondWith`
[json| [] |]
{ matchHeaders = [matchContentTypeJson] }
context "proc return types" $ do
context "returns text" $ do
it "returns proper json" $
post "/rpc/sayhello" [json| { "name": "world" } |] `shouldRespondWith`
[json|"Hello, world"|]
{ matchHeaders = [matchContentTypeJson] }
it "can handle unicode" $
post "/rpc/sayhello" [json| { "name": "¥" } |] `shouldRespondWith`
[json|"Hello, ¥"|]
{ matchHeaders = [matchContentTypeJson] }
it "returns array" $
post "/rpc/ret_array" [json|{}|] `shouldRespondWith`
[json|[1, 2, 3]|]
{ matchHeaders = [matchContentTypeJson] }
it "returns setof integers" $
post "/rpc/ret_setof_integers" [json|{}|] `shouldRespondWith`
[json|[{ "ret_setof_integers": 1 },
{ "ret_setof_integers": 2 },
{ "ret_setof_integers": 3 }]|]
{ matchHeaders = [matchContentTypeJson] }
it "returns enum value" $
post "/rpc/ret_enum" [json|{ "val": "foo" }|] `shouldRespondWith`
[json|"foo"|]
{ matchHeaders = [matchContentTypeJson] }
it "returns domain value" $
post "/rpc/ret_domain" [json|{ "val": "8" }|] `shouldRespondWith`
[json|8|]
{ matchHeaders = [matchContentTypeJson] }
it "returns range" $
post "/rpc/ret_range" [json|{ "low": 10, "up": 20 }|] `shouldRespondWith`
[json|"[10,20)"|]
{ matchHeaders = [matchContentTypeJson] }
it "returns row of scalars" $
post "/rpc/ret_scalars" [json|{}|] `shouldRespondWith`
[json|[{"a":"scalars", "b":"foo", "c":1, "d":"[10,20)"}]|]
{ matchHeaders = [matchContentTypeJson] }
it "returns composite type in exposed schema" $
post "/rpc/ret_point_2d" [json|{}|] `shouldRespondWith`
[json|[{"x": 10, "y": 5}]|]
{ matchHeaders = [matchContentTypeJson] }
it "cannot return composite type in hidden schema" $
post "/rpc/ret_point_3d" [json|{}|] `shouldRespondWith` 401
it "returns single row from table" $
post "/rpc/single_article?select=id" [json|{"id": 2}|] `shouldRespondWith`
[json|[{"id": 2}]|]
{ matchHeaders = [matchContentTypeJson] }
it "returns null for void" $
post "/rpc/ret_void" [json|{}|] `shouldRespondWith`
[json|null|]
{ matchHeaders = [matchContentTypeJson] }
context "improper input" $ do
it "rejects unknown content type even if payload is good" $ do
request methodPost "/rpc/sayhello"
(acceptHdrs "audio/mpeg3") [json| { "name": "world" } |]
`shouldRespondWith` 415
request methodGet "/rpc/sayhello?name=world"
(acceptHdrs "audio/mpeg3") ""
`shouldRespondWith` 415
it "rejects malformed json payload" $ do
p <- request methodPost "/rpc/sayhello"
(acceptHdrs "application/json") "sdfsdf"
liftIO $ do
simpleStatus p `shouldBe` badRequest400
isErrorFormat (simpleBody p) `shouldBe` True
it "treats simple plpgsql raise as invalid input" $ do
p <- post "/rpc/problem" "{}"
liftIO $ do
simpleStatus p `shouldBe` badRequest400
isErrorFormat (simpleBody p) `shouldBe` True
context "unsupported verbs" $ do
it "DELETE fails" $
request methodDelete "/rpc/sayhello" [] ""
`shouldRespondWith` 405
it "PATCH fails" $
request methodPatch "/rpc/sayhello" [] ""
`shouldRespondWith` 405
it "OPTIONS fails" $
-- TODO: should return info about the function
request methodOptions "/rpc/sayhello" [] ""
`shouldRespondWith` 405
it "executes the proc exactly once per request" $ do
post "/rpc/callcounter" [json| {} |] `shouldRespondWith`
[json|1|]
{ matchHeaders = [matchContentTypeJson] }
post "/rpc/callcounter" [json| {} |] `shouldRespondWith`
[json|2|]
{ matchHeaders = [matchContentTypeJson] }
context "a proc that receives no parameters" $ do
it "interprets empty string as empty json object on a post request" $
post "/rpc/noparamsproc" BL.empty `shouldRespondWith`
[json| "Return value of no parameters procedure." |]
{ matchHeaders = [matchContentTypeJson] }
it "interprets empty string as a function with no args on a get request" $
get "/rpc/noparamsproc" `shouldRespondWith`
[json| "Return value of no parameters procedure." |]
{ matchHeaders = [matchContentTypeJson] }
it "returns proper output when having the same return col name as the proc name" $ do
post "/rpc/test" [json|{}|] `shouldRespondWith`
[json|[{"test":"hello","value":1}]|] { matchHeaders = [matchContentTypeJson] }
get "/rpc/test" `shouldRespondWith`
[json|[{"test":"hello","value":1}]|] { matchHeaders = [matchContentTypeJson] }
context "procs with OUT/INOUT params" $ do
it "returns a scalar result when there is a single OUT param" $ do
get "/rpc/single_out_param?num=5" `shouldRespondWith`
[json|6|] { matchHeaders = [matchContentTypeJson] }
get "/rpc/single_json_out_param?a=1&b=two" `shouldRespondWith`
[json|{"a": 1, "b": "two"}|] { matchHeaders = [matchContentTypeJson] }
it "returns a scalar result when there is a single INOUT param" $
get "/rpc/single_inout_param?num=2" `shouldRespondWith`
[json|3|] { matchHeaders = [matchContentTypeJson] }
it "returns a row result when there are many OUT params" $
get "/rpc/many_out_params" `shouldRespondWith`
[json|[{"my_json":{"a": 1, "b": "two"},"num":3,"str":"four"}]|] { matchHeaders = [matchContentTypeJson] }
it "returns a row result when there are many INOUT params" $
get "/rpc/many_inout_params?num=1&str=two&b=false" `shouldRespondWith`
[json| [{"num":1,"str":"two","b":false}]|] { matchHeaders = [matchContentTypeJson] }
it "can handle procs with args that have a DEFAULT value" $ do
get "/rpc/many_inout_params?num=1&str=two" `shouldRespondWith`
[json| [{"num":1,"str":"two","b":true}]|] { matchHeaders = [matchContentTypeJson] }
get "/rpc/three_defaults?b=4" `shouldRespondWith`
[json|8|] { matchHeaders = [matchContentTypeJson] }
it "can map a RAISE error code and message to a http status" $
get "/rpc/raise_pt402"
`shouldRespondWith` [json|{ "hint": "Upgrade your plan", "details": "Quota exceeded" }|]
{ matchStatus = 402
, matchHeaders = [matchContentTypeJson]
}
it "defaults to status 500 if RAISE code is PT not followed by a number" $
get "/rpc/raise_bad_pt" `shouldRespondWith` 500
context "expects a single json object" $ do
it "does not expand posted json into parameters" $
request methodPost "/rpc/singlejsonparam"
[("prefer","params=single-object")] [json| { "p1": 1, "p2": "text", "p3" : {"obj":"text"} } |] `shouldRespondWith`
[json| { "p1": 1, "p2": "text", "p3" : {"obj":"text"} } |]
{ matchHeaders = [matchContentTypeJson] }
it "accepts parameters from an html form" $
request methodPost "/rpc/singlejsonparam"
[("Prefer","params=single-object"),("Content-Type", "application/x-www-form-urlencoded")]
("integer=7&double=2.71828&varchar=forms+are+fun&" <>
"boolean=false&date=1900-01-01&money=$3.99&enum=foo") `shouldRespondWith`
[json| { "integer": "7", "double": "2.71828", "varchar" : "forms are fun"
, "boolean":"false", "date":"1900-01-01", "money":"$3.99", "enum":"foo" } |]
{ matchHeaders = [matchContentTypeJson] }
it "works with GET" $
request methodGet "/rpc/singlejsonparam?p1=1&p2=text" [("Prefer","params=single-object")] ""
`shouldRespondWith` [json|{ "p1": "1", "p2": "text"}|]
{ matchHeaders = [matchContentTypeJson] }
it "should work with an overloaded function" $ do
get "/rpc/overloaded" `shouldRespondWith`
[json|[{ "overloaded": 1 },
{ "overloaded": 2 },
{ "overloaded": 3 }]|]
{ matchHeaders = [matchContentTypeJson] }
request methodPost "/rpc/overloaded" [("Prefer","params=single-object")]
[json|[{"x": 1, "y": "first"}, {"x": 2, "y": "second"}]|]
`shouldRespondWith`
[json|[{"x": 1, "y": "first"}, {"x": 2, "y": "second"}]|]
{ matchHeaders = [matchContentTypeJson] }
get "/rpc/overloaded?a=1&b=2" `shouldRespondWith` [str|3|]
get "/rpc/overloaded?a=1&b=2&c=3" `shouldRespondWith` [str|"123"|]
context "only for POST rpc" $ do
it "gives a parse filter error if GET style proc args are specified" $
post "/rpc/sayhello?name=John" [json|{}|] `shouldRespondWith` 400
it "ignores json keys not included in ?columns" $
post "/rpc/sayhello?columns=name"
[json|{"name": "John", "smth": "here", "other": "stuff", "fake_id": 13}|] `shouldRespondWith`
[json|"Hello, John"|]
{ matchHeaders = [matchContentTypeJson] }
context "only for GET rpc" $ do
it "should fail on mutating procs" $ do
get "/rpc/callcounter" `shouldRespondWith` 500
get "/rpc/setprojects?id_l=1&id_h=5&name=FreeBSD" `shouldRespondWith` 500
it "should filter a proc that has arg name = filter name" $
get "/rpc/get_projects_below?id=5&id=gt.2&select=id" `shouldRespondWith`
[json|[{ "id": 3 }, { "id": 4 }]|]
{ matchHeaders = [matchContentTypeJson] }
it "should work with filters that have the not operator" $ do
get "/rpc/get_projects_below?id=5&id=not.gt.2&select=id" `shouldRespondWith`
[json|[{ "id": 1 }, { "id": 2 }]|]
{ matchHeaders = [matchContentTypeJson] }
get "/rpc/get_projects_below?id=5&id=not.in.(1,3)&select=id" `shouldRespondWith`
[json|[{ "id": 2 }, { "id": 4 }]|]
{ matchHeaders = [matchContentTypeJson] }
it "should work with filters that use the plain with language fts operator" $ do
get "/rpc/get_tsearch?text_search_vector=fts(english).impossible" `shouldRespondWith`
[json|[{"text_search_vector":"'fun':5 'imposs':9 'kind':3"}]|]
{ matchHeaders = [matchContentTypeJson] }
get "/rpc/get_tsearch?text_search_vector=plfts.impossible" `shouldRespondWith`
[json|[{"text_search_vector":"'fun':5 'imposs':9 'kind':3"}]|]
{ matchHeaders = [matchContentTypeJson] }
get "/rpc/get_tsearch?text_search_vector=not.fts(english).fun%7Crat" `shouldRespondWith`
[json|[{"text_search_vector":"'amus':5 'fair':7 'impossibl':9 'peu':4"},{"text_search_vector":"'art':4 'spass':5 'unmog':7"}]|]
{ matchHeaders = [matchContentTypeJson] }
it "should work with an argument of custom type in public schema" $
get "/rpc/test_arg?my_arg=something" `shouldRespondWith`
[json|"foobar"|]
{ matchHeaders = [matchContentTypeJson] }
| begriffs/postgrest | test/Feature/RpcSpec.hs | mit | 18,200 | 0 | 20 | 4,590 | 3,178 | 1,844 | 1,334 | -1 | -1 |
module BreveLang
where
-- (
-- Expr (..)
-- , Statement (..)
-- , Trace
-- , Traces
-- , breveParser
-- , pitchClasses
-- , durations
-- ) where
{-
Breve:
pitchclass ::= ( A | B | C | D | E | F | G ) ( ff | f | ss | s )
integer ::= digit {digit}
double ::= integer '.' integer
boolean ::= "true" | "false"
note = '(' expr expr expr ')'
rest ::= '(' "rest" expr ')'
snippet ::= '{' expr {',' expr } '}'
identifier ::= (lowercase | _ ) {letter | digit | underscore}
var ::= identifier
list = '[' expr {',' expr} ']'
expr ::= pitchclass
| integer | double | boolean
| note | rest
| snippet | list
| expr binop expr
| unop expr
| identifier
| '(\' ident {ident} '->' (expr | assign {assign} return ')'
| identifier'(' {expr} ')'
| 'if' expr 'then' expr 'else' expr
| 'case' expr 'of' {pattern '->' expr ';'}
| '(' expr ')'
assign ::= identifier '=' expr
return ::= "return" expr
sequence ::= statement {';' statement}
statement ::= assign | return | sequence
pattern ::= pitchclass
| integer
| double
| boolean
| '(' pattern pattern pattern ')'
| '(' 'rest' pattern ')'
| '[' pat {',' pat} ']'
| '[' ']'
| '(' pat ':' pat {':' pat} ')'
| '{' pat {',' pat} '}'
| '(' pat ':' '{' '}' ')'
| identifier
| '_'
lineComment ::= --
blockComment ::= {- ... -}
-}
import qualified Euterpea.Music.Note.Music as E
import Data.List (intercalate)
import Text.Parsec
import Text.Parsec.Expr
import Text.Parsec.Language (emptyDef)
import Text.Parsec.String (Parser)
import Text.Parsec.Token
-- Loc is a the (line, column) location of a literal, used as part of traces
-- during synthesis. Note these do not necessarily map 100% accurately to the
-- location of the literal; it is only necessary that each literal has a unique
-- location such that loc(A) < loc(B) if A appears before B in the source.
-- (The second property is useful, but not absolutely 100% necessary.)
type Loc = (Int, Int)
-- Expr represents an expression in Breve.
-- See langauge definition above.
data Expr = PitchClass E.PitchClass Loc
| N Integer Loc | D Double Loc
| B Bool
| UnOpExpr UnOp Expr
| BinOpExpr BinOp Expr Expr
| Note Expr Expr Expr -- PitchClass, Octave, Duration
| Rest Expr -- Duration
| Snippet [Expr] -- Note | Rest
| Var String
| List [Expr]
| Lambda [Pat] Statement -- Seq or Return, most likely
| App String [Expr] -- Function name, arguments
| If Expr Expr Expr -- If statement: condition, true branch, false branch
| Case Expr [(Pat, Expr)] -- case expr of pat -> expr; ...
deriving (Eq)
-- Specialized Show (toString) for Expr.
instance Show Expr where
show (PitchClass e _) = show e
show (N i _) = show i
show (D d _) = show d
show (B b) = show b
show (UnOpExpr u e) = shows u (show e)
show (BinOpExpr b@Div e1 e2) = concat [show e1, show b, show e2]
show (BinOpExpr b e1 e2) = unwords [show e1, show b, show e2]
show (Note p o d) = '(' : unwords [show p, show o, shows d ")"]
show (Rest d) = "(rest " ++ shows d ")"
show (Snippet ss) = '{' : intercalate ", " (map show ss) ++ "}"
show (Var v) = v
show (List ls) = '[' : intercalate ", " (map show ls) ++ "]"
show (Lambda v s) = '(':'\\': unwords (map show v) ++ " -> " ++ shows s ")"
show (App n as) = n ++ "(" ++ intercalate ", " (map show as) ++ ")"
show (If c t f) = "if " ++ shows c " then " ++ shows t " else " ++ show f
show (Case e ps) = "case " ++ shows e " of " ++
intercalate "; " (map (\(p,r) -> '(' : show p ++ " -> " ++ shows r ")") ps)
-- Binary Operators, such as addition, multiplication, etc.
data BinOp =
SeqOp | ParOp -- snippets
| Add | Mult | Div | Sub -- math
| Eq | Neq | Lt | Lte | Gt | Gte -- equality
| Cons | Cat -- list
deriving (Eq)
-- The string representations of BinOps are used during parsing. See the optable
-- below.
instance Show BinOp where
show SeqOp = ":+:"
show ParOp = ":=:"
show Add = "+"
show Mult = "*"
show Div = "/"
show Sub = "-"
show Eq = "=="
show Neq = "!="
show Lt = "<"
show Lte = "<="
show Gt = ">"
show Gte = ">="
show Cons = ":"
show Cat = "++"
-- Unary operators. Right now just negation for booleans and numbers.
-- You may have noticed the language specification does not include negative
-- integers. This is due to the negation operator and how Parsec handles numbers;
-- trust me, it's just EASIER this way.
data UnOp = Not | Neg deriving (Eq)
instance Show UnOp where
show Not = "!"
show Neg = "-"
-- Statement represents a full line of a breve program: an assignment, a return
-- (only valid inside lambdas) and a sequence of statements.
-- Statements end with semicolons.
data Statement = Assign String Expr | Return Expr | Seq [Statement] deriving (Eq)
-- Interestingly, the show instances we have so far are basically sufficient for
-- recreating the source of a program simply by calling "show".
-- However, comments are lost -- again due to parsec.
instance Show Statement where
show (Assign s e) = unwords [s, "=", shows e ";"]
show (Seq ss) = unlines (map show ss)
show (Return e) = "return " ++ shows e ";"
-- Pat represents a pattern matching construct, used in case expressions and
-- lambdas.
data Pat =
-- Constants
Ppc E.PitchClass
| Pn Integer
| Pd Double
| Pb Bool
-- Structures
| Pnote Pat Pat Pat
| Prest Pat
| Plist [Pat]
| Psnip [Pat]
-- Variables
| Pvar String
| Pwc -- wildcard
| Ppat String Pat -- at pattern, e.g. l@(x:xs)
-- Splitting
| Psplit Pat Pat -- (:) for splitting list and snippet elements off
deriving (Show, Eq)
-- All the pitches possible in the language. The order of the m list is
-- significant.
pitchClasses = [n : m | n <- ['A'..'G'], m <- ["ff", "ss", "f", "s", ""]]
-- Current language keywords. Some of them are "reserved" for future use
-- (namely def)
keywords = ["rest", "true", "false", "if", "then", "else", "def", "return", "case", "of"]
-- Operators. Neat how we can have one canonical representation for each op by
-- implementing Show, huh?
mathOps = map show [Add, Sub, Mult, Div]
unOps = map show [Neg, Not]
boolOps = map show [Eq, Neq, Lt, Lte, Gt, Gte]
listOps = map show [Cons, Cat]
catOps = map show [SeqOp, ParOp] ++ ["=", "\\", "->", "@"]
-- The language definition used by Parsec to generate parsers.
-- This is somewhat broader than it needs to be, but that allows for
-- expansion of the language.
-- (For example, opStart and opLetter are primarily for user-defined ops, which
-- are not going to be a thing in Breve any time soon.)
breveDef :: LanguageDef st
breveDef = emptyDef { commentStart = "{-"
, commentEnd = "-}"
, nestedComments = True
, commentLine = "--"
, identStart = lower <|> char '_'
, identLetter = alphaNum <|> char '_'
, opStart = oneOf ":!#$%&*+./<=>?@\\^|-~"
, opLetter = oneOf ":!#$%&*+./<=>?@\\^|-~"
, reservedNames = pitchClasses ++ keywords
, reservedOpNames = catOps ++ mathOps ++ unOps ++ boolOps ++ listOps
, caseSensitive = True
}
-- This bizarre-looking reversed construct is used to extract the language
-- parsers we need from the call to "makeTokenParser".
TokenParser { identifier = b_identifier
, reserved = b_reserved
, reservedOp = b_resop
, naturalOrFloat = b_number
, symbol = b_symbol
, parens = b_parens
, braces = b_braces
, brackets = b_brackets
, semi = b_semi
, commaSep = b_commaSep
, whiteSpace = b_whitespace } = makeTokenParser breveDef
-- The default semiSep1 uses sepBy, which expects something like "a, b, c".
-- That's fine for lists, but we're processing semicolon-terminated statements.
-- We expect the last statement to have a semicolon! So sepEndBy makes more
-- sense. (The final separator *is* optional.)
b_semiSep1 p = sepEndBy p b_semi
-- The main hook for calling the breveParser.
-- Should be used with "runParser".
breveParser :: Parser Statement
breveParser = b_whitespace >> parseSeq <* eof
-- ===================
-- Parsing Statements
-- ===================
-- All of the parsers are named very clearly:
-- parse<Expr> or parsePat<Pattern>
-- Comments are mainly for explaining rationale, since what the thing is
-- attempting to parse should be clear.
-- "do" notation is avoided as much as possible.
-- Parses a sequence of semicolon-separated statements.
parseSeq :: Parser Statement
parseSeq = fmap Seq (b_semiSep1 parseStatement)
-- Parses the other two kinds of statements: return and assign.
-- Return comes first so it can eat "return" keywords before parseAssign has
-- a go.
parseStatement :: Parser Statement
parseStatement = parseReturn <|> try parseAssign
parseAssign :: Parser Statement
parseAssign = Assign <$> (b_identifier <* b_resop "=") <*> parseExpr
parseReturn :: Parser Statement
parseReturn = Return <$> (b_reserved "return" *> parseExpr)
-- ===================
-- Parsing Expressions
-- ===================
-- parseExpr was a hard-fought battle of not quite understanding what Parsec was
-- trying to do with parseTerm. However, this makes much more sense now.
-- It's essentially going to use the chainr combinator, which tries to parse a
-- "chain" of an operator. e.g. 1 + 2 + 3 + 4 is implemented using chainr
-- (numberparser) (additionopparser). Given that understanding, we have to
-- define the term parser as either a single term or an (expression in
-- parentheses).
parseExpr :: Parser Expr
parseExpr = buildExpressionParser opTable term <?> msg
where
term = parseTerm <|> b_parens parseExpr <?> msg
msg = "an expression or operation (the statement ended early!)"
-- Precedence is from top to bottom (highest first).
opTable = [ [ inf ParOp AssocRight, inf SeqOp AssocRight]
, [ pref Not, pref Neg]
, [ math Mult, math Div]
, [ math Add, math Sub]
, [ inf Cons AssocRight, inf Cat AssocRight]
, [ bool Lt, bool Lte, bool Gt, bool Gte]
, [ bool Eq, bool Neq]
]
where
pref op = Prefix (b_resop (show op) *> return (UnOpExpr op))
inf op = Infix (b_resop (show op) *> return (BinOpExpr op))
math op = inf op AssocLeft
bool = math -- same structure, just differentiate in the table
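-- As a concrete reading of the table: "1 + 2 * 3 : xs" groups as
-- ((1 + (2 * 3)) : xs), since Mult binds tighter than Add, which in turn binds
-- tighter than Cons.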
-- These are mainly grouped to try to avoid conflicts and overlapping "try"s.
-- Note, Rest, Lambda all start with '('. The next few are all distinct when
-- they start, but then PitchClass (very restricted) overlaps with App (somewhat
-- restricted -- must be ident({expr})), which in turn overlaps with Var.
-- Bool is last for no particular reason?
parseTerm :: Parser Expr
parseTerm = try parseNote
<|> try parseRest
<|> try parseLambda
<|> parseSnippet
<|> parseList
<|> parseNum
<|> parseIf
<|> parseCase
<|> try parsePitchClass
<|> try parseApp
<|> parseVar
<|> parseBool
parseNote :: Parser Expr
parseNote = b_parens (Note <$> parseExpr <*> parseExpr <*> parseExpr)
parseRest :: Parser Expr
parseRest = Rest <$> b_parens (b_reserved "rest" *> parseExpr)
-- We try to do something useful here:
-- Lambdas are defined as a sequence of patterns followed by a body.
-- The body is either a single expression or a sequence of assign statements
-- terminated by a return statement. So our parser reflects that: it tries to
-- parse several Assigns, stopping if it encounters a Return; otherwise it tries
-- to parse an expression.
parseLambda :: Parser Expr
parseLambda = b_parens (Lambda <$> args <*> body)
where
args = b_resop "\\" *> manyTill parsePat (b_resop "->")
body = try bodyStmt <|> bodyExpr
bodyExpr = Return <$> parseExpr <* option "" b_semi -- sugar for e.g. (\ a b -> a + b)
bodyStmt = do
bod <- manyTill (parseAssign <* option "" b_semi) (try $ lookAhead parseReturn)
ret <- parseReturn <* option "" b_semi
return (Seq (bod ++ [ret]))
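-- Both of these forms should therefore be accepted, for example:
--   (\ a b -> a + b)
--   (\ x -> y = x * 2; return y + 1;)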
parseSnippet :: Parser Expr
parseSnippet = Snippet <$> b_braces (b_commaSep parseExpr)
parseVar :: Parser Expr
parseVar = Var <$> b_identifier
parsePitchClass :: Parser Expr
parsePitchClass = PitchClass <$> (read <$> parser) <*> getLoc
where
parser = choice (map (try . b_symbol) pitchClasses) <?> msg
msg = "capitol letter A-G, possibly followed by ff, f, s or ss"
parseList :: Parser Expr
parseList = List <$> b_brackets (b_commaSep parseExpr)
parseBool :: Parser Expr
parseBool = parseTrue <|> parseFalse
where
parseTrue = b_reserved "true" *> return (B True) <?> "true"
parseFalse = b_reserved "false" *> return (B False) <?> "false"
-- So here's the deal: Many languages have a unary + operator. It doesn't
-- actually do anything beyond type promotion. (e.g. +x if x is an unsigned short
-- yields x as a signed integer) In other languages it's a nop.
-- Since "-" (unary minus) is handled as an operation, I'm simplifying this
-- function to just parse numbers. Unary "+" is no longer a thing.
-- (There was a bug here: "+5" would return -5 due to stupidity of handling sign
-- parsing.)
parseNum :: Parser Expr
parseNum = either N D <$> b_number <*> getLoc
-- TODO this is terribly hacky, but it's the easiest way to make sure x (y + z)
-- is treated as 2 expressions, not as a single function application! (note the
-- "notFollowedBy b_whitespace" parser after ident!)
parseApp :: Parser Expr
parseApp = App <$> (ident <* notFollowedBy b_whitespace) <*> b_parens (b_commaSep parseExpr)
where ident = (:) <$> (lower <|> char '_') <*> many (alphaNum <|> char '_' <|> char '-')
parseIf :: Parser Expr
parseIf = If <$> (b_reserved "if" *> parseExpr)
<*> (b_reserved "then" *> parseExpr)
<*> (b_reserved "else" *> parseExpr)
parseCase :: Parser Expr
parseCase = Case <$> (b_reserved "case" *> parseExpr <* b_reserved "of")
<*> b_semiSep1 parsePats
where parsePats = (,) <$> (parsePat <* b_resop "->") <*> parseExpr
-- ===================
-- Parsing Patterns
-- ===================
parsePat :: Parser Pat
parsePat = try parsePatPC
<|> try parsePatNum
<|> try parsePatBool
<|> try parsePatNote
<|> try parsePatRest
<|> try parsePatSplit
<|> parsePatList
<|> parsePatSnippet
<|> try parsePatWC
<|> parsePatVar
parsePatPC :: Parser Pat
parsePatPC = Ppc . read <$> choice (map (try . b_symbol) pitchClasses)
-- Since patterns have no negation operator, we do have to parse negative
-- numbers explicitly here.
-- On the other hand, with no operators to worry about, this is easy.
-- TODO: No do notation?
parsePatNum :: Parser Pat
parsePatNum = do
sign <- optionMaybe (char '-')
p <- b_number
return $ case p of
Left i -> Pn (signed sign i)
Right d -> Pd (signed sign d)
where signed s = (*) (maybe 1 (const (-1)) s)
-- Why waste a perfectly good parseBool?
parsePatBool :: Parser Pat
parsePatBool = fmap (\(B b) -> Pb b) parseBool
parsePatNote :: Parser Pat
parsePatNote = b_parens (Pnote <$> parsePat <*> parsePat <*> parsePat)
parsePatRest :: Parser Pat
parsePatRest = Prest <$> b_parens (b_reserved "rest" *> parsePat)
parsePatSplit :: Parser Pat
parsePatSplit = b_parens (chainr1 term (b_resop ":" *> return Psplit))
where term = parsePat
parsePatList :: Parser Pat
parsePatList = Plist <$> b_brackets (b_commaSep parsePat)
parsePatSnippet :: Parser Pat
parsePatSnippet = Psnip <$> b_braces (b_commaSep parsePat)
parsePatWC :: Parser Pat
parsePatWC = Pwc <$ b_reserved "_"
-- My obsession with removing do notation led to this particular monstrosity.
-- My Frankenstein's Monster! If we have an identifier followed by an "@", we
-- have an at-pattern and have to assign the rest of the pattern to the
-- identifier before parsing the rest of it.
-- Otherwise it's just a normal pattern variable.
parsePatVar :: Parser Pat
parsePatVar = flip (maybe Pvar (flip Ppat)) <$> b_identifier <*> optionMaybe (b_resop "@" *> parsePat)
-- Do-notation version left for posterity.
-- parsePatVar = do
-- name <- b_identifier
-- at <- optionMaybe (b_resop "@" *> parsePat)
-- return $ case at of
-- Just pat -> Ppat name pat
-- Nothing -> Pvar name
-- ============
-- Utility
-- ============
-- Used by the literal parsers to grab the location from source code.
-- This must be used consistently; if you call it after parsing in one parser,
-- you must do the same everywhere else!
-- This COULD be replaced with Control.Arrow.&&& but I'm not sure if we want
-- that kind of magic here. (Maybe FIXME later)
getLoc :: Parser (Int,Int)
getLoc = do
pos <- getPosition
let loc = (sourceLine pos, sourceColumn pos)
return loc
-- Turn the common durations into their equivalent Euterpea names.
-- This isn't actually used here. Or anywhere.
durToStr :: E.Dur -> String
durToStr d
| d == E.bn = "bn"
| d == E.wn = "wn"
| d == E.hn = "hn"
| d == E.qn = "qn"
| d == E.en = "en"
| d == E.sn = "sn"
| d == E.sfn = "sfn"
| d == E.tn = "tn"
| d == E.dwn = "dwn"
| d == E.dhn = "dhn"
| d == E.dqn = "dqn"
| d == E.den = "den"
| d == E.dsn = "dsn"
| d == E.dtn = "dtn"
| d == E.ddhn = "ddhn"
| d == E.ddqn = "ddqn"
| d == E.dden = "dden"
| otherwise = show d
| Solumin/ScriptNScribe | src/BreveLang.hs | mit | 18,041 | 0 | 16 | 4,673 | 3,688 | 1,980 | 1,708 | 258 | 2 |
{-# LANGUAGE TemplateHaskell #-}
module Bank.Models.Customer.Commands
( customerCommands
, CreateCustomer (..)
) where
import Language.Haskell.TH (Name)
import Bank.Json
customerCommands :: [Name]
customerCommands =
[ ''CreateCustomer
]
data CreateCustomer =
CreateCustomer
{ createCustomerData :: String
} deriving (Show, Eq)
deriveJSONUnPrefixLower ''CreateCustomer
| jdreaver/eventful | examples/bank/src/Bank/Models/Customer/Commands.hs | mit | 390 | 0 | 8 | 62 | 89 | 54 | 35 | 14 | 1 |
{-# LANGUAGE CPP #-}
module Arguments where
#if (MIN_VERSION_base(4,9,0))
import Data.Monoid
#endif
import Options.Applicative
import System.Environment (getArgs)
import Config
import Dispatch.Types
import Types
import Utils
getInstructions :: IO Instructions
getInstructions = do
mins <- fmap transformOptions getOptions
case mins of
Right ins -> return ins
Left err -> die $ "Failed to parse instructions\n" ++ err
transformOptions :: Options -> Either String Instructions
transformOptions (dispatch, go) = (,) <$> pure dispatch <*> configFromOptions go
configFromOptions :: GlobalOptions -> Either String SparkConfig
configFromOptions go = Right conf
where
conf = defaultConfig
{ conf_compile_output = opt_output go
, conf_compile_kind = opt_kind go
, conf_compile_override = opt_overrride go
, conf_deploy_replace_links = opt_replace_links go || opt_replace go
, conf_deploy_replace_files = opt_replace_files go || opt_replace go
, conf_deploy_replace_directories = opt_replace_directories go || opt_replace go
, conf_debug = opt_debug go
}
getOptions :: IO Options
getOptions = do
args <- getArgs
let result = runOptionsParser args
handleParseResult result
runOptionsParser :: [String] -> ParserResult Options
runOptionsParser strs = execParserPure prefs optionsParser strs
where
prefs = ParserPrefs
{ prefMultiSuffix = "SPARK" -- metavar suffix for multiple options
, prefDisambiguate = True -- automatically disambiguate abbreviations (default: False)
, prefShowHelpOnError = True -- always show help text on parse errors (default: False)
, prefBacktrack = True -- backtrack to parent parser when a subcommand fails (default: True)
, prefColumns = 80 -- number of columns in the terminal, used to format the help page (default: 80)
}
optionsParser :: ParserInfo Options
optionsParser = info (helper <*> parseOptions) help
where
help = fullDesc <> progDesc description
description = "Super User Spark, author: Tom Sydney Kerckhove"
parseOptions :: Parser Options
parseOptions = (,) <$> parseCommand <*> parseGlobalOptions
parseCommand :: Parser Dispatch
parseCommand = hsubparser $ mconcat
[ command "parse" parseParse
, command "compile" parseCompile
, command "check" parseCheck
, command "deploy" parseDeploy
]
parseParse :: ParserInfo Dispatch
parseParse = info parser modifier
where
parser = DispatchParse <$> strArgument (metavar "FILE" <> help "the file to parse")
modifier = fullDesc
<> progDesc "Parse a spark file and check for syntactic errors."
parseCompile :: ParserInfo Dispatch
parseCompile = info parser modifier
where
parser = DispatchCompile
<$> argument auto
(metavar "CARD" <> help "the card file to compile")
modifier = fullDesc
<> progDesc "Compile a spark card."
parseCheck :: ParserInfo Dispatch
parseCheck = info parser modifier
where
parser = DispatchCheck
<$> argument auto
(metavar "CARD" <> help "the card to check")
modifier = fullDesc
<> progDesc "Check the deployment of a spark card."
parseDeploy :: ParserInfo Dispatch
parseDeploy = info parser modifier
where
parser = DispatchDeploy
<$> argument auto
(metavar "CARD" <> help "the card to deploy") -- TODO more help
modifier = fullDesc
<> progDesc "Deploy a spark card."
parseGlobalOptions :: Parser GlobalOptions
parseGlobalOptions = GlobalOptions
<$> option (Just <$> str)
( long "output"
<> short 'o'
<> value Nothing
<> metavar "FILE"
<> help "The output file for compilation" )
<*> option (Just <$> auto)
( long "kind"
<> short 'k'
<> value Nothing
<> metavar "KIND"
<> help "The kind specification for unspecified deployments (default: link)" )
<*> option (Just <$> auto)
( long "override"
<> short 'O'
<> value Nothing
<> metavar "KIND"
<> help "Override every deployment to be of the given kind" )
<*> switch
( long "replace-links"
<> help "Replace links at deploy destinations." )
<*> switch
( long "replace-files"
<> help "Replace existing files at deploy destinations." )
<*> switch
( long "replace-Directories"
<> help "Replace existing directories at deploy destinations." )
<*> switch
( long "replace-all"
<> short 'r'
<> help "Equivalent to --replace-files --replace-directories --replace-links" )
<*> switch
( long "debug"
<> help "Show al debug information." )
| badi/super-user-spark | src/Arguments.hs | mit | 4,913 | 0 | 19 | 1,349 | 1,002 | 503 | 499 | 113 | 2 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE PatternGuards #-}
-- | A sqlite backend for persistent.
--
-- Note: If you prepend @WAL=off @ to your connection string, it will disable
-- the write-ahead log. This functionality is now deprecated in favour of using SqliteConnectionInfo.
module Database.Persist.Sqlite
( withSqlitePool
, withSqlitePoolInfo
, withSqliteConn
, withSqliteConnInfo
, createSqlitePool
, createSqlitePoolFromInfo
, module Database.Persist.Sql
, SqliteConf (..)
, SqliteConnectionInfo
, mkSqliteConnectionInfo
, sqlConnectionStr
, walEnabled
, fkEnabled
, runSqlite
, runSqliteInfo
, wrapConnection
, wrapConnectionInfo
, mockMigration
) where
import Database.Persist.Sql
import Database.Persist.Sql.Types.Internal (mkPersistBackend)
import qualified Database.Sqlite as Sqlite
import Control.Applicative as A
import qualified Control.Exception as E
import Control.Monad (when)
import Control.Monad.IO.Class (MonadIO (..))
import Control.Monad.Logger (NoLoggingT, runNoLoggingT, MonadLogger)
import Control.Monad.Trans.Control (control)
import Control.Monad.Trans.Control (MonadBaseControl)
import Control.Monad.Trans.Reader (ReaderT, runReaderT)
import Control.Monad.Trans.Resource (ResourceT, runResourceT)
import Control.Monad.Trans.Writer (runWriterT)
import Data.Acquire (Acquire, mkAcquire, with)
import Data.Aeson
import Data.Aeson.Types (modifyFailure)
import Data.Conduit
import qualified Data.Conduit.List as CL
import qualified Data.HashMap.Lazy as HashMap
import Data.Int (Int64)
import Data.IORef
import qualified Data.Map as Map
import Data.Monoid ((<>))
import Data.Pool (Pool)
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import Lens.Micro.TH (makeLenses)
-- | Create a pool of SQLite connections.
--
-- Note that this should not be used with the @:memory:@ connection string, as
-- the pool will regularly remove connections, destroying your database.
-- Instead, use 'withSqliteConn'.
createSqlitePool :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, IsSqlBackend backend)
=> Text -> Int -> m (Pool backend)
createSqlitePool = createSqlitePoolFromInfo . conStringToInfo
-- | Create a pool of SQLite connections.
--
-- Note that this should not be used with the @:memory:@ connection string, as
-- the pool will regularly remove connections, destroying your database.
-- Instead, use 'withSqliteConn'.
createSqlitePoolFromInfo :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, IsSqlBackend backend)
=> SqliteConnectionInfo -> Int -> m (Pool backend)
createSqlitePoolFromInfo connInfo = createSqlPool $ open' connInfo
-- | Run the given action with a connection pool.
--
-- Like 'createSqlitePool', this should not be used with @:memory:@.
withSqlitePool :: (MonadBaseControl IO m, MonadIO m, MonadLogger m, IsSqlBackend backend)
=> Text
-> Int -- ^ number of connections to open
-> (Pool backend -> m a) -> m a
withSqlitePool connInfo = withSqlPool . open' $ conStringToInfo connInfo
-- | Run the given action with a connection pool.
--
-- Like 'createSqlitePool', this should not be used with @:memory:@.
withSqlitePoolInfo :: (MonadBaseControl IO m, MonadIO m, MonadLogger m, IsSqlBackend backend)
=> SqliteConnectionInfo
-> Int -- ^ number of connections to open
-> (Pool backend -> m a) -> m a
withSqlitePoolInfo connInfo = withSqlPool $ open' connInfo
withSqliteConn :: (MonadBaseControl IO m, MonadIO m, MonadLogger m, IsSqlBackend backend)
=> Text -> (backend -> m a) -> m a
withSqliteConn = withSqliteConnInfo . conStringToInfo
withSqliteConnInfo :: (MonadBaseControl IO m, MonadIO m, MonadLogger m, IsSqlBackend backend)
=> SqliteConnectionInfo -> (backend -> m a) -> m a
withSqliteConnInfo = withSqlConn . open'
open' :: (IsSqlBackend backend) => SqliteConnectionInfo -> LogFunc -> IO backend
open' connInfo logFunc = do
conn <- Sqlite.open $ _sqlConnectionStr connInfo
wrapConnectionInfo connInfo conn logFunc `E.onException` Sqlite.close conn
-- | Wrap up a raw 'Sqlite.Connection' as a Persistent SQL 'Connection'.
--
-- Since 1.1.5
wrapConnection :: (IsSqlBackend backend) => Sqlite.Connection -> LogFunc -> IO backend
wrapConnection = wrapConnectionInfo (mkSqliteConnectionInfo "")
-- | Wrap up a raw 'Sqlite.Connection' as a Persistent SQL 'Connection', allowing full control over WAL and FK constraints.
wrapConnectionInfo :: (IsSqlBackend backend)
=> SqliteConnectionInfo
-> Sqlite.Connection
-> LogFunc
-> IO backend
wrapConnectionInfo connInfo conn logFunc = do
when (_walEnabled connInfo) $ do
-- Turn on the write-ahead log
-- https://github.com/yesodweb/persistent/issues/363
turnOnWal <- Sqlite.prepare conn "PRAGMA journal_mode=WAL;"
_ <- Sqlite.step turnOnWal
Sqlite.reset conn turnOnWal
Sqlite.finalize turnOnWal
when (_fkEnabled connInfo) $ do
-- Turn on foreign key constraints
-- https://github.com/yesodweb/persistent/issues/646
turnOnFK <- Sqlite.prepare conn "PRAGMA foreign_keys = on;"
_ <- Sqlite.step turnOnFK
Sqlite.reset conn turnOnFK
Sqlite.finalize turnOnFK
smap <- newIORef $ Map.empty
return . mkPersistBackend $ SqlBackend
{ connPrepare = prepare' conn
, connStmtMap = smap
, connInsertSql = insertSql'
, connUpsertSql = Nothing
, connInsertManySql = Nothing
, connClose = Sqlite.close conn
, connMigrateSql = migrate'
, connBegin = helper "BEGIN"
, connCommit = helper "COMMIT"
, connRollback = ignoreExceptions . helper "ROLLBACK"
, connEscapeName = escape
, connNoLimit = "LIMIT -1"
, connRDBMS = "sqlite"
, connLimitOffset = decorateSQLWithLimitOffset "LIMIT -1"
, connLogFunc = logFunc
, connMaxParams = Just 999
}
where
helper t getter = do
stmt <- getter t
_ <- stmtExecute stmt []
stmtReset stmt
ignoreExceptions = E.handle (\(_ :: E.SomeException) -> return ())
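-- A brief sketch (not part of the original module) of customizing the
-- connection info before wrapping a connection; 'mkSqliteConnectionInfo'
-- and the 'walEnabled' lens are defined further below in this module, and
-- 'set' would come from Lens.Micro:
--
-- > import Lens.Micro (set)
-- >
-- > fkOnlyInfo :: SqliteConnectionInfo
-- > fkOnlyInfo = set walEnabled False (mkSqliteConnectionInfo "example.db")
--
-- The resulting value can then be passed to 'wrapConnectionInfo' or
-- 'withSqliteConnInfo'.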
-- | A convenience helper which creates a new database connection and runs the
-- given block, handling @MonadResource@ and @MonadLogger@ requirements. Note
-- that all log messages are discarded.
--
-- Since 1.1.4
runSqlite :: (MonadBaseControl IO m, MonadIO m, IsSqlBackend backend)
=> Text -- ^ connection string
-> ReaderT backend (NoLoggingT (ResourceT m)) a -- ^ database action
-> m a
runSqlite connstr = runResourceT
. runNoLoggingT
. withSqliteConn connstr
. runSqlConn
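-- An illustrative (hypothetical) use of 'runSqlite'; @Person@ and
-- @migrateAll@ are assumed to come from persistent's usual
-- @mkPersist@/@mkMigrate@ Template Haskell and are not defined here:
--
-- > main :: IO ()
-- > main = runSqlite "example.db" $ do
-- >     runMigration migrateAll
-- >     _ <- insert (Person "Alice")
-- >     return ()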
-- | A convenience helper which creates a new database connection and runs the
-- given block, handling @MonadResource@ and @MonadLogger@ requirements. Note
-- that all log messages are discarded.
--
-- Since 1.1.4
runSqliteInfo :: (MonadBaseControl IO m, MonadIO m, IsSqlBackend backend)
=> SqliteConnectionInfo
-> ReaderT backend (NoLoggingT (ResourceT m)) a -- ^ database action
-> m a
runSqliteInfo conInfo = runResourceT
. runNoLoggingT
. withSqliteConnInfo conInfo
. runSqlConn
prepare' :: Sqlite.Connection -> Text -> IO Statement
prepare' conn sql = do
stmt <- Sqlite.prepare conn sql
return Statement
{ stmtFinalize = Sqlite.finalize stmt
, stmtReset = Sqlite.reset conn stmt
, stmtExecute = execute' conn stmt
, stmtQuery = withStmt' conn stmt
}
insertSql' :: EntityDef -> [PersistValue] -> InsertSqlResult
insertSql' ent vals =
case entityPrimary ent of
Just _ ->
ISRManyKeys sql vals
where sql = T.concat
[ "INSERT INTO "
, escape $ entityDB ent
, "("
, T.intercalate "," $ map (escape . fieldDB) $ entityFields ent
, ") VALUES("
, T.intercalate "," (map (const "?") $ entityFields ent)
, ")"
]
Nothing ->
ISRInsertGet ins sel
where
sel = T.concat
[ "SELECT "
, escape $ fieldDB (entityId ent)
, " FROM "
, escape $ entityDB ent
, " WHERE _ROWID_=last_insert_rowid()"
]
ins = T.concat
[ "INSERT INTO "
, escape $ entityDB ent
, if null (entityFields ent)
then " VALUES(null)"
else T.concat
[ "("
, T.intercalate "," $ map (escape . fieldDB) $ entityFields ent
, ") VALUES("
, T.intercalate "," (map (const "?") $ entityFields ent)
, ")"
]
]
execute' :: Sqlite.Connection -> Sqlite.Statement -> [PersistValue] -> IO Int64
execute' conn stmt vals = flip finally (liftIO $ Sqlite.reset conn stmt) $ do
Sqlite.bind stmt vals
_ <- Sqlite.step stmt
Sqlite.changes conn
withStmt'
:: MonadIO m
=> Sqlite.Connection
-> Sqlite.Statement
-> [PersistValue]
-> Acquire (Source m [PersistValue])
withStmt' conn stmt vals = do
_ <- mkAcquire
(Sqlite.bind stmt vals >> return stmt)
(Sqlite.reset conn)
return pull
where
pull = do
x <- liftIO $ Sqlite.step stmt
case x of
Sqlite.Done -> return ()
Sqlite.Row -> do
cols <- liftIO $ Sqlite.columns stmt
yield cols
pull
showSqlType :: SqlType -> Text
showSqlType SqlString = "VARCHAR"
showSqlType SqlInt32 = "INTEGER"
showSqlType SqlInt64 = "INTEGER"
showSqlType SqlReal = "REAL"
showSqlType (SqlNumeric precision scale) = T.concat [ "NUMERIC(", T.pack (show precision), ",", T.pack (show scale), ")" ]
showSqlType SqlDay = "DATE"
showSqlType SqlTime = "TIME"
showSqlType SqlDayTime = "TIMESTAMP"
showSqlType SqlBlob = "BLOB"
showSqlType SqlBool = "BOOLEAN"
showSqlType (SqlOther t) = t
migrate' :: [EntityDef]
-> (Text -> IO Statement)
-> EntityDef
-> IO (Either [Text] [(Bool, Text)])
migrate' allDefs getter val = do
let (cols, uniqs, _) = mkColumns allDefs val
let newSql = mkCreateTable False def (filter (not . safeToRemove val . cName) cols, uniqs)
stmt <- getter "SELECT sql FROM sqlite_master WHERE type='table' AND name=?"
oldSql' <- with (stmtQuery stmt [PersistText $ unDBName table]) ($$ go)
case oldSql' of
Nothing -> return $ Right [(False, newSql)]
Just oldSql -> do
if oldSql == newSql
then return $ Right []
else do
sql <- getCopyTable allDefs getter val
return $ Right sql
where
def = val
table = entityDB def
go = do
x <- CL.head
case x of
Nothing -> return Nothing
Just [PersistText y] -> return $ Just y
Just y -> error $ "Unexpected result from sqlite_master: " ++ show y
-- | Mock a migration even when the database is not present.
-- This function performs the same functionality as 'printMigration',
-- except that an actual database connection isn't needed.
mockMigration :: Migration -> IO ()
mockMigration mig = do
smap <- newIORef $ Map.empty
let sqlbackend = SqlBackend
{ connPrepare = \_ -> do
return Statement
{ stmtFinalize = return ()
, stmtReset = return ()
, stmtExecute = undefined
, stmtQuery = \_ -> return $ return ()
}
, connStmtMap = smap
, connInsertSql = insertSql'
, connInsertManySql = Nothing
, connClose = undefined
, connMigrateSql = migrate'
, connBegin = helper "BEGIN"
, connCommit = helper "COMMIT"
, connRollback = ignoreExceptions . helper "ROLLBACK"
, connEscapeName = escape
, connNoLimit = "LIMIT -1"
, connRDBMS = "sqlite"
, connLimitOffset = decorateSQLWithLimitOffset "LIMIT -1"
, connLogFunc = undefined
, connUpsertSql = undefined
, connMaxParams = Just 999
}
result = runReaderT . runWriterT . runWriterT $ mig
resp <- result sqlbackend
mapM_ TIO.putStrLn $ map snd $ snd resp
where
helper t getter = do
stmt <- getter t
_ <- stmtExecute stmt []
stmtReset stmt
ignoreExceptions = E.handle (\(_ :: E.SomeException) -> return ())
-- | Check whether a column is marked with the "SafeToRemove" attribute in
-- the entity definition.
safeToRemove :: EntityDef -> DBName -> Bool
safeToRemove def (DBName colName)
= any (elem "SafeToRemove" . fieldAttrs)
$ filter ((== DBName colName) . fieldDB)
$ entityFields def
getCopyTable :: [EntityDef]
-> (Text -> IO Statement)
-> EntityDef
-> IO [(Bool, Text)]
getCopyTable allDefs getter def = do
stmt <- getter $ T.concat [ "PRAGMA table_info(", escape table, ")" ]
oldCols' <- with (stmtQuery stmt []) ($$ getCols)
let oldCols = map DBName $ filter (/= "id") oldCols' -- need to update for table id attribute ?
let newCols = filter (not . safeToRemove def) $ map cName cols
let common = filter (`elem` oldCols) newCols
let id_ = fieldDB (entityId def)
return [ (False, tmpSql)
, (False, copyToTemp $ id_ : common)
, (common /= filter (not . safeToRemove def) oldCols, dropOld)
, (False, newSql)
, (False, copyToFinal $ id_ : newCols)
, (False, dropTmp)
]
where
getCols = do
x <- CL.head
case x of
Nothing -> return []
Just (_:PersistText name:_) -> do
names <- getCols
return $ name : names
Just y -> error $ "Invalid result from PRAGMA table_info: " ++ show y
table = entityDB def
tableTmp = DBName $ unDBName table <> "_backup"
(cols, uniqs, _) = mkColumns allDefs def
cols' = filter (not . safeToRemove def . cName) cols
newSql = mkCreateTable False def (cols', uniqs)
tmpSql = mkCreateTable True def { entityDB = tableTmp } (cols', uniqs)
dropTmp = "DROP TABLE " <> escape tableTmp
dropOld = "DROP TABLE " <> escape table
copyToTemp common = T.concat
[ "INSERT INTO "
, escape tableTmp
, "("
, T.intercalate "," $ map escape common
, ") SELECT "
, T.intercalate "," $ map escape common
, " FROM "
, escape table
]
copyToFinal newCols = T.concat
[ "INSERT INTO "
, escape table
, " SELECT "
, T.intercalate "," $ map escape newCols
, " FROM "
, escape tableTmp
]
mkCreateTable :: Bool -> EntityDef -> ([Column], [UniqueDef]) -> Text
mkCreateTable isTemp entity (cols, uniqs) =
case entityPrimary entity of
Just pdef ->
T.concat
[ "CREATE"
, if isTemp then " TEMP" else ""
, " TABLE "
, escape $ entityDB entity
, "("
, T.drop 1 $ T.concat $ map sqlColumn cols
, ", PRIMARY KEY "
, "("
, T.intercalate "," $ map (escape . fieldDB) $ compositeFields pdef
, ")"
, ")"
]
Nothing -> T.concat
[ "CREATE"
, if isTemp then " TEMP" else ""
, " TABLE "
, escape $ entityDB entity
, "("
, escape $ fieldDB (entityId entity)
, " "
, showSqlType $ fieldSqlType $ entityId entity
," PRIMARY KEY"
, mayDefault $ defaultAttribute $ fieldAttrs $ entityId entity
, T.concat $ map sqlColumn cols
, T.concat $ map sqlUnique uniqs
, ")"
]
mayDefault :: Maybe Text -> Text
mayDefault def = case def of
Nothing -> ""
Just d -> " DEFAULT " <> d
sqlColumn :: Column -> Text
sqlColumn (Column name isNull typ def _cn _maxLen ref) = T.concat
[ ","
, escape name
, " "
, showSqlType typ
, if isNull then " NULL" else " NOT NULL"
, mayDefault def
, case ref of
Nothing -> ""
Just (table, _) -> " REFERENCES " <> escape table
]
sqlUnique :: UniqueDef -> Text
sqlUnique (UniqueDef _ cname cols _) = T.concat
[ ",CONSTRAINT "
, escape cname
, " UNIQUE ("
, T.intercalate "," $ map (escape . snd) cols
, ")"
]
escape :: DBName -> Text
escape (DBName s) =
T.concat [q, T.concatMap go s, q]
where
q = T.singleton '"'
go '"' = "\"\""
go c = T.singleton c
-- | Information required to setup a connection pool.
data SqliteConf = SqliteConf
{ sqlDatabase :: Text
, sqlPoolSize :: Int
}
| SqliteConfInfo
{ sqlConnInfo :: SqliteConnectionInfo
, sqlPoolSize :: Int
} deriving Show
instance FromJSON SqliteConf where
parseJSON v = modifyFailure ("Persistent: error loading Sqlite conf: " ++) $ flip (withObject "SqliteConf") v parser where
parser o = if HashMap.member "database" o
then SqliteConf
A.<$> o .: "database"
A.<*> o .: "poolsize"
else SqliteConfInfo
A.<$> o .: "connInfo"
A.<*> o .: "poolsize"
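-- For illustration (not part of the original module), the two JSON shapes
-- this parser accepts look like:
--
-- > { "database": "example.db", "poolsize": 10 }
--
-- and
--
-- > { "connInfo": { "connectionString": "example.db"
-- >               , "walEnabled": true
-- >               , "fkEnabled": true }
-- > , "poolsize": 10 }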
instance PersistConfig SqliteConf where
type PersistConfigBackend SqliteConf = SqlPersistT
type PersistConfigPool SqliteConf = ConnectionPool
createPoolConfig (SqliteConf cs size) = runNoLoggingT $ createSqlitePoolFromInfo (conStringToInfo cs) size -- FIXME
createPoolConfig (SqliteConfInfo info size) = runNoLoggingT $ createSqlitePoolFromInfo info size -- FIXME
runPool _ = runSqlPool
loadConfig = parseJSON
finally :: MonadBaseControl IO m
=> m a -- ^ computation to run first
-> m b -- ^ computation to run afterward (even if an exception was raised)
-> m a
finally a sequel = control $ \runInIO ->
E.finally (runInIO a)
(runInIO sequel)
{-# INLINABLE finally #-}
-- | Creates a SqliteConnectionInfo from a connection string, with the default settings.
mkSqliteConnectionInfo :: Text -> SqliteConnectionInfo
mkSqliteConnectionInfo fp = SqliteConnectionInfo fp True True
-- | Parses connection options from a connection string. Used only to support the deprecated API.
conStringToInfo :: Text -> SqliteConnectionInfo
conStringToInfo connStr = SqliteConnectionInfo connStr' enableWal True where
(connStr', enableWal) = case () of
()
| Just cs <- T.stripPrefix "WAL=on " connStr -> (cs, True)
| Just cs <- T.stripPrefix "WAL=off " connStr -> (cs, False)
| otherwise -> (connStr, True)
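-- For illustration (not part of the original module): the deprecated
-- prefix is stripped from the string and recorded in the WAL flag, e.g.
--
-- > _sqlConnectionStr (conStringToInfo "WAL=off test.db") == "test.db"
-- > _walEnabled (conStringToInfo "WAL=off test.db") == False
-- > _walEnabled (conStringToInfo "test.db") == True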
-- | Information required to connect to a sqlite database. We export lenses instead of fields to avoid being limited to the current implementation.
data SqliteConnectionInfo = SqliteConnectionInfo
{ _sqlConnectionStr :: Text -- ^ connection string for the database. Use @:memory:@ for an in-memory database.
, _walEnabled :: Bool -- ^ if the write-ahead log is enabled - see https://github.com/yesodweb/persistent/issues/363.
, _fkEnabled :: Bool -- ^ if foreign-key constraints are enabled.
} deriving Show
makeLenses ''SqliteConnectionInfo
instance FromJSON SqliteConnectionInfo where
parseJSON v = modifyFailure ("Persistent: error loading SqliteConnectionInfo: " ++) $
flip (withObject "SqliteConnectionInfo") v $ \o -> SqliteConnectionInfo
<$> o .: "connectionString"
<*> o .: "walEnabled"
<*> o .: "fkEnabled"
| rdnetto/persistent | persistent-sqlite/Database/Persist/Sqlite.hs | mit | 20,603 | 0 | 21 | 6,243 | 4,901 | 2,582 | 2,319 | 435 | 5 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module IHaskell.Display.Widgets.Style.ButtonStyle
( -- * Button style
ButtonStyle
-- * Create a new button style
, mkButtonStyle
) where
-- To keep `cabal repl` happy when running from the ihaskell repo
import Prelude
import Data.Aeson
import Data.IORef (newIORef)
import Data.Vinyl (Rec(..), (<+>))
import IHaskell.Display
import IHaskell.Eval.Widgets
import IHaskell.IPython.Message.UUID as U
import IHaskell.Display.Widgets.Types
import IHaskell.Display.Widgets.Common
-- | A 'ButtonStyle' represents a Button Style from IPython.html.widgets.
type ButtonStyle = IPythonWidget 'ButtonStyleType
-- | Create a new button style
mkButtonStyle :: IO ButtonStyle
mkButtonStyle = do
wid <- U.random
let stl = defaultStyleWidget "ButtonStyleModel"
but = (ButtonColor =:: Nothing)
:& (FontWeight =:: DefaultWeight)
:& RNil
btnStlState = WidgetState (stl <+> but)
stateIO <- newIORef btnStlState
let style = IPythonWidget wid stateIO
-- Open a comm for this widget, and store it in the kernel state
widgetSendOpen style $ toJSON btnStlState
-- Return the style widget
return style
instance IHaskellWidget ButtonStyle where
getCommUUID = uuid
| gibiansky/IHaskell | ihaskell-display/ihaskell-widgets/src/IHaskell/Display/Widgets/Style/ButtonStyle.hs | mit | 1,480 | 0 | 13 | 344 | 243 | 141 | 102 | 33 | 1 |
module MppCodeGen (gen_code) where
{- Code Generation for M++
=======================
Generates AM Stack machine code for M++ programs, given their
intermediate representation.
Refer to the AM specification for details of the code generation.
Usage
=====
gen_code ir : Return the AM source code for the given program.
-}
import MppIR
import Debug.Trace
type LabelCounter = (Int,Int)
-- Top-level code generation function.
-- Returns the AM source code for the given IR.
gen_code :: I_prog -> String
gen_code p = gen_prog p (0,0)
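-- A minimal illustrative input (a sketch using the MppIR constructors that
-- appear throughout this module; not part of the original): a program with
-- no functions, no variables, no array descriptors and a single print.
--
-- > example :: String
-- > example = gen_code (I_PROG ([], 0, [], [I_PRINT_I (I_IVAL 42)]))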
-- Generate a label, updating the label reference counter.
make_label :: LabelCounter -> (String, LabelCounter)
make_label (n,c) = ("label" ++ show (n+1) ++ ":", (n+1,c))
-- Generate a function label.
fun_label :: String -> String
fun_label nm = "fun_" ++ nm ++ ":"
-- Generate the AM source for the given IR.
gen_prog :: I_prog -> LabelCounter -> String
gen_prog (I_PROG (fs,nV,ads,ss)) l
= prog_prologue nV ads
++ stmtsCode
++ prog_epilogue nV ++ "\n"
++ funsCode ++ "\n"
++ innerFunsCode
where
(stmtsCode, l1) = gen_stmts ss l
(funsCode, l2) = gen_funs fs l1
(innerFunsCode, _) = gen_innerfuns ss l2
-- Program header to set up the initial AR.
prog_prologue :: Int -> [(Int, [I_expr])] -> String
prog_prologue nV ads
= "\tLOAD_R %sp\n"
++ "\tLOAD_R %sp\n"
++ "\tSTORE_R %fp\n"
++ "\tALLOC " ++ show nV ++ "\n"
++ "\tLOAD_I " ++ show (nV+2) ++ "\n"
++ gen_arys nV ads
-- Program footer to take down the initial AR.
prog_epilogue :: Int -> String
prog_epilogue nV
= "\tALLOC -" ++ show nV ++ "\n"
++ "\tHALT\n"
-- Generate the source code to allocate stack space for all given
-- array descriptors.
gen_arys :: Int -> [(Int, [I_expr])] -> String
gen_arys nV as = ga nV as (length as) 0 where
ga _ [] nAs _ = ""
ga nV (a:as) nAs an
= gen_ary nV a an
++ ga nV as nAs (an+1)
-- Generate the source code to allocate stack space for a given
-- array descriptor. Upon completion the total allocated array size
-- is added to the prior deallocation record and stored in the
-- deallocation record's stack space.
gen_ary :: Int -> (Int, [I_expr]) -> Int -> String
gen_ary nV (o,[]) anum = ""
gen_ary nV (o,(e:es)) anum
= gen_expr e
++ gen_aryref o
++ gen_exprs es
++ gen_arysize 0 o (length (e:es))
++ gen_dealloc o nV anum (length (e:es))
++ "\tALLOC_S\n"
-- Compute the base address for the array at the given offset.
gen_aryref :: Int -> String
gen_aryref o
= "\tLOAD_R %sp\n"
++ "\tLOAD_R %fp\n"
++ "\tSTORE_O " ++ show o ++ "\n"
-- Compute the total array size for the array on the TOS,
-- putting the value onto the TOS above the array dimension descriptors.
gen_arysize :: Int -> Int -> Int -> String
gen_arysize l o d
= _gen_arysize l o d 0
++ concat (replicate (d-1) "\tAPP MUL\n") where
_gen_arysize l o d c
| d <= c = ""
| otherwise = gen_access_ptr l
++ "\tLOAD_O " ++ show o ++ "\n"
++ "\tLOAD_O " ++ show c ++ "\n"
++ _gen_arysize l o d (c+1)
-- Generate the source code to update the deallocation record with
-- the newly added array, adding its size (currently on the TOS)
-- to the deallocation record.
gen_dealloc :: Int -> Int -> Int -> Int -> String
gen_dealloc o nV anum nDim
= "\tLOAD_R %fp\n"
++ "\tLOAD_O " ++ show (nV + 1) ++ "\n"
++ "\tLOAD_I " ++ show (nDim) ++ "\n"
++ "\tLOAD_R %fp\n"
++ "\tLOAD_O " ++ show o ++ "\n"
++ "\tLOAD_O " ++ show (nDim) ++ "\n"
++ "\tAPP ADD\n"
++ "\tAPP ADD\n"
++ "\tLOAD_R %fp\n"
++ "\tSTORE_O " ++ show (nV + 1) ++ "\n"
-- Follow back the link pointers `level` times, computing a reference
-- to the frame pointer of that past AR.
gen_access_ptr :: Int -> String
gen_access_ptr level
= "\tLOAD_R %fp\n"
++ (concat $ replicate level "\tLOAD_O -2\n")
-- Generate the source code for the given function list.
gen_funs :: [I_fbody] -> LabelCounter -> (String,LabelCounter)
gen_funs [] l = ("",l)
gen_funs (f:fs) l
= (funCode
++ funsCode,
l2) where
(funCode, l1) = gen_fun f l
(funsCode, l2) = gen_funs fs l1
-- Generate the source code for the **CLOSURE** of the statement list
-- given. I.e. this walks the IR rooted at each given statement
-- and extracts all function definitions.
gen_innerfuns :: [I_stmt] -> LabelCounter -> (String, LabelCounter)
gen_innerfuns ss l = _gen_innerfuns (concat (map statementClosure ss)) l
_gen_innerfuns [] l = ("",l)
_gen_innerfuns ((I_BLOCK (fs,_,_,_)):ss) l
= (funCode
++ funsCode,
l') where
(funCode, l1) = gen_funs fs l
(funsCode, l') = _gen_innerfuns ss l1
_gen_innerfuns (_:ss) l = _gen_innerfuns ss l
-- Generate the source code for the given statement list. Function
-- declarations are ignored.
gen_stmts :: [I_stmt] -> LabelCounter -> (String,LabelCounter)
gen_stmts [] l = ("",l)
gen_stmts (s:ss) l
= (stmtCode
++ stmtsCode,
l2) where
(stmtCode, l1) = gen_stmt s l
(stmtsCode, l2) = gen_stmts ss l1
-- Generate the source code for the given expression list.
gen_exprs :: [I_expr] -> String
gen_exprs [] = ""
gen_exprs (e:[]) = gen_expr e
gen_exprs (e:es)
= gen_expr e
++ gen_exprs es
-- Generate the source code for the given function, included all
-- contained function declarations.
gen_fun :: I_fbody -> LabelCounter -> (String,LabelCounter)
gen_fun (I_FUN (nm,fs,nV,nA,ads,ss)) l
= (fun_label nm
++ fun_prologue nV nA ads
++ stmtsCode
++ fun_epilogue nV nA ++ "\n"
++ funsCode
++ innerFunsCode,
l') where
(stmtsCode, l1) = gen_stmts ss l
(funsCode, l2) = gen_funs fs l1
(innerFunsCode, l') = gen_innerfuns ss l2
-- Complete the initialization of the function AR.
fun_prologue :: Int -> Int -> [(Int,[I_expr])] -> String
fun_prologue nV nA ads
= "\tLOAD_R %sp\n"
++ "\tSTORE_R %fp\n"
++ "\tALLOC " ++ show nV ++ "\n"
++ "\tLOAD_I " ++ show (nV+2) ++ "\n"
++ gen_arys nV ads
-- Deallocate the function AR, and return control to the stored caller
-- address. Assumes the return value is at TOS.
fun_epilogue :: Int -> Int -> String
fun_epilogue nV nA
= "\tLOAD_R %fp\n"
++ "\tSTORE_O " ++ show (-(nA+3)) ++ "\n"
++ "\tLOAD_R %fp\n"
++ "\tLOAD_O 0\n"
++ "\tLOAD_R %fp\n"
++ "\tSTORE_O " ++ show (-(nA+2)) ++ "\n"
++ "\tLOAD_R %fp\n"
++ "\tLOAD_O " ++ show (nV+1) ++ "\n"
++ "\tAPP NEG\n"
++ "\tALLOC_S\n"
++ "\tSTORE_R %fp\n"
++ "\tALLOC -" ++ show nA ++ "\n"
++ "\tJUMP_S\n"
-- Generate the IR for the given statement, ignoring contained function
-- definitions.
gen_stmt :: I_stmt -> LabelCounter -> (String,LabelCounter)
gen_stmt (I_ASS (lv,o,[],e)) l
= (gen_expr e
++ gen_access_ptr lv
++ "\tSTORE_O " ++ show o ++ "\n",
l)
gen_stmt (I_ASS (lv,o,aes,e)) l
= (gen_expr e
++ gen_access_ptr lv
++ "\tLOAD_O " ++ show o ++ "\n"
++ gen_aryoffset lv o aes
++ "\tSTORE_OS\n",
l)
gen_stmt (I_WHILE (e,s)) l
= ("\tJUMP label" ++ show (fst lCNum) ++ "\n"
++ startLabel
++ stmtCode
++ condLabel
++ exprCode
++ "\tJUMP_C label" ++ show (fst lSNum) ++ "\n",
lCNum) where
(startLabel, lSNum) = make_label l
(stmtCode, l1) = gen_stmt s lSNum
(condLabel, lCNum) = make_label l1
exprCode = gen_expr e
gen_stmt (I_COND (e,s1,s2)) l
= (exprCode
++ "\tJUMP_C label" ++ show (fst lINum) ++ "\n"
++ stmt2Code
++ "\tJUMP label" ++ show (fst lONum) ++ "\n"
++ ifLabel
++ stmt1Code
++ outLabel,
lONum) where
exprCode = gen_expr e
(stmt2Code, l1) = gen_stmt s1 l
(ifLabel, lINum) = make_label l1
(stmt1Code, l2) = gen_stmt s2 lINum
(outLabel, lONum) = make_label l2
gen_stmt (I_CASE (e,cs)) l
= (gen_expr e
++ "\tLOAD_H\n"
++ "\tJUMP_O\n"
++ cases_jumps cs l
++ casesCode
++ "return_case" ++ show (snd l + 1) ++ ":",
l') where
(casesCode, l') = gen_cases (reverse cs) l
gen_stmt (I_READ_B (lv,o,[])) l
= ("\tREAD_B\n"
++ gen_access_ptr lv
++ "\tSTORE_O " ++ show o ++ "\n",
l)
gen_stmt (I_READ_B (lv,o,es)) l
= ("\tREAD_B\n"
++ gen_access_ptr lv
++ "\tLOAD_O " ++ show o ++ "\n"
++ gen_aryoffset lv o es
++ "\tSTORE_OS\n",
l)
gen_stmt (I_PRINT_B e) l
= (gen_expr e
++ "\tPRINT_B\n",
l)
gen_stmt (I_READ_I (lv,o,[])) l
= ("\tREAD_I\n"
++ gen_access_ptr lv
++ "\tSTORE_O " ++ show o ++ "\n",
l)
gen_stmt (I_READ_I (lv,o,es)) l
= ("\tREAD_I\n"
++ gen_access_ptr lv
++ "\tLOAD_O " ++ show o ++ "\n"
++ gen_aryoffset lv o es
++ "\tSTORE_OS\n",
l)
gen_stmt (I_PRINT_I e) l
= (gen_expr e
++ "\tPRINT_I\n",
l)
gen_stmt (I_READ_F (lv,o,[])) l
= ("\tREAD_F\n"
++ gen_access_ptr lv
++ "\tSTORE_O " ++ show o ++ "\n",
l)
gen_stmt (I_READ_F (lv,o,es)) l
= ("\tREAD_F\n"
++ gen_access_ptr lv
++ "\tLOAD_O " ++ show o ++ "\n"
++ gen_aryoffset lv o es
++ "\tSTORE_OS\n",
l)
gen_stmt (I_PRINT_F e) l
= (gen_expr e
++ "\tPRINT_F\n",
l)
gen_stmt (I_READ_C (lv,o,[])) l
= ("\tREAD_C\n"
++ gen_access_ptr lv
++ "\tSTORE_O " ++ show o ++ "\n",
l)
gen_stmt (I_READ_C (lv,o,es)) l
= ("\tREAD_C\n"
++ gen_access_ptr lv
++ "\tLOAD_O " ++ show o ++ "\n"
++ gen_aryoffset lv o es
++ "\tSTORE_OS\n",
l)
gen_stmt (I_PRINT_C e) l
= (gen_expr e
++ "\tPRINT_C\n",
l)
gen_stmt (I_RETURN e) l
= (gen_expr e,
l)
gen_stmt (I_BLOCK (fs,nV,ads,ss)) l
= (block_prologue nV ads
++ stmtCode
++ block_epilogue nV,
l') where
(stmtCode, l') = gen_stmts ss l
-- Compute the offset from the array pointer's base (i.e. the first
-- dimension) for the given set of indexing expressions. Place this
-- value on the TOS.
gen_aryoffset :: Int -> Int -> [I_expr] -> String
gen_aryoffset l o es
= "\tLOAD_I " ++ show (length es) ++ "\n"
++ aof l o es 0 (length es)
++ concat (replicate (length es) "\tAPP ADD\n") where
aof l o [] d _ = ""
aof l o (e:es) d (nD)
= gen_expr e
++ gen_dims l o (d+1) nD
++ concat (replicate (nD - (d+1)) "\tAPP MUL\n")
++ aof l o es (d+1) nD
-- Load all of the dimensions from d up to (nD-1) from the array's
-- stored dimension table.
gen_dims :: Int -> Int -> Int -> Int -> String
gen_dims l o d nD
= concat (map (\x -> gen_dim l o x) [d..(nD-1)])
-- Load the single dimension d from the stored array dimension table.
gen_dim :: Int -> Int -> Int -> String
gen_dim l o d
= gen_access_ptr l
++ "\tLOAD_O " ++ show o ++ "\n"
++ "\tLOAD_O " ++ show d ++ "\n"
-- Generate a case trampoline, i.e. a list of JUMPs to a particular case
-- index.
cases_jumps :: [(Int,Int,I_stmt)] -> LabelCounter -> String
cases_jumps cs l = cj cs 0 l where
cj [] cn l = ""
cj (c:cs) cn l
= "\tJUMP case" ++ show (snd l + 1) ++ "_" ++ show cn ++ "\n"
++ cj cs (cn+1) l
-- Generate the source code for the given list of cases, ignoring
-- function declarations.
gen_cases :: [(Int,Int,I_stmt)] -> LabelCounter -> (String,LabelCounter)
gen_cases cs (ln,cn) = gc cs (ln,cn+1) (cn+1) where
gc [] l _ = ("",l)
gc ((conn,nA,s):cs) l cn
= ("case" ++ show cn ++ "_" ++ show conn ++ ":"
++ caseCode
++ casesCode,
l')
where
(caseCode, l1) = gen_case (conn,nA,s) l cn
(casesCode, l') = gc cs l1 cn
-- Generate the source code for the given case, ignoring function
-- declarations. Sets up and takes down a case AR.
gen_case :: (Int,Int,I_stmt) -> LabelCounter -> Int -> (String,LabelCounter)
gen_case (conn,nA,s) l cn
= (case_prologue nA
++ stmtCode
++ case_epilogue nA
++ "\tJUMP return_case" ++ show cn ++ "\n",
l') where
(stmtCode, l') = gen_stmt s l
-- Set up a block AR.
block_prologue :: Int -> [(Int, [I_expr])] -> String
block_prologue nV ads
= "\tLOAD_R %fp\n"
++ "\tALLOC 2\n"
++ "\tLOAD_R %sp\n"
++ "\tSTORE_R %fp\n"
++ "\tALLOC " ++ show nV ++ "\n"
++ "\tLOAD_I " ++ show (nV+3) ++ "\n"
++ gen_arys nV ads
-- Tear down a block AR.
block_epilogue :: Int -> String
block_epilogue nV
= "\tLOAD_R %fp\n"
++ "\tLOAD_O " ++ show (nV+1) ++ "\n"
++ "\tAPP NEG\n"
++ "\tALLOC_S\n"
++ "\tSTORE_R %fp\n"
-- Set up a case AR.
case_prologue :: Int -> String
case_prologue nA
= "\tLOAD_R %fp\n"
++ "\tLOAD_R %fp\n"
++ "\tALLOC 2\n"
++ "\tLOAD_R %sp\n"
++ "\tSTORE_R %fp\n"
-- Tear down a case AR.
case_epilogue :: Int -> String
case_epilogue nA
= "\tALLOC -3\n"
++ "\tSTORE_R %fp\n"
++ "\tALLOC " ++ show (-nA) ++ "\n"
-- Generate the source code for the given expression.
gen_expr :: I_expr -> String
gen_expr (I_IVAL v)
= "\tLOAD_I " ++ show v ++ "\n"
gen_expr (I_RVAL v)
= "\tLOAD_F " ++ show v ++ "\n"
gen_expr (I_BVAL True)
 = "\tLOAD_B true\n"
gen_expr (I_BVAL False)
 = "\tLOAD_B false\n"
gen_expr (I_CVAL v)
= "\tLOAD_C \"" ++ (mkChar v) ++ "\"\n" where
mkChar '\n' = "\\n"
mkChar '\t' = "\\t"
mkChar '\r' = "\\r"
mkChar c = [c]
gen_expr (I_ID (l,o,[]))
= gen_access_ptr l
++ "\tLOAD_O " ++ show o ++ "\n"
gen_expr (I_ID (l,o,es))
= gen_access_ptr l
++ "\tLOAD_O " ++ show o ++ "\n"
++ gen_aryoffset l o es
++ "\tLOAD_OS\n"
gen_expr (I_APP ((I_CONS c), es))
= gen_exprs (reverse es)
++ gen_op (I_CONS c)
gen_expr (I_APP ((I_CALL f), es))
= gen_exprs (reverse es)
++ gen_op (I_CALL f)
gen_expr (I_APP (op, es))
= gen_exprs es
++ gen_op op
gen_expr (I_REF (l,o))
= gen_access_ptr l
++ "\tLOAD_O " ++ show o ++ "\n"
gen_expr (I_SIZE (l,o,d))
= gen_arysize l o d
-- Generate the source code for the given operand.
gen_op :: I_opn -> String
gen_op (I_CALL (nm,l))
= "\tALLOC 1\n"
++ gen_access_ptr l
++ "\tLOAD_R %fp\n"
++ "\tLOAD_R %cp\n"
++ "\tJUMP fun_" ++ nm ++ "\n"
gen_op (I_CONS (cn,nA))
= "\tLOAD_I " ++ show (cn+1) ++ "\n"
++ "\tSTORE_H " ++ show (nA + 1) ++ "\n"
gen_op I_ADD_I = "\tAPP ADD\n"
gen_op I_MUL_I = "\tAPP MUL\n"
gen_op I_SUB_I = "\tAPP SUB\n"
gen_op I_DIV_I = "\tAPP DIV\n"
gen_op I_NEG_I = "\tAPP NEG\n"
gen_op I_ADD_F = "\tAPP ADD_F\n"
gen_op I_MUL_F = "\tAPP MUL_F\n"
gen_op I_SUB_F = "\tAPP SUB_F\n"
gen_op I_DIV_F = "\tAPP DIV_F\n"
gen_op I_NEG_F = "\tAPP NEG_F\n"
gen_op I_LT_I = "\tAPP LT\n"
gen_op I_LE_I = "\tAPP LE\n"
gen_op I_GT_I = "\tAPP GT\n"
gen_op I_GE_I = "\tAPP GE\n"
gen_op I_EQ_I = "\tAPP EQ\n"
gen_op I_LT_F = "\tAPP LT_F\n"
gen_op I_LE_F = "\tAPP LE_F\n"
gen_op I_GT_F = "\tAPP GT_F\n"
gen_op I_GE_F = "\tAPP GE_F\n"
gen_op I_EQ_F = "\tAPP EQ_F\n"
gen_op I_LT_C = "\tAPP LT_C\n"
gen_op I_LE_C = "\tAPP LE_C\n"
gen_op I_GT_C = "\tAPP GT_C\n"
gen_op I_GE_C = "\tAPP GE_C\n"
gen_op I_EQ_C = "\tAPP EQ_C\n"
gen_op I_NOT = "\tAPP NOT\n"
gen_op I_AND = "\tAPP AND\n"
gen_op I_OR = "\tAPP OR\n"
gen_op I_FLOAT = "\tAPP FLOAT\n"
gen_op I_FLOOR = "\tAPP FLOOR\n"
gen_op I_CEIL = "\tAPP CEIL\n"
| JamesSullivan1/Mpp | src/MppCodeGen.hs | mit | 15,190 | 0 | 28 | 3,835 | 4,973 | 2,608 | 2,365 | 414 | 4 |
first :: (a, b, c) -> a
first (x, _, _) = x
second :: (a, b, c) -> b
second (_, y, _) = y
third :: (a, b, c) -> c
third (_, _, z) = z
| v0lkan/learning-haskell | session-003/004-sort-tuple.hs | mit | 136 | 0 | 6 | 41 | 106 | 63 | 43 | 6 | 1 |
-- | Functionality for graphing 2-dimensional matrices.
module System.Console.Ansigraph.Internal.Matrix (
matShow
, displayMat
, displayCMat
) where
import System.Console.Ansigraph.Internal.Core
import Data.Complex
import Data.List (intersperse)
import Control.Monad.IO.Class (MonadIO, liftIO)
-- for GHC <= 7.8
import Control.Applicative
---- Matrices ----
mmap :: (a -> b) -> [[a]] -> [[b]]
mmap = map . map
mmax :: (Num a, Ord a) => [[a]] -> a
mmax = maximum . map maximum . mmap abs
densityChars = "█▓▒░"
densityVals :: [Double]
densityVals = (+ 0.125) . (/4) <$> [3,2,1,0]
-- = [7/8, 5/8, 3/8, 1/8]
blocks :: [(Double,Char)]
blocks = zip densityVals densityChars
data MatElement = MatElement !Bool {-# UNPACK #-} !Char
elemChar :: MatElement -> Char
elemChar (MatElement _ c) = c
putRealElement :: MonadIO m => GraphSettings -> MatElement -> m ()
putRealElement s (MatElement b c) = colorStr clring (c : " ")
where clr = if b then realNegColor s else realColor s
clring = mkColoring clr (realBG s)
putImagElement :: MonadIO m => GraphSettings -> MatElement -> m ()
putImagElement s (MatElement b c) = colorStr clring $ c : " "
where clr = if b then imagNegColor s else imagColor s
clring = mkColoring clr (imagBG s)
selectMatElement :: Double -> MatElement
selectMatElement x = let l = filter (\p -> fst p < abs x) blocks in case l of
[] -> MatElement False ' '
(p:_) -> MatElement (x < 0) (snd p)
matElements :: [[Double]] -> [[MatElement]]
matElements m = let mx = mmax m
in mmap (selectMatElement . (/ mx)) m
-- | Given a matrix of Doubles, return the list of strings illustrating the absolute value
-- of each entry relative to the largest, via unicode chars that denote a particular density.
-- Used for testing purposes.
matShow :: [[Double]] -> [String]
matShow = mmap elemChar . matElements
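-- A tiny illustrative example (not part of the original module): the
-- largest-magnitude entry maps to the densest block and a zero entry maps
-- to a blank.
--
-- > matShow [[1, 0.5], [0.25, 0]] == ["█▒", "░ "]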
newline = putStrLn' ""
intersperse' x l = intersperse x l ++ [x]
-- | Use ANSI coloring (specified by a 'GraphSettings') to visually display a real-valued matrix.
displayMat :: MonadIO m => GraphSettings -> [[Double]] -> m ()
displayMat s = liftIO . sequence_ . concat . intersperse' [newline] . displayRealMat s
-- | Render a real-valued matrix as per-element colored actions using the real-part color scheme (internal helper).
displayRealMat :: MonadIO m => GraphSettings -> [[Double]] -> [[m ()]]
displayRealMat s = mmap (putRealElement s) . matElements
-- | Render a matrix as per-element colored actions using the imaginary-part color scheme (internal helper).
displayImagMat :: MonadIO m => GraphSettings -> [[Double]] -> [[m ()]]
displayImagMat s = mmap (putImagElement s) . matElements
-- | Use ANSI coloring (specified by a 'GraphSettings') to visually display a Complex matrix.
displayCMat :: MonadIO m => GraphSettings -> [[Complex Double]] -> m ()
displayCMat s m = liftIO . sequence_ . concat . intersperse' [newline] $
zipWith (\xs ys -> xs ++ putStr " " : ys)
(displayRealMat s $ mmap realPart m)
(displayImagMat s $ mmap imagPart m)
| BlackBrane/ansigraph | src/System/Console/Ansigraph/Internal/Matrix.hs | mit | 3,034 | 0 | 13 | 611 | 978 | 523 | 455 | 53 | 2 |
module Tach.Node.ImpulseSpec (main, spec) where
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "someFunction" $ do
it "should work fine" $ do
True `shouldBe` False
| smurphy8/tach | apps/tach-node-impulse/test/Tach/Node/ImpulseSpec.hs | mit | 214 | 0 | 13 | 49 | 77 | 41 | 36 | 9 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Database.Design.Ampersand.Output.PandocAux
( writepandoc
, XRefObj(..) , xRefTo, xRefToLatexRefString
, headerWithLabel
, definitionListItemLabel
, pandocEqnArray
, pandocEqnArrayWithLabels
, pandocEqnArrayWithLabel
, pandocEquation
, pandocEquationWithLabel
, count
, ShowMath(..)
, latexEscShw, escapeNonAlphaNum
, xrefCitation
, texOnly_Id
, texOnly_fun
, texOnly_rel
, newGlossaryEntry
)
where
import Database.Design.Ampersand.Prototype.StaticFiles_Generated
import Database.Design.Ampersand.ADL1
import Database.Design.Ampersand.FSpec
import Data.Char hiding (Space)
import Text.Pandoc
import Text.Pandoc.Builder
import Database.Design.Ampersand.Core.AbstractSyntaxTree
import Database.Design.Ampersand.Basics hiding (hPutStrLn)
import Prelude hiding (writeFile,readFile,getContents,putStr,putStrLn)
import Database.Design.Ampersand.Misc
import System.Process (system)
import System.Exit (ExitCode(ExitSuccess,ExitFailure))
import System.FilePath -- (combine,addExtension,replaceExtension)
import System.Directory
import System.Info (os)
import Data.List
import Control.Monad
import Data.Maybe
-- | Default key-value pairs for use with the Pandoc template
defaultWriterVariables :: FSpec -> [(String , String)]
defaultWriterVariables fSpec
= [ ("title", (case (fsLang fSpec, diagnosisOnly (getOpts fSpec)) of
(Dutch , False) -> if test (getOpts fSpec)
then "Afspraken van "
else "Functionele Specificatie van "
(English, False) -> "Functional Specification of "
(Dutch , True) -> "Diagnose van "
(English, True) -> "Diagnosis of "
)++name fSpec)
-- , ("mainfont",
-- , ("sansfont",
-- , ("monofont",
-- , ("mathfont",
, ("fontsize", "10pt") --can be overridden by geometry package (see below)
, ("lang" , case fsLang fSpec of
Dutch -> "dutch"
English -> "english")
, ("papersize", "a4")
, ("babel-lang", case fsLang fSpec of
Dutch -> "dutch"
English -> "english")
, ("documentclass","report")
] ++
[ ("toc" , "<<TheTableOfContentsShouldGoHere>>") | not (diagnosisOnly (getOpts fSpec))]++
[ ("header-includes", unlines
[ "% ============Ampersand specific Begin================="
, "% First a couple of LaTeX packages are included:"
, ""
, "% The glossaries package supports acronyms and multiple glossaries"
, "\\usepackage[toc]{glossaries} % Add the glossaries to the table of contents"
, "\\makeglossaries"
, ""
, "% geometry provides a flexible and easy interface to page dimentions"
, "\\usepackage[ top=1.5cm, bottom=1.5cm, outer=5cm, inner=2cm"
, " , heightrounded, footskip=.5cm"
, " , marginparwidth=2.5cm, marginparsep=0.5cm]{geometry}"
, ""
, "% breqn – Automatic line breaking of displayed equations"
, "\\usepackage{breqn}"
, ""
, "% colonequals – Colon equals symbols"
, "\\usepackage{colonequals}"
, ""
, ""
, "\\usepackage{textcomp}"
, "% == [all]{hypcap} after {hyperref} shows the ref'd picture i.o. the caption @ click =="
, ""
, "\\usepackage[all]{hypcap}"
, ""
, "% hack1) For the purpose of clear references in Latex. See also https://github.com/AmpersandTarski/ampersand/issues/31"
, "\\makeatletter"
, "\\let\\orgdescriptionlabel\\descriptionlabel"
, "\\renewcommand*{\\descriptionlabel}[1]{%"
, " \\let\\orglabel\\label"
, " \\let\\label\\@gobble"
, " \\phantomsection"
, " \\edef\\@currentlabel{#1}%"
, " %\\edef\\@currentlabelname{#1}%"
, " \\let\\label\\orglabel"
, " \\orgdescriptionlabel{#1}%"
, "}"
, "\\makeatother"
, "% End-hack1"
, ""
, "% hack2) The LaTeX commands \\[ and \\], are redefined in the amsmath package, making sure that equations are"
, "% not numbered. This is undesireable behaviour. this is fixed with the following hack, inspired on a note"
, "% found at http://tex.stackexchange.com/questions/40492/what-are-the-differences-between-align-equation-and-displaymath"
, "\\DeclareRobustCommand{\\[}{\\begin{equation}}"
, "\\DeclareRobustCommand{\\]}{\\end{equation}}"
, "% End-hack2"
, ""
, ""
, "\\def\\id#1{\\mbox{\\em #1\\/}}"
, "\\newcommand{\\marge}[1]{\\marginpar{\\begin{minipage}[t]{3cm}{\\noindent\\small\\em #1}\\end{minipage}}}"
, "\\def\\define#1{\\label{dfn:#1}\\index{#1}{\\em #1}}"
, "\\def\\defmar#1{\\label{dfn:#1}\\index{#1}\\marge{#1}{\\em #1}}"
, "\\newcommand{\\iden}{\\mathbb{I}}"
, "\\newcommand{\\ident}[1]{\\mathbb{I}_{#1}}"
, "\\newcommand{\\full}{\\mathbb{V}}"
, "\\newcommand{\\fullt}[1]{\\mathbb{V}_{[#1]}}"
, "\\newcommand{\\flip}[1]{{#1}^\\smallsmile} %formerly: {#1}^\\backsim"
, "%\\newcommand{\\kleeneplus}[1]{{#1}^{+}}"
, "%\\newcommand{\\kleenestar}[1]{{#1}^{*}}"
, "\\newcommand{\\asterisk}{*}"
, "\\newcommand{\\cmpl}[1]{\\overline{#1}}"
, "\\newcommand{\\subs}{\\vdash}"
, "\\newcommand{\\rel}{\\times}"
, "\\newcommand{\\fun}{\\rightarrow}"
, "\\newcommand{\\isa}{\\sqsubseteq}"
, "\\newcommand{\\N}{\\mbox{\\msb N}}"
, "\\newcommand{\\disjn}[1]{\\id{disjoint}(#1)}"
, "\\newcommand{\\fsignat}[3]{\\id{#1}:\\id{#2}\\fun\\id{#3}}"
, "\\newcommand{\\signat}[3]{\\id{#1}:\\id{#2}\\rel\\id{#3}}"
, "\\newcommand{\\signt}[2]{\\mbox{\\({#1}_{[{#2}]}\\)}}"
, "\\newcommand{\\declare}[3]{\\id{#1}:\\ \\id{#2}\\rel\\id{#3}}"
, "%\\newcommand{\\fdeclare}[3]{\\id{#1}:\\ \\id{#2}\\fun\\id{#3}}"
, "% ============Ampersand specific End==================="
])
| fspecFormat (getOpts fSpec) == FLatex ]
--DESCR -> functions to write the pandoc
-- String = the name of the outputfile
-- The first IO() is a Pandoc output format
-- The second IO(): If the output format is latex, then this IO() generates a .pdf from the .ltx
writepandoc :: FSpec -> Pandoc -> (String,IO(),IO())
writepandoc fSpec thePandoc = (outputFile,makeOutput,postProcessMonad)
where
outputFile = addExtension (combine (dirOutput (getOpts fSpec)) (baseName (getOpts fSpec)))
(case fspecFormat (getOpts fSpec) of
Fasciidoc -> ".txt"
Fcontext -> ".context"
Fdocbook -> ".docbook"
Fman -> ".man"
Fmarkdown -> ".md"
Fmediawiki -> ".mediawiki"
Forg -> ".org"
Fplain -> ".plain"
Frst -> ".rst"
FPandoc -> ".pandoc"
Frtf -> ".rtf"
FLatex -> ".ltx"
Fhtml -> ".html"
Fopendocument -> ".odt"
Ftexinfo -> ".texinfo"
Ftextile -> ".textile"
)
makeOutput
= do verboseLn (getOpts fSpec) ("Generating "++fSpecFormatString++" to : "++outputFile)
writeFile outputFile (pandocWriter (writerOptions (readDefaultTemplate fSpecFormatString)) $ thePandoc)
verboseLn (getOpts fSpec) "... done."
where
pandocWriter :: WriterOptions -> Pandoc -> String
pandocWriter =
case fspecFormat (getOpts fSpec) of
Fasciidoc -> fatal 145 "No current support for asciidoc"
FPandoc -> writeNative
Fcontext -> writeConTeXt
Fdocbook -> writeDocbook
Fhtml -> writeHtmlString
FLatex -> writeLaTeX
Fman -> writeMan
Fmarkdown -> writeMarkdown
Fmediawiki -> writeMediaWiki
Fopendocument -> writeOpenDocument
Forg -> writeOrg
Fplain -> writePlain
Frst -> writeRST
Frtf -> writeRTF
Ftexinfo -> writeTexinfo
Ftextile -> writeTextile
fSpecFormatString :: String
fSpecFormatString =
case fspecFormat (getOpts fSpec) of
FPandoc -> "pandoc"
Fasciidoc -> "asciidoc"
Fcontext -> "context"
Fdocbook -> "docbook"
Fhtml -> "html"
FLatex -> "latex"
Fman -> "man"
Fmarkdown -> "markdown"
Fmediawiki -> "mediawiki"
Fopendocument -> "opendocument"
Forg -> "org"
Fplain -> "plain"
Frst -> "rst"
Frtf -> "rtf"
Ftexinfo -> "texinfo"
Ftextile -> "textile"
readDefaultTemplate :: String -> Maybe String
readDefaultTemplate s = getStaticFileContent PandocTemplates ("default."++s)
writerOptions :: Maybe String -> WriterOptions
writerOptions template = def
{ writerStandalone=isJust template
, writerTableOfContents=True
, writerNumberSections=True
, writerTemplate=fromMaybe "" template
, writerVariables=defaultWriterVariables fSpec
}
postProcessMonad :: IO()
postProcessMonad =
case fspecFormat (getOpts fSpec) of
FLatex -> do
(ready,nrOfRounds) <- doRestOfPdfLatex (False, 0) -- initialize with: (<NotReady>, <0 rounds so far>)
verboseLn (getOpts fSpec) ("PdfLatex was called "++
(if nrOfRounds>1 then show nrOfRounds++" times" else "once")++
if ready then "."
else ", but did not solve all references!")
where
doRestOfPdfLatex :: (Bool,Int) -> IO (Bool,Int)
doRestOfPdfLatex (ready, roundsSoFar)
= if ready || roundsSoFar > 4 -- Make sure we will not hit a loop when something is wrong with call to pdfLatex ...
then return (ready, roundsSoFar)
else do result <- callPdfLatexOnce
let logFileName = replaceExtension outputFile ("log"++show roundsSoFar)
renameFile (replaceExtension outputFile "log") logFileName
logFileLines <- fmap lines $ readFile logFileName
{- Old comment: The log file should be renamed before reading, because readFile opens the file
for lazy IO. In a next run, pdfLatex will try to write to the log file again. If it
was read using readFile, it will fail because the file is still open. 8-((
-}
case result of
ExitSuccess -> verboseLn (getOpts fSpec) "PDF file created."
ExitFailure _ ->
do { let nrOfErrLines = 15
; putStrLn "----------- LaTeX error-----------"
-- get rid of latex memory info and take required nr of lines
; let reverseErrLines = take nrOfErrLines . drop 2
. dropWhile (not . ("Here is how much of TeX's memory you used:" `isPrefixOf`))
. reverse $ logFileLines
; putStrLn $ unlines . reverse $ reverseErrLines
; putStrLn "----------------------------------\n"
; putStrLn $ "ERROR: Latex execution failed."
; putStrLn $ "For more information, run pdflatex on: "++texFilename
; putStrLn $ "Or consult the log file:\n"++logFileName
}
-- We need to rerun latex if any of the log lines start with a rerunPrefix
let notReady = result == ExitSuccess &&
or [ rerunPrefix `isPrefixOf` line
| line <- logFileLines
, rerunPrefix <- [ "LaTeX Warning: Label(s) may have changed. Rerun to get cross-references right."
, "Package longtable Warning: Table widths have changed. Rerun LaTeX."
]
]
when notReady (verboseLn (getOpts fSpec) "Another round of pdfLatex is required. Hang on...")
-- when notReady (dump "log") -- Need to dump the last log file, otherwise pdfLatex cannot write its log.
doRestOfPdfLatex (not notReady, roundsSoFar +1)
texFilename = addExtension (baseName (getOpts fSpec)) ".ltx"
callPdfLatexOnce :: IO ExitCode
callPdfLatexOnce =
do if os `elem` ["mingw32","mingw64","cygwin","windows"] --REMARK: not a clear enum to check for windows OS
then do { res <- system ( pdfLatexCommand++"> "++combine (dirOutput (getOpts fSpec)) "pdflog" )
; if res /= ExitSuccess then return res else
system makeIndexCommand -- TODO: failure of makeindex is not reported correctly (requires refactoring command execution)
}
--REMARK: MikTex is windows; Tex-live does not have the flag -include-directory.
else system ( "cd "++dirOutput (getOpts fSpec)++
" && pdflatex "++commonFlags++
texFilename ++ "> "++addExtension(baseName (getOpts fSpec)) ".pdflog" )
-- >> system makeIndexCommand
-- disabled makeIndexCommand on non-windows, since it will always fail when absolute paths are used
-- For some weird Latex reason this can only be avoided by setting an environment variable.
where
pdfLatexCommand = "pdflatex "++commonFlags++pdfgetOpts++ outputFile
--makeIndexCommand = "makeglossaries "++replaceExtension outputFile "glo"
--makeindex uses the error stream for verbose stuff...
makeIndexCommand = "makeindex -s "++replaceExtension outputFile "ist"++" -t "++replaceExtension outputFile "glg"++" -o "++replaceExtension outputFile "gls"++" "++replaceExtension outputFile "glo 2> "++combine (dirOutput (getOpts fSpec)) "glossaries.log"
pdfgetOpts = " -include-directory="++dirOutput (getOpts fSpec)++ " -output-directory="++dirOutput (getOpts fSpec)++" "
commonFlags = "--halt-on-error --interaction=nonstopmode " -- MacTex options are normally with one '-', but '--interaction' is accepted
-- we don't do --disable-installer on Windows, so the install dialog will pop up, even when we are in nonstopmode
_ -> return()
-----Linguistic goodies--------------------------------------
count :: Lang -> Int -> String -> String
count lang n x
= case (lang, n) of
(Dutch , 0) -> "geen "++plural Dutch x
(Dutch , 1) -> "één "++x
(Dutch , 2) -> "twee "++plural Dutch x
(Dutch , 3) -> "drie "++plural Dutch x
(Dutch , 4) -> "vier "++plural Dutch x
(Dutch , 5) -> "vijf "++plural Dutch x
(Dutch , 6) -> "zes "++plural Dutch x
(Dutch , _) -> show n++" "++plural Dutch x
(English, 0) -> "no "++plural English x
(English, 1) -> "one "++x
(English, 2) -> "two "++plural English x
(English, 3) -> "three "++plural English x
(English, 4) -> "four "++plural English x
(English, 5) -> "five "++plural English x
(English, 6) -> "six "++plural English x
(English, _) -> show n++" "++plural English x
------ Symbolic referencing ---------------------------------
data XRefObj = XRefNaturalLanguageDeclaration Declaration
| XRefPredicateXpression Rule
| XRefDataAnalRule Rule
| XRefNaturalLanguageRule Rule
| XRefProcessAnalysis Pattern
| XRefProcessAnalysisDeclaration Declaration
| XRefConceptualAnalysisPattern Pattern
| XRefConceptualAnalysisDeclaration Declaration
| XRefConceptualAnalysisRule Rule
| XRefInterfacesInterface Interface
| XRefNaturalLanguageTheme (Maybe Pattern)
xRefTo :: XRefObj -> Inlines
xRefTo x = rawInline "latex" $ xRefToLatexRefString x
xRefToLatexRefString :: XRefObj -> String
xRefToLatexRefString x = "\\ref{"++xRefRawLabel x++"}"
xRefRawLabel :: XRefObj -> String
xRefRawLabel x
= case x of
XRefNaturalLanguageDeclaration d -> "natLangDcl:"++(escapeNonAlphaNum.fullName) d
XRefPredicateXpression r -> "pex:"++(escapeNonAlphaNum.name) r
XRefDataAnalRule r -> "dataAnalRule:"++(escapeNonAlphaNum.name) r
XRefNaturalLanguageRule r -> "natLangRule:"++(escapeNonAlphaNum.name) r
XRefProcessAnalysis p -> "prcAnal:"++(escapeNonAlphaNum.name) p
XRefProcessAnalysisDeclaration d
-> "prcAnalDcl:"++(escapeNonAlphaNum.fullName) d
XRefConceptualAnalysisPattern p
-> "cptAnalPat:"++(escapeNonAlphaNum.name) p
XRefConceptualAnalysisDeclaration d
-> "cptAnalDcl:"++(escapeNonAlphaNum.fullName) d
XRefConceptualAnalysisRule r -> "cptAnalRule:"++(escapeNonAlphaNum.name) r
XRefInterfacesInterface i -> "interface:"++(escapeNonAlphaNum.name) i
XRefNaturalLanguageTheme (Just t)
-> "theme:"++(escapeNonAlphaNum.name) t
XRefNaturalLanguageTheme Nothing
-> ":losseEindjes"
where
fullName d = name d++"*"++(name.source) d++"*"++(name.target) d
headerWithLabel :: XRefObj -> Int -> Inlines -> Blocks
headerWithLabel x = headerWith (xRefRawLabel x, [],[])
definitionListItemLabel :: XRefObj -> String -> Inlines
definitionListItemLabel x prefix
= str prefix <> rawInline "latex" ("\\label{"++xRefRawLabel x++"}")
xrefCitation :: String -> Inline -- extensible to renderings other than LaTeX
xrefCitation myLabel = RawInline (Text.Pandoc.Builder.Format "latex") ("\\cite{"++escapeNonAlphaNum myLabel++"}")
pandocEqnArray :: [[String]] -> [Block]
pandocEqnArray [] = []
pandocEqnArray xs
= (toList . para . displayMath)
( "\\begin{aligned}\n"
++ intercalate "\\\\\n " [ intercalate "&" row | row <-xs ]
++"\n\\end{aligned}"
)
pandocEqnArrayWithLabels :: [(XRefObj,[String])] -> Blocks
pandocEqnArrayWithLabels [] = mempty
pandocEqnArrayWithLabels rows
= (para .displayMath)
( "\\begin{aligned}\n"
++ intercalate "\\\\\n " [ intercalate "&" row ++ "\\label{"++xRefRawLabel x++"}" | (x,row)<-rows ]
++"\n\\end{aligned}"
)
pandocEqnArrayWithLabel :: XRefObj -> [[String]] -> Blocks
pandocEqnArrayWithLabel _ [] = mempty
pandocEqnArrayWithLabel xref rows
= (para . displayMath)
( "\\label{"++xRefRawLabel xref++"}\\begin{aligned}\\\\\n"
++ intercalate "\\\\\n " [ intercalate "&" row | row <- rows ]
++"\n\\end{aligned}"
)
pandocEquation :: String -> [Block]
pandocEquation x = toList . para . displayMath $ x
pandocEquationWithLabel :: XRefObj -> String -> Blocks
pandocEquationWithLabel xref x =
para . displayMath $
( "\\begin{aligned}\\label{"++xRefRawLabel xref++"}\\\\\n"
++x
++"\n\\end{aligned}"
)
--DESCR -> pandoc print functions for Ampersand data structures
---------------------------------------
-- LaTeX math markup
---------------------------------------
class ShowMath a where
showMath :: a -> String
instance ShowMath A_Concept where
showMath c = texOnly_Id (name c)
instance ShowMath A_Gen where
showMath g@Isa{} = showMath (genspc g)++"\\ \\le\\ "++showMath (gengen g)
showMath g@IsE{} = showMath (genspc g)++"\\ =\\ "++intercalate "\\cap" (map showMath (genrhs g))
instance ShowMath Rule where
showMath r = showMath (rrexp r)
instance ShowMath Signature where
showMath (Sign s t) = showMath s++"\\rel"++showMath t
instance ShowMath Expression where
showMath = showExpr . insParentheses
where showExpr (EEqu (l,r)) = showExpr l++texOnly_equals++showExpr r
showExpr (EInc (l,r)) = showExpr l++texOnly_subs++showExpr r
showExpr (EIsc (l,r)) = showExpr l++texOnly_inter++showExpr r
showExpr (EUni (l,r)) = showExpr l++texOnly_union++showExpr r
showExpr (EDif (l,r)) = showExpr l++texOnly_bx ++showExpr r
showExpr (ELrs (l,r)) = showExpr l++texOnly_lRes++showExpr r
showExpr (ERrs (l,r)) = showExpr l++texOnly_rRes++showExpr r
showExpr (EDia (l,r)) = showExpr l++texOnly_dia++showExpr r
showExpr (ECps (EEps i sgn,r)) | i==source sgn||i==target sgn = showExpr r
| otherwise = showExpr (ECps (EDcI i,r))
showExpr (ECps (l,EEps i sgn)) | i==source sgn||i==target sgn = showExpr l
| otherwise = showExpr (ECps (l,EDcI i))
showExpr (ECps (l,r)) = showExpr l++texOnly_compose++showExpr r
showExpr (ERad (l,r)) = showExpr l++texOnly_relAdd++showExpr r
showExpr (EPrd (l,r)) = showExpr l++texOnly_crtPrd++showExpr r
showExpr (EKl0 e) = showExpr (addParensToSuper e)++"^{"++texOnly_star++"}"
showExpr (EKl1 e) = showExpr (addParensToSuper e)++"^{"++texOnly_plus++"}"
showExpr (EFlp e) = showExpr (addParensToSuper e)++"^{"++texOnly_flip++"}"
showExpr (ECpl e) = "\\cmpl{"++showExpr e++"}"
showExpr (EBrk e) = "("++showExpr e++")"
showExpr (EDcD d) = "\\text{"++latexEscShw (name d)++"}"
showExpr (EDcI c) = "I_{[\\text{"++latexEscShw (name c)++"}]}"
showExpr EEps{} = "" -- fatal 417 "EEps may occur only in combination with composition (semicolon)." -- SJ 2014-03-11: Are we sure about this? Let's see if it ever occurs...
showExpr (EDcV sgn) = "V_{[\\text{"++latexEscShw (name (source sgn))++"}"++"*"
++"\\text{"++latexEscShw (name (target sgn))++"}]}"
showExpr (EMp1 val _) = "`\\text{"++(latexEscShw . showADL $ val)++"}`"
-- add extra parentheses to consecutive superscripts, since latex cannot handle these
-- (this is not implemented in insParentheses because it is a latex-specific issue)
addParensToSuper :: Expression -> Expression
addParensToSuper e@EKl0{} = EBrk e
addParensToSuper e@EKl1{} = EBrk e
addParensToSuper e@EFlp{} = EBrk e
addParensToSuper e = e
instance ShowMath Declaration where
showMath decl@(Sgn{})
= "\\declare{"++latexEscShw(name decl)++"}{"++latexEscShw(name (source decl))++"}{"++latexEscShw(name (target decl))++"}"
showMath Isn{}
= "\\mathbb{I}"
showMath Vs{}
= "\\full"
-- | latexEscShw escapes to LaTeX encoding. It is intended to be used in LaTeX text mode.
-- For more elaborate info on LaTeX encoding, consult The Comprehensive LaTeX Symbol List
-- on: http://ftp.snt.utwente.nl/pub/software/tex/info/symbols/comprehensive/symbols-a4.pdf
latexEscShw :: String -> String
latexEscShw "" = ""
latexEscShw ('\"':c:cs) | isAlphaNum c = "``"++latexEscShw (c:cs)
| otherwise = "''"++latexEscShw (c:cs)
latexEscShw "\"" = "''"
latexEscShw (c:cs) | isAlphaNum c && isAscii c = c:latexEscShw cs
| otherwise = f c++latexEscShw cs
where
f '"' = "\\textquotedbl "
f '#' = "\\#"
f '$' = "\\$"
f '%' = "\\%"
f '&' = "\\&"
f '\\'= "\\textbackslash "
f '^' = "\\^{}"
f '_' = "\\_"
f '{' = "\\{"
f '|' = "\\textbar "
f '}' = "\\}"
f '~' = "\\~{}"
f '¦' = "\\textbrokenbar "
f '¨' = "\\textasciidieresis "
f '¯' = "\\textasciimacron "
f '´' = "\\textasciiacute "
f '¢' = "\\textcent "
f '£' = "\\textpound "
f '¤' = "\\textcurrency "
f '¥' = "\\textyen "
f '€' = "\\texteuro "
f '<' = "\\textless "
f '>' = "\\textgreater "
f '±' = "\\textpm "
f '«' = "\\guillemotleft "
f '»' = "\\guillemotright "
f '×' = "\\texttimes "
f '÷' = "\\textdiv "
f '§' = "\\S "
f '©' = "\\textcopyright "
f '¬' = "\\textlnot "
f '®' = "\\textregistered "
f '°' = "\\textdegree "
f 'µ' = "\\textmu "
f '¶' = "\\P "
f '·' = "\\textperiodcentered "
f '¼' = "\\textonequarter "
f '½' = "\\textonehalf "
f '¾' = "\\textthreequarters "
f '¹' = "\\textonesuperior "
f '²' = "\\texttwosuperior "
f '³' = "\\textthreesuperior "
f '∞' = "\\hbipropto "
f 'ä' = "\\\"{a}" -- umlaut or dieresis
f 'Ä' = "\\\"{A}" -- umlaut or dieresis
f 'â' = "\\^{a}" -- circumflex
f 'Â' = "\\^{A}" -- circumflex
f 'à' = "\\`{a}" -- grave accent
f 'À' = "\\`{A}" -- grave accent
f 'á' = "\\'{a}" -- acute accent
f 'Á' = "\\'{A}" -- acute accent
f 'ã' = "\\~{a}" -- tilde
f 'Ã' = "\\~{A}" -- tilde
f 'å' = "\\aa "
-- f 'å' = "\\r{a}" -- alternatively: ring over the letter
f 'Å' = "\\AA "
-- f 'Å' = "\\r{A}" -- alternatively: ring over the letter
f 'ą' = "\\k{a}" -- ogonek
f 'Ą' = "\\k{A}" -- ogonek
f 'ª' = "\\textordfeminine "
f 'æ' = "\\ae "
f 'Æ' = "\\AE "
f 'ç' = "\\c{c}" -- cedilla
f 'Ç' = "\\c{C}" -- cedilla
f 'Ð' = "\\DH "
f 'ð' = "\\dh "
f 'ë' = "\\\"{e}" -- umlaut or dieresis
f 'Ë' = "\\\"{E}" -- umlaut or dieresis
f 'ê' = "\\^{e}" -- circumflex
f 'Ê' = "\\^{E}" -- circumflex
f 'è' = "\\`{e}" -- grave accent
f 'È' = "\\`{E}" -- grave accent
f 'é' = "\\'{e}" -- acute accent
f 'É' = "\\'{E}" -- acute accent
f 'ï' = "\\\"{\\i}" -- umlaut or dieresis
f 'Ï' = "\\\"{I}" -- umlaut or dieresis
f 'î' = "\\^{\\i}" -- circumflex
f 'Î' = "\\^{I}" -- circumflex
f 'ì' = "\\`{\\i}" -- grave accent
f 'Ì' = "\\`{I}" -- grave accent
f 'í' = "\\'{\\i}" -- acute accent
f 'Í' = "\\'{I}" -- acute accent
f 'ł' = "\\l " -- l with stroke
f 'Ł' = "\\L " -- l with stroke
f 'ñ' = "\\~{n}" -- tilde
f 'Ñ' = "\\~{N}" -- tilde
f 'Ȯ' = "\\.{O}" -- dot over the letter
f 'ȯ' = "\\.{o}" -- dot over the letter
f 'ö' = "\\\"{o}" -- umlaut or dieresis
f 'Ö' = "\\\"{O}" -- umlaut or dieresis
f 'ô' = "\\^{o}" -- circumflex
f 'Ô' = "\\^{O}" -- circumflex
f 'ò' = "\\`{o}" -- grave accent
f 'Ò' = "\\`{O}" -- grave accent
f 'ó' = "\\'{o}" -- acute accent
f 'Ó' = "\\'{O}" -- acute accent
f 'õ' = "\\~{o}" -- tilde
f 'Õ' = "\\~{O}" -- tilde
f 'ō' = "\\={o}" -- macron accent (a bar over the letter)
f 'Ō' = "\\={O}" -- macron accent (a bar over the letter)
f 'ő' = "\\H{o}" -- long Hungarian umlaut (double acute)
f 'Ő' = "\\H{O}" -- long Hungarian umlaut (double acute)
f 'Ø' = "\\O "
f 'ø' = "\\o "
f 'º' = "\\textordmasculine "
f 'ŏ' = "\\u{o}" -- breve over the letter
f 'Ŏ' = "\\u{O}" -- breve over the letter
f 'œ' = "\\oe "
f 'Œ' = "\\OE "
f 'š' = "\\v{s}" -- caron/hacek ("v") over the letter
f 'Š' = "\\v{S}" -- caron/hacek ("v") over the letter
f 'ß' = "\\ss "
f 'Þ' = "\\TH "
f 'þ' = "\\th "
f '™' = "\\texttrademark "
f 'ü' = "\\\"{u}" -- umlaut or dieresis
f 'Ü' = "\\\"{U}" -- umlaut or dieresis
f 'û' = "\\^{u}" -- circumflex
f 'Û' = "\\^{U}" -- circumflex
f 'ù' = "\\`{u}" -- grave accent
f 'Ù' = "\\`{U}" -- grave accent
f 'ú' = "\\'{u}" -- acute accent
f 'Ú' = "\\'{U}" -- acute accent
f 'ý' = "\\'{y}" -- acute accent
f 'Ý' = "\\'{Y}" -- acute accent
f _ = [c] -- let us think if this should be: fatal 661 ("Symbol "++show x++" (character "++show (ord c)++") is not supported")
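-- A couple of illustrative escapes (not part of the original module):
--
-- > latexEscShw "50% & more" == "50\\% \\& more"
-- > latexEscShw "x_1" == "x\\_1"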
--posixFilePath :: FilePath -> String
-- tex uses posix file notation, however when on a windows machine, we have windows conventions for file paths...
-- To set the graphicspath, we want something like: \graphicspath{{"c:/data/Ampersand/output/"}}
--posixFilePath fp = "/"++System.FilePath.Posix.addTrailingPathSeparator (System.FilePath.Posix.joinPath (tail (splitDirectories fp)))
---------------------------
--- LaTeX related stuff ---
---------------------------
-- safe function to have plain text in a piece of Math
mathText :: String -> String
mathText s = "\\text{"++latexEscShw s++"} "
texOnly_Id :: String -> String
texOnly_Id s = "\\id{"++latexEscShw s++"} "
texOnly_fun :: String
texOnly_fun = "\\rightarrow "
texOnly_rel :: String
texOnly_rel = "\\times "
texOnly_compose :: String
texOnly_compose = ";"
texOnly_relAdd :: String
texOnly_relAdd = "\\dagger "
texOnly_crtPrd :: String
texOnly_crtPrd = "\\asterisk "
texOnly_inter :: String
texOnly_inter = "\\cap "
texOnly_union :: String
texOnly_union = "\\cup "
texOnly_subs :: String
texOnly_subs = "\\vdash "
texOnly_equals :: String
texOnly_equals = "="
texOnly_star :: String
texOnly_star = "^* "
texOnly_plus :: String
texOnly_plus = "^+ "
texOnly_bx :: String
texOnly_bx = " - "
texOnly_lRes :: String
texOnly_lRes = " / "
texOnly_rRes :: String
texOnly_rRes = " \\backslash "
texOnly_dia :: String
texOnly_dia = " \\Diamond "
texOnly_flip :: String
texOnly_flip = "\\smallsmile "
newGlossaryEntry :: String -> String -> Inlines
newGlossaryEntry nm cnt =
rawInline "latex"
("\\newglossaryentry{"++escapeNonAlphaNum nm ++"}\n"++
" { name={"++latexEscShw nm ++"}\n"++
" , description={"++latexEscShw (cnt)++"}}\n")
| 4ZP6Capstone2015/ampersand | src/Database/Design/Ampersand/Output/PandocAux.hs | gpl-3.0 | 33,170 | 1 | 29 | 11,746 | 6,318 | 3,333 | 2,985 | 570 | 124 |
-- In the Lists chapter, you wrote a Caesar cipher. Now, we want to
-- expand on that idea by writing a Vigenère cipher. A Vigenère cipher
-- is another substitution cipher, based on a Caesar cipher, but it
-- uses a series of Caesar ciphers for polyalphabetic substitution. The
-- substitution for each letter in the plaintext is determined by a fixed
-- keyword.
--
-- So, for example, if you want to encode the message “meet at
-- dawn,” the first step is to pick a keyword that will determine which
-- Caesar cipher to use. We’ll use the keyword “ALLY” here. You repeat
-- the keyword for as many characters as there are in your original
-- message:
--
-- MEET AT DAWN
-- ALLY AL LYAL
--
-- Now the number of rightward shifts to make to encode each
-- character is set by the character of the keyword that lines up with it.
-- The ’A’ means a shift of 0, so the initial M will remain M. But the ’L’
-- for our second character sets a rightward shift of 11, so ’E’ becomes
-- ’P’. And so on, so “meet at dawn” encoded with the keyword “ALLY”
-- becomes “MPPR AE OYWY.”
-- Identifiers :
-- k - Key
-- p - Plain text
-- c - Single character
{-# LANGUAGE PatternGuards #-}
module VigenereCipher where
import Data.Char
type Key = String
type PlainText = String
type Secret = String
type CryptFunction = Char -> Char -> Char
alphabetSize :: Int
alphabetSize = 26
fromInt :: Int -> Char
fromInt i = chr $ (i `mod` alphabetSize) + 65
toInt :: Char -> Int
toInt c = (ord c - 65) `mod` alphabetSize
add :: CryptFunction
add k c = fromInt $ (toInt c) + (toInt k)
sub :: CryptFunction
sub k c = fromInt $ (toInt c) - (toInt k)
apply :: CryptFunction -> Key -> String -> String
apply _ [] _ = []
apply _ _ [] = []
apply f all@(k:ks) (x:xs) =
let go | isAlpha x = f k x : apply f ks xs
| otherwise = x : apply f all xs
in go
encrypt :: Key -> PlainText -> Secret
encrypt k = apply add (cycle k)
decrypt :: Key -> Secret -> PlainText
decrypt k = apply sub (cycle k)
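-- A usage sketch. Note the cipher assumes upper-case input, since toInt maps
-- characters via (ord c - 65); lower-case letters are shifted into the wrong
-- alphabet and do not round-trip.
--
-- >>> encrypt "ALLY" "MEET AT DAWN"
-- "MPPR AE OYWY"
-- >>> decrypt "ALLY" "MPPR AE OYWY"
-- "MEET AT DAWN"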
| nirvinm/Solving-Exercises-in-Haskell-Programming-From-First-Principles | Algebraic_DataTypes/VigenereCipher.hs | gpl-3.0 | 2,037 | 0 | 12 | 438 | 413 | 229 | 184 | 28 | 1 |
{- |
Module : Sound.FModEx.Raw.Codec
Description : Codec FModEx library Haskell raw binding
Copyright : (c) Dimitri Sabadie
License : GPL-3
Maintainer : [email protected]
Stability : experimental
Portability : Linux only for now
Codec FModEx raw Haskell API.
-}
module Sound.FModEx.Raw.Codec (
module X
) where
import Sound.FModEx.Raw.Codec.Types as X
| phaazon/hsFModEx | Sound/FModEx/Raw/Codec.hs | gpl-3.0 | 391 | 0 | 4 | 81 | 25 | 19 | 6 | 3 | 0 |
-----------------------------------------------------------------------------
-- |
-- Module : System.Random
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : stable
-- Portability : portable
--
-- Random numbers.
--
-----------------------------------------------------------------------------
module System.Random
(
-- $intro
-- * The 'RandomGen' class, and the 'StdGen' generator
RandomGen(next, split, genRange)
, StdGen
, mkStdGen
-- * The 'Random' class
, Random ( random, randomR,
randoms, randomRs,
randomIO, randomRIO )
-- * The global random number generator
-- $globalrng
, getStdRandom
, getStdGen
, setStdGen
, newStdGen
-- * References
-- $references
) where
import Prelude
import System.CPUTime ( getCPUTime )
import System.Time ( getClockTime, ClockTime(..) )
import Data.Char ( isSpace, chr, ord )
import System.IO.Unsafe ( unsafePerformIO )
import Data.IORef
import Numeric ( readDec )
-- The standard nhc98 implementation of Time.ClockTime does not match
-- the extended one expected in this module, so we lash-up a quick
-- replacement here.
{- $intro
This library deals with the common task of pseudo-random
number generation. The library makes it possible to generate
repeatable results, by starting with a specified initial random
number generator; or to get different results on each run by using the
system-initialised generator, or by supplying a seed from some other
source.
The library is split into two layers:
* A core /random number generator/ provides a supply of bits. The class
'RandomGen' provides a common interface to such generators.
* The class 'Random' provides a way to extract particular values from
a random number generator. For example, the 'Float' instance of 'Random'
allows one to generate random values of type 'Float'.
This implementation uses the Portable Combined Generator of L'Ecuyer
["System.Random\#LEcuyer"] for 32-bit computers, transliterated by
Lennart Augustsson. It has a period of roughly 2.30584e18.
-}
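-- A small illustrative sketch: generators are pure values, so the same seed
-- always yields the same draw:
--
-- > let g = mkStdGen 2015
-- > let (x, _) = randomR (1, 6) g :: (Int, StdGen)
-- > fst (randomR (1, 6) g) == x   -- True: same seed, same result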
-- | The class 'RandomGen' provides a common interface to random number
-- generators.
class RandomGen g where
-- |The 'next' operation returns an 'Int' that is uniformly distributed
-- in the range returned by 'genRange' (including both end points),
-- and a new generator.
next :: g -> (Int, g)
-- |The 'split' operation allows one to obtain two distinct random number
-- generators. This is very useful in functional programs (for example, when
-- passing a random number generator down to recursive calls), but very
-- little work has been done on statistically robust implementations of
-- 'split' (["System.Random\#Burton", "System.Random\#Hellekalek"]
-- are the only examples we know of).
split :: g -> (g, g)
-- |The 'genRange' operation yields the range of values returned by
-- the generator.
--
-- It is required that:
--
-- * If @(a,b) = 'genRange' g@, then @a < b@.
--
-- * 'genRange' is not strict.
--
-- The second condition ensures that 'genRange' cannot examine its
-- argument, and hence the value it returns can be determined only by the
-- instance of 'RandomGen'. That in turn allows an implementation to make
-- a single call to 'genRange' to establish a generator's range, without
-- being concerned that the generator returned by (say) 'next' might have
-- a different range to the generator passed to 'next'.
genRange :: g -> (Int,Int)
-- default method
genRange g = (minBound,maxBound)
{- |The "System.Random" library provides one instance of 'RandomGen', the
abstract data type 'StdGen'.
The 'StdGen' instance of 'RandomGen' has a 'genRange' of at least 30 bits.
The result of repeatedly using 'next' should be at least as statistically
robust as the /Minimal Standard Random Number Generator/ described by
["System.Random\#Park", "System.Random\#Carta"].
Until more is known about implementations of 'split', all we require is
that 'split' deliver generators that are (a) not identical and
(b) independently robust in the sense just given.
The 'Show' and 'Read' instances of 'StdGen' provide a primitive way to save the
state of a random number generator.
It is required that @'read' ('show' g) == g@.
In addition, 'read' may be used to map an arbitrary string (not necessarily one
produced by 'show') onto a value of type 'StdGen'. In general, the 'read'
instance of 'StdGen' has the following properties:
* It guarantees to succeed on any string.
* It guarantees to consume only a finite portion of the string.
* Different argument strings are likely to result in different results.
-}
data StdGen
= StdGen Int Int
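-- For example, with this representation show (mkStdGen 42) yields "43 1";
-- reading that string back gives an equivalent generator (there is no Eq
-- instance, but 'next' behaves identically on both values).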
instance RandomGen StdGen where
next = stdNext
split = stdSplit
genRange _ = stdRange
instance Show StdGen where
showsPrec p (StdGen s1 s2) =
showsPrec p s1 .
showChar ' ' .
showsPrec p s2
instance Read StdGen where
readsPrec _p = \ r ->
case try_read r of
r@[_] -> r
_ -> [stdFromString r] -- because it shouldn't ever fail.
where
try_read r = do
(s1, r1) <- readDec (dropWhile isSpace r)
(s2, r2) <- readDec (dropWhile isSpace r1)
return (StdGen s1 s2, r2)
{-
If we cannot unravel the StdGen from a string, create
one based on the string given.
-}
stdFromString :: String -> (StdGen, String)
stdFromString s = (mkStdGen num, rest)
where (cs, rest) = splitAt 6 s
num = foldl (\a x -> x + 3 * a) 1 (map ord cs)
{- |
The function 'mkStdGen' provides an alternative way of producing an initial
generator, by mapping an 'Int' into a generator. Again, distinct arguments
should be likely to produce distinct generators.
Programmers may, of course, supply their own instances of 'RandomGen'.
-}
mkStdGen :: Int -> StdGen -- why not Integer ?
mkStdGen s
| s < 0 = mkStdGen (-s)
| otherwise = StdGen (s1+1) (s2+1)
where
(q, s1) = s `divMod` 2147483562
s2 = q `mod` 2147483398
createStdGen :: Integer -> StdGen
createStdGen s
| s < 0 = createStdGen (-s)
| otherwise = StdGen (fromInteger (s1+1)) (fromInteger (s2+1))
where
(q, s1) = s `divMod` 2147483562
s2 = q `mod` 2147483398
-- FIXME: 1/2/3 below should be ** (vs@30082002) XXX
{- |
With a source of random number supply in hand, the 'Random' class allows the
programmer to extract random values of a variety of types.
Minimal complete definition: 'randomR' and 'random'.
-}
class Random a where
-- | Takes a range /(lo,hi)/ and a random number generator
-- /g/, and returns a random value uniformly distributed in the closed
-- interval /[lo,hi]/, together with a new generator. It is unspecified
-- what happens if /lo>hi/. For continuous types there is no requirement
-- that the values /lo/ and /hi/ are ever produced, but they may be,
-- depending on the implementation and the interval.
randomR :: RandomGen g => (a,a) -> g -> (a,g)
-- | The same as 'randomR', but using a default range determined by the type:
--
-- * For bounded types (instances of 'Bounded', such as 'Char'),
-- the range is normally the whole type.
--
-- * For fractional types, the range is normally the semi-closed interval
-- @[0,1)@.
--
-- * For 'Integer', the range is (arbitrarily) the range of 'Int'.
random :: RandomGen g => g -> (a, g)
-- | Plural variant of 'randomR', producing an infinite list of
-- random values instead of returning a new generator.
randomRs :: RandomGen g => (a,a) -> g -> [a]
randomRs ival g = x : randomRs ival g' where (x,g') = randomR ival g
-- | Plural variant of 'random', producing an infinite list of
-- random values instead of returning a new generator.
randoms :: RandomGen g => g -> [a]
randoms g = (\(x,g') -> x : randoms g') (random g)
-- | A variant of 'randomR' that uses the global random number generator
-- (see "System.Random#globalrng").
randomRIO :: (a,a) -> IO a
randomRIO range = getStdRandom (randomR range)
-- | A variant of 'random' that uses the global random number generator
-- (see "System.Random#globalrng").
randomIO :: IO a
randomIO = getStdRandom random
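-- For instance, take 5 (randomRs ('a','z') (mkStdGen 7)) yields five
-- pseudo-random lower-case letters, while take 5 (randoms (mkStdGen 7) :: [Double])
-- yields five values in the semi-closed interval [0,1).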
instance Random Int where
randomR (a,b) g = randomIvalInteger (toInteger a, toInteger b) g
random g = randomR (minBound,maxBound) g
instance Random Char where
randomR (a,b) g =
case (randomIvalInteger (toInteger (ord a), toInteger (ord b)) g) of
(x,g) -> (chr x, g)
random g = randomR (minBound,maxBound) g
instance Random Bool where
randomR (a,b) g =
case (randomIvalInteger (toInteger (bool2Int a), toInteger (bool2Int b)) g) of
(x, g) -> (int2Bool x, g)
where
bool2Int False = 0
bool2Int True = 1
int2Bool 0 = False
int2Bool _ = True
random g = randomR (minBound,maxBound) g
instance Random Integer where
randomR ival g = randomIvalInteger ival g
random g = randomR (toInteger (minBound::Int), toInteger (maxBound::Int)) g
instance Random Double where
randomR ival g = randomIvalDouble ival id g
random g = randomR (0::Double,1) g
-- hah, so you thought you were saving cycles by using Float?
instance Random Float where
random g = randomIvalDouble (0::Double,1) realToFrac g
randomR (a,b) g = randomIvalDouble (realToFrac a, realToFrac b) realToFrac g
mkStdRNG :: Integer -> IO StdGen
mkStdRNG o = do
ct <- getCPUTime
(TOD sec _) <- getClockTime
return (createStdGen (sec * 12345 + ct + o))
randomIvalInteger :: (RandomGen g, Num a) => (Integer, Integer) -> g -> (a, g)
randomIvalInteger (l,h) rng
| l > h = randomIvalInteger (h,l) rng
| otherwise = case (f n 1 rng) of (v, rng') -> (fromInteger (l + v `mod` k), rng')
where
k = h - l + 1
b = 2147483561
n = iLogBase b k
f 0 acc g = (acc, g)
f n acc g =
let
(x,g') = next g
in
f (n-1) (fromIntegral x + acc * b) g'
randomIvalDouble :: (RandomGen g, Fractional a) => (Double, Double) -> (Double -> a) -> g -> (a, g)
randomIvalDouble (l,h) fromDouble rng
| l > h = randomIvalDouble (h,l) fromDouble rng
| otherwise =
case (randomIvalInteger (toInteger (minBound::Int), toInteger (maxBound::Int)) rng) of
(x, rng') ->
let
scaled_x =
fromDouble ((l+h)/2) +
fromDouble ((h-l) / realToFrac intRange) *
fromIntegral (x::Int)
in
(scaled_x, rng')
intRange :: Integer
intRange = toInteger (maxBound::Int) - toInteger (minBound::Int)
iLogBase :: Integer -> Integer -> Integer
iLogBase b i = if i < b then 1 else 1 + iLogBase b (i `div` b)
stdRange :: (Int,Int)
stdRange = (0, 2147483562)
stdNext :: StdGen -> (Int, StdGen)
-- Returns values in the range stdRange
stdNext (StdGen s1 s2) = (z', StdGen s1'' s2'')
where z' = if z < 1 then z + 2147483562 else z
z = s1'' - s2''
k = s1 `quot` 53668
s1' = 40014 * (s1 - k * 53668) - k * 12211
s1'' = if s1' < 0 then s1' + 2147483563 else s1'
k' = s2 `quot` 52774
s2' = 40692 * (s2 - k' * 52774) - k' * 3791
s2'' = if s2' < 0 then s2' + 2147483399 else s2'
stdSplit :: StdGen -> (StdGen, StdGen)
stdSplit std@(StdGen s1 s2)
= (left, right)
where
-- no statistical foundation for this!
left = StdGen new_s1 t2
right = StdGen t1 new_s2
new_s1 | s1 == 2147483562 = 1
| otherwise = s1 + 1
new_s2 | s2 == 1 = 2147483398
| otherwise = s2 - 1
StdGen t1 t2 = snd (next std)
-- The global random number generator
{- $globalrng #globalrng#
There is a single, implicit, global random number generator of type
'StdGen', held in some global variable maintained by the 'IO' monad. It is
initialised automatically in some system-dependent fashion, for example, by
using the time of day, or Linux's kernel random number generator. To get
deterministic behaviour, use 'setStdGen'.
-}
-- |Sets the global random number generator.
setStdGen :: StdGen -> IO ()
setStdGen sgen = writeIORef theStdGen sgen
-- |Gets the global random number generator.
getStdGen :: IO StdGen
getStdGen = readIORef theStdGen
theStdGen :: IORef StdGen
theStdGen = unsafePerformIO $ do
rng <- mkStdRNG 0
newIORef rng
-- |Applies 'split' to the current global random generator,
-- updates it with one of the results, and returns the other.
newStdGen :: IO StdGen
newStdGen = do
rng <- getStdGen
let (a,b) = split rng
setStdGen a
return b
{- |Uses the supplied function to get a value from the current global
random generator, and updates the global generator with the new generator
returned by the function. For example, @rollDice@ gets a random integer
between 1 and 6:
> rollDice :: IO Int
> rollDice = getStdRandom (randomR (1,6))
-}
getStdRandom :: (StdGen -> (a,StdGen)) -> IO a
getStdRandom f = do
rng <- getStdGen
let (v, new_rng) = f rng
setStdGen new_rng
return v
{- $references
1. FW #Burton# Burton and RL Page, /Distributed random number generation/,
Journal of Functional Programming, 2(2):203-212, April 1992.
2. SK #Park# Park, and KW Miller, /Random number generators -
good ones are hard to find/, Comm ACM 31(10), Oct 1988, pp1192-1201.
3. DG #Carta# Carta, /Two fast implementations of the minimal standard
random number generator/, Comm ACM, 33(1), Jan 1990, pp87-88.
4. P #Hellekalek# Hellekalek, /Don\'t trust parallel Monte Carlo/,
Department of Mathematics, University of Salzburg,
<http://random.mat.sbg.ac.at/~peter/pads98.ps>, 1998.
5. Pierre #LEcuyer# L'Ecuyer, /Efficient and portable combined random
number generators/, Comm ACM, 31(6), Jun 1988, pp742-749.
The Web site <http://random.mat.sbg.ac.at/> is a great source of information.
-}
| kaoskorobase/mescaline | resources/hugs/packages/base/System/Random.hs | gpl-3.0 | 14,104 | 197 | 19 | 3,303 | 2,628 | 1,473 | 1,155 | 176 | 4 |
module Wordify.Rules.Board(Board,
emptyBoard,
allSquares,
placeTile,
occupiedSquareAt,
emptySquaresFrom,
lettersAbove,
lettersBelow,
lettersLeft,
lettersRight,
unoccupiedSquareAt,
prettyPrint) where
import Wordify.Rules.Square
import Wordify.Rules.Pos
import Data.Maybe
import Wordify.Rules.Tile
import qualified Data.Map as Map
import Control.Monad
import Data.Sequence as Seq
import Wordify.Rules.Board.Internal
import Control.Applicative
import Data.List.Split as S
import qualified Data.List as L
import Data.Foldable as F
instance Show Board where
show = prettyPrint
{- |
Returns all the squares on the board, ordered by column then row.
-}
allSquares :: Board -> [(Pos, Square)]
allSquares (Board squares) = Map.toList squares
{- |
Places a tile on a square and yields the new board, if the
target square is empty. Otherwise yields 'Nothing'.
-}
placeTile :: Board -> Tile -> Pos -> Maybe Board
placeTile board tile pos =
(\sq -> insertSquare board pos (putTileOn sq tile)) <$> unoccupiedSquareAt board pos
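-- A minimal usage sketch (the tile value comes from Wordify.Rules.Tile and is
-- assumed here):
--
-- placeCentre :: Tile -> Maybe Board
-- placeCentre tile = posAt (8, 8) >>= placeTile emptyBoard tile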
insertSquare :: Board -> Pos -> Square -> Board
insertSquare (Board squares) pos square = Board $ Map.insert pos square squares
squareAt :: Board -> Pos -> Maybe Square
squareAt (Board squares) = flip Map.lookup squares
{- | Returns the square at a given position if it is not occupied by a tile. Otherwise returns Nothing.-}
unoccupiedSquareAt :: Board -> Pos -> Maybe Square
unoccupiedSquareAt board pos =
squareAt board pos >>= (\sq -> if isOccupied sq then Nothing else Just sq)
{- | Returns the square at a given position if it is occupied by a tile. Otherwise returns Nothing.-}
occupiedSquareAt :: Board -> Pos -> Maybe Square
occupiedSquareAt board pos = squareAt board pos >>= squareIfOccupied
{- | All letters immediately above a given square until a non-occupied square -}
lettersAbove :: Board -> Pos -> Seq (Pos,Square)
lettersAbove board pos = walkFrom board pos above
{- | All letters immediately below a given square until a non-occupied square -}
lettersBelow :: Board -> Pos -> Seq (Pos,Square)
lettersBelow board pos = Seq.reverse $ walkFrom board pos below
{- | All letters immediately left of a given square until a non-occupied square -}
lettersLeft :: Board -> Pos -> Seq (Pos,Square)
lettersLeft board pos = Seq.reverse $ walkFrom board pos left
{- | All letters immediately right of a given square until a non-occupied square -}
lettersRight :: Board -> Pos -> Seq (Pos,Square)
lettersRight board pos = walkFrom board pos right
{- | Finds the empty square positions horizontally or vertically from a given position,
skipping any squares that are occupied by a tile
-}
emptySquaresFrom :: Board -> Pos -> Int -> Direction -> [Pos]
emptySquaresFrom board startPos numSquares direction =
let changingOnwards = [changing .. 15]
in L.take numSquares $
L.filter (isJust . unoccupiedSquareAt board) $
mapMaybe posAt $ zipDirection (repeat constant) changingOnwards
where
(constant, changing) = if direction == Horizontal then (yPos startPos, xPos startPos) else (xPos startPos, yPos startPos)
zipDirection = if (direction == Horizontal) then flip L.zip else L.zip
{- | Pretty prints a board to a human readable string representation. Helpful for development. -}
prettyPrint :: Board -> String
prettyPrint board = rowsWithLabels ++ columnLabelSeparator ++ columnLabels
where
rows = L.transpose . S.chunksOf 15 . map (squareToString . snd) . allSquares
rowsWithLabels = concatMap (\(rowNo, row) -> (rowStr rowNo) ++ concat row ++ "\n") . Prelude.zip [1 .. ] $ (rows board)
rowStr :: Int -> String
rowStr number = if number < 10 then ((show number) ++ " | ") else (show number) ++ "| "
columnLabelSeparator = " " ++ (Prelude.take (15 * 5) $ repeat '-') ++ "\n"
columnLabels = " " ++ (concat $ Prelude.take (15 * 2) . L.intersperse " " . map ( : []) $ ['A' .. ])
squareToString square =
case (tileIfOccupied square) of
Just sq -> maybe " |_| " (\lt -> " |" ++ lt : "| ") $ tileLetter sq
Nothing ->
case square of
(Normal _) -> " N "
(DoubleLetter _) -> " DL "
(TripleLetter _) -> " TL "
(DoubleWord _) -> " DW "
(TripleWord _) -> " TW "
{-
Walks the tiles from a given position in a given direction
until an empty square is found or the boundary of the board
is reached.
-}
walkFrom :: Board -> Pos -> (Pos -> Maybe Pos) -> Seq (Pos,Square)
walkFrom board pos direction = maybe mzero (\(next,sq) ->
(next, sq) <| walkFrom board next direction) neighbourPos
where
neighbourPos = direction pos >>= \nextPos -> occupiedSquareAt board nextPos >>=
\sq -> return (nextPos, sq)
{- |
Creates an empty board.
-}
emptyBoard :: Board
emptyBoard = Board (Map.fromList posSquares)
where
layout =
[["TW","N","N","DL","N","N","N","TW","N","N","N","DL","N","N","TW"]
,["N","DW","N","N","N","TL","N","N","N","TL","N","N","N","DW","N"]
,["N","N","DW","N","N","N","DL","N","DL","N","N","N","DW","N","N"]
,["DL","N","N","DW","N","N","N","DL","N","N","N","DW","N","N","DL"]
,["N","N","N","N","DW","N","N","N","N","N","DW","N","N","N","N"]
,["N","TL","N","N","N","TL","N","N","N","TL","N","N","N","TL","N"]
,["N","N","DL","N","N","N","DL","N","DL","N","N","N","DL","N","N"]
,["TW","N","N","DL","N","N","N","DW","N","N","N","DL","N","N","TW"]
,["N","N","DL","N","N","N","DL","N","DL","N","N","N","DL","N","N"]
,["N","TL","N","N","N","TL","N","N","N","TL","N","N","N","TL","N"]
,["N","N","N","N","DW","N","N","N","N","N","DW","N","N","N","N"]
,["DL","N","N","DW","N","N","N","DL","N","N","N","DW","N","N","DL"]
,["N","N","DW","N","N","N","DL","N","DL","N","N","N","DW","N","N"]
,["N","DW","N","N","N","TL","N","N","N","TL","N","N","N","DW","N"]
,["TW","N","N","DL","N","N","N","TW","N","N","N","DL","N","N","TW"]]
squares = (map . map) toSquare layout
columns = Prelude.zip [1..15] squares
labeledSquares= concatMap (uncurry columnToMapping) columns
columnToMapping columnNo columnSquares = Prelude.zipWith (\sq y -> ((columnNo,y),sq)) columnSquares [1..15]
posSquares = mapMaybe (\((x,y), sq) -> fmap (\pos -> (pos, sq)) (posAt (x,y))) labeledSquares
toSquare :: String -> Square
toSquare "DL" = DoubleLetter Nothing
toSquare "TL" = TripleLetter Nothing
toSquare "DW" = DoubleWord Nothing
toSquare "TW" = TripleWord Nothing
toSquare _ = Normal Nothing
| Happy0/haskellscrabble | src/Wordify/Rules/Board.hs | gpl-3.0 | 7,072 | 0 | 15 | 1,754 | 2,262 | 1,305 | 957 | 108 | 7 |
module Pirandello.Outgoing where
import Control.Concurrent.STM (TVar, atomically, newTVar, readTVar, writeTVar)
import Control.Exception (SomeException)
import Control.Monad.Trans.Class (lift)
import Data.ByteString.Lazy (fromStrict)
import Data.Text (unpack)
import Data.Text.Encoding (encodeUtf8)
import Network.Socket (HostName, ServiceName)
import Network.Socket.ByteString.Lazy (sendAll)
import Pipes.Group (concats)
import Pipes.Network.TCP.UIO (fromSocket)
import Pirandello.Events (EventQueue, ConnectionId, RemoteId)
import Pirandello.Frames (Oversized (Oversized), fromFrames, asFrame)
import Pirandello.Incoming (forwardToLocal)
import Pirandello.Kruger (Address (Address), Manager, krugerLookup)
import Pirandello.SocketUtil (connectToRemote)
import System.Directory.UIO (getTemporaryDirectory)
import UnexceptionalIO (unsafeFromIO)
import UnexceptionalIO.ExceptT (ExceptT, UIO, (<!$), (<!!$), throwE)
type IdCount = TVar Integer
newIdCount ∷ UIO IdCount
newIdCount = unsafeFromIO $ atomically $ newTVar 0
data ConnectDirectError = EIOException SomeException | EOversized deriving (Show)
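-- | Connect directly to a remote peer, send our own identity as a single
-- frame, and forward the framed reply stream to a fresh local socket path,
-- returning the new connection id together with that path.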
connectDirect ∷ EventQueue → IdCount → RemoteId → HostName → ServiceName → ExceptT ConnectDirectError UIO (ConnectionId, FilePath)
connectDirect q idCount name host port = do
remote ← EIOException <!$ connectToRemote host port
nameB ← case asFrame (fromStrict $ encodeUtf8 name) of
Left Oversized → throwE EOversized
Right nameB' → return nameB'
EIOException <!!$ sendAll remote nameB
temp ← lift getTemporaryDirectory
let frs = concats . fromFrames $ fromSocket remote 4096
cint ← lift $ unsafeFromIO $ atomically $ do
cmax ← readTVar idCount
writeTVar idCount $ cmax + 1
return cmax
let cid = 'o' : show cint
path ← EIOException <!$ forwardToLocal q cid temp frs remote
return (cid, path)
data ConnectViaKrugerError = EConnectDirect ConnectDirectError | LookupFailure deriving (Show)
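-- | Like 'connectDirect', but first resolve the target's host and port via a
-- Kruger lookup; yields 'LookupFailure' when the name cannot be resolved.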
connectViaKruger ∷ EventQueue → IdCount → Manager → RemoteId → RemoteId → ExceptT ConnectViaKrugerError UIO (ConnectionId, FilePath)
connectViaKruger q idCount man name toLookup = do
mAddr ← const LookupFailure <!$ krugerLookup man toLookup
case mAddr of
Just (Address host port) → EConnectDirect <!$ connectDirect q idCount name (unpack host) (show port)
Nothing → throwE LookupFailure
| rimmington/pirandello | src/Pirandello/Outgoing.hs | gpl-3.0 | 2,449 | 0 | 13 | 403 | 716 | 383 | 333 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.TargetPools.SetBackup
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Changes a backup target pool\'s configurations.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.targetPools.setBackup@.
module Network.Google.Resource.Compute.TargetPools.SetBackup
(
-- * REST Resource
TargetPoolsSetBackupResource
-- * Creating a Request
, targetPoolsSetBackup
, TargetPoolsSetBackup
-- * Request Lenses
, tpsbProject
, tpsbTargetPool
, tpsbPayload
, tpsbFailoverRatio
, tpsbRegion
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.targetPools.setBackup@ method which the
-- 'TargetPoolsSetBackup' request conforms to.
type TargetPoolsSetBackupResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"regions" :>
Capture "region" Text :>
"targetPools" :>
Capture "targetPool" Text :>
"setBackup" :>
QueryParam "failoverRatio" (Textual Double) :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] TargetReference :>
Post '[JSON] Operation
-- | Changes a backup target pool\'s configurations.
--
-- /See:/ 'targetPoolsSetBackup' smart constructor.
data TargetPoolsSetBackup = TargetPoolsSetBackup'
{ _tpsbProject :: !Text
, _tpsbTargetPool :: !Text
, _tpsbPayload :: !TargetReference
, _tpsbFailoverRatio :: !(Maybe (Textual Double))
, _tpsbRegion :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'TargetPoolsSetBackup' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tpsbProject'
--
-- * 'tpsbTargetPool'
--
-- * 'tpsbPayload'
--
-- * 'tpsbFailoverRatio'
--
-- * 'tpsbRegion'
targetPoolsSetBackup
:: Text -- ^ 'tpsbProject'
-> Text -- ^ 'tpsbTargetPool'
-> TargetReference -- ^ 'tpsbPayload'
-> Text -- ^ 'tpsbRegion'
-> TargetPoolsSetBackup
targetPoolsSetBackup pTpsbProject_ pTpsbTargetPool_ pTpsbPayload_ pTpsbRegion_ =
TargetPoolsSetBackup'
{ _tpsbProject = pTpsbProject_
, _tpsbTargetPool = pTpsbTargetPool_
, _tpsbPayload = pTpsbPayload_
, _tpsbFailoverRatio = Nothing
, _tpsbRegion = pTpsbRegion_
}
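-- An illustrative request (the 'TargetReference' payload is assumed to be
-- built elsewhere; (&) and (?~) are assumed in scope via the lens re-exports
-- of 'Network.Google.Prelude'):
--
--   targetPoolsSetBackup "my-project" "my-pool" someTargetReference "us-central1"
--     & tpsbFailoverRatio ?~ 0.1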
-- | Project ID for this request.
tpsbProject :: Lens' TargetPoolsSetBackup Text
tpsbProject
= lens _tpsbProject (\ s a -> s{_tpsbProject = a})
-- | Name of the TargetPool resource to set a backup pool for.
tpsbTargetPool :: Lens' TargetPoolsSetBackup Text
tpsbTargetPool
= lens _tpsbTargetPool
(\ s a -> s{_tpsbTargetPool = a})
-- | Multipart request metadata.
tpsbPayload :: Lens' TargetPoolsSetBackup TargetReference
tpsbPayload
= lens _tpsbPayload (\ s a -> s{_tpsbPayload = a})
-- | New failoverRatio value for the target pool.
tpsbFailoverRatio :: Lens' TargetPoolsSetBackup (Maybe Double)
tpsbFailoverRatio
= lens _tpsbFailoverRatio
(\ s a -> s{_tpsbFailoverRatio = a})
. mapping _Coerce
-- | Name of the region scoping this request.
tpsbRegion :: Lens' TargetPoolsSetBackup Text
tpsbRegion
= lens _tpsbRegion (\ s a -> s{_tpsbRegion = a})
instance GoogleRequest TargetPoolsSetBackup where
type Rs TargetPoolsSetBackup = Operation
type Scopes TargetPoolsSetBackup =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient TargetPoolsSetBackup'{..}
= go _tpsbProject _tpsbRegion _tpsbTargetPool
_tpsbFailoverRatio
(Just AltJSON)
_tpsbPayload
computeService
where go
= buildClient
(Proxy :: Proxy TargetPoolsSetBackupResource)
mempty
| rueshyna/gogol | gogol-compute/gen/Network/Google/Resource/Compute/TargetPools/SetBackup.hs | mpl-2.0 | 4,763 | 0 | 19 | 1,179 | 646 | 378 | 268 | 101 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, TypeFamilies #-}
module Action.Types
( RequestContext(..)
, Handler(..)
, runHandler
) where
import Control.Applicative (Alternative)
import Control.Monad (MonadPlus)
import Control.Monad.Base (MonadBase)
import Control.Monad.IO.Class (MonadIO)
import Control.Monad.Reader (MonadReader, ReaderT(..))
import Control.Monad.Trans.Control (MonadBaseControl(..))
import Control.Monad.Trans.Resource (MonadThrow, MonadResource(..), runInternalState, InternalState)
import Has
import Model.Id.Types
import Model.Identity
import Model.Party.Types
import Model.Permission.Types
import Model.Time
import HTTP.Client
import HTTP.Request
import Context
import Ingest.Service
import Solr.Service
import Static.Service
import Store.AV
import Store.Types
import Service.DB
import Service.Entropy
import Service.Log
import Service.Mail
import Service.Messages
import Service.Notification
import Service.Passwd
import Service.Types
import Web.Types
data RequestContext = RequestContext
{ requestContext :: !ActionContext
, contextRequest :: !Request
, requestIdentity :: !Identity
}
-- makeHasRec ''RequestContext ['requestContext, 'contextRequest, 'requestIdentity]
instance Has ActionContext RequestContext where
view = requestContext
instance Has Service.DB.DBConn RequestContext where
view = view . requestContext
instance Has Control.Monad.Trans.Resource.InternalState RequestContext where
view = view . requestContext
-- instance Has time-1.6.0.1:Data.Time.Calendar.Days.Day RequestContext where
-- view = (view . requestContext)
instance Has Model.Time.Timestamp RequestContext where
view = contextTimestamp . requestContext
instance Has Service.Types.Secret RequestContext where
view = view . requestContext
instance Has Service.Entropy.Entropy RequestContext where
view = view . requestContext
instance Has Service.Passwd.Passwd RequestContext where
view = view . requestContext
instance Has Service.Log.Logs RequestContext where
view = view . requestContext
instance Has Service.Mail.Mailer RequestContext where
view = serviceMailer . contextService . requestContext
instance Has Service.Messages.Messages RequestContext where
view = view . requestContext
-- instance Has Service.DB.DBPool RequestContext where
-- view = (view . requestContext)
instance Has Store.Types.Storage RequestContext where
view = view . requestContext
instance Has Store.AV.AV RequestContext where
view = view . requestContext
instance Has Web.Types.Web RequestContext where
view = view . requestContext
instance Has HTTP.Client.HTTPClient RequestContext where
view = view . requestContext
instance Has Static.Service.Static RequestContext where
view = view . requestContext
instance Has Ingest.Service.Ingest RequestContext where
view = view . requestContext
instance Has Solr.Service.Solr RequestContext where
view = view . requestContext
instance Has Service.Notification.Notifications RequestContext where
view = view . requestContext
instance Has Service.Types.Service RequestContext where
view = view . requestContext
instance Has Request RequestContext where
view = contextRequest
instance Has Identity RequestContext where
view = requestIdentity
instance Has Model.Permission.Types.Access RequestContext where
view = view . requestIdentity
instance Has (Model.Id.Types.Id Model.Party.Types.Party) RequestContext where
view = view . requestIdentity
instance Has Model.Party.Types.Account RequestContext where
view = siteAccount . view . requestIdentity
instance Has Model.Party.Types.Party RequestContext where
view = view . requestIdentity
instance Has Model.Party.Types.SiteAuth RequestContext where
view = view . requestIdentity
-- | The monad in which route handlers run. At the top, each route 'Action'
-- returns a 'Handler' 'Response'.
newtype Handler a = Handler { unHandler :: ReaderT RequestContext IO a }
deriving
( Functor
, Applicative
, Alternative
, Monad
, MonadPlus
, MonadIO
, MonadBase IO
, MonadThrow
, MonadReader RequestContext
)
{-# INLINE runHandler #-}
runHandler :: Handler a -> RequestContext -> IO a
runHandler (Handler (ReaderT f)) = f
instance MonadResource Handler where
liftResourceT = focusIO . runInternalState
instance MonadBaseControl IO Handler where
type StM Handler a = a
liftBaseWith f = Handler $ liftBaseWith $ \r -> f (r . unHandler)
restoreM = Handler . restoreM
| databrary/databrary | src/Action/Types.hs | agpl-3.0 | 4,496 | 0 | 10 | 677 | 1,037 | 592 | 445 | -1 | -1 |
module QuantLib.Event
(module QuantLib.Event
) where
import QuantLib.Prices
import QuantLib.Time.Date
class Event a where
evDate :: a->Date
evOccured :: a->Date->Bool
evOccured event date = evDate event < date
evOccuredInclude:: a->Date->Bool
evOccuredInclude event date = evDate event <= date
evCompare :: a->a->Ordering
evCompare x y
| evDate x == evDate y = EQ
| evDate x <= evDate y = LT
| otherwise = GT
evEqual :: a->a->Bool
evEqual x y = evDate x == evDate y
-- | Cash flows data type
data CashFlow = CashFlow {
cfDate :: Date,
cfAmount :: Double
} deriving (Show)
instance Event CashFlow where
evDate (CashFlow date _) = date
instance Eq CashFlow where
(==) = evEqual
instance Ord CashFlow where
compare = evCompare
-- | Sequence of cash-flows
type Leg = [CashFlow]
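-- A small sketch: because the Ord instance above compares by date (via
-- evCompare), a Leg can be put in chronological order with Data.List.sort:
--
-- sortLeg :: Leg -> Leg
-- sortLeg = Data.List.sort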
data Callability = Call {
cPrice :: CallPrice,
cDate :: Date
} | Put {
cPrice :: CallPrice,
cDate :: Date
} deriving (Show)
instance Event Callability where
evDate = cDate
instance Eq Callability where
(==) = evEqual
instance Ord Callability where
compare = evCompare
| paulrzcz/hquantlib | src/QuantLib/Event.hs | lgpl-3.0 | 1,368 | 0 | 11 | 500 | 380 | 205 | 175 | 41 | 0 |
module Bindings.Verba.HL.Helpers where
import Data.ByteString.Char8
import qualified Data.ByteString.Unsafe as BSU
import System.IO.Unsafe
import qualified GHC.IO.Encoding as GHC
import qualified GHC.Foreign as GHC
-- Every time someone writes `unsafePerformIO` a cute kitty dies!
cp1251 :: GHC.TextEncoding
{-# NOINLINE cp1251 #-}
cp1251 = unsafePerformIO $ GHC.mkTextEncoding "cp1251"
decodeCp1251 :: ByteString -> String
decodeCp1251 bs =
unsafePerformIO $ BSU.unsafeUseAsCStringLen bs $ \str ->
GHC.peekCStringLen cp1251 str
| Macil-dev/verhface-hl | src/Bindings/Verba/HL/Helpers.hs | unlicense | 513 | 0 | 8 | 92 | 103 | 63 | 40 | 11 | 1 |
{-# OPTIONS_GHC -Wno-unused-do-bind #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module X03_LedgerTamperEvident where
import qualified Control.Concurrent.Async as Async
import qualified Crypto.Hash.SHA256 as SHA
import qualified Data.Atomics as A
import qualified Data.ByteString as BS
import qualified Data.Concurrent.Queue.MichaelScott as Q
import qualified Data.IORef as IOR
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Prelude
import RIO
import qualified System.Log.Logger as Log
------------------------------------------------------------------------------
import Config
import Ledger
import Logging
import X00_Base
import X02_LedgerWithPool (miner)
type BHash = ByteString
type BData = T.Text
data Block = Block
{ bPrevHash :: ! BHash -- ^ hash of previous block
, bData :: ! BData -- ^ this block's data
} deriving (Eq, Show)
calculateHash :: BHash -> BData -> BHash
calculateHash p d = SHA.hash (BS.concat [p, TE.encodeUtf8 d]) -- TODO: serialize
genesisBlock :: Block
genesisBlock = Block
{ bPrevHash = "0"
, bData = "March 22, 2018"
}
-- | Returns `Right ()` if valid.
-- otherwise returns `Left reason`.
isValidBlockchain :: Seq.Seq Block -> Either T.Text ()
isValidBlockchain bc = do
when (Seq.length bc == 0) (Left "empty blockchain")
when (Seq.length bc == 1 && Seq.index bc 0 /= genesisBlock) (Left "invalid genesis block")
let elements = toList bc
-- `sequence_` causes function to return on/with first `Left` value
sequence_ (map isValidBlock (Prelude.zip3 [1 .. ] elements (Prelude.tail elements)))
return ()
-- | Given a valid previous block and a block to check.
-- | Returns `Just ()` if valid.
-- otherwise `Left reason`
isValidBlock :: (Int, Block, Block) -> Either Text ()
isValidBlock (i, validBlock, checkBlock) =
if hashBlock validBlock /= bPrevHash checkBlock then
Left ("invalid bPrevHash at " <> T.pack (show i))
else
Right ()
where
hashBlock b = calculateHash (bPrevHash b) (bData b)
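-- A worked sketch of the chaining rule above: a block whose bPrevHash equals
-- the hash of the genesis block extends the chain validly.
--
-- >>> let b1 = Block (calculateHash (bPrevHash genesisBlock) (bData genesisBlock)) "tx1"
-- >>> isValidBlockchain (Seq.fromList [genesisBlock, b1])
-- Right ()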
createLedgerWithBlocks
:: IO (Ledger Block)
createLedgerWithBlocks = do
r <- IOR.newIORef (Seq.singleton genesisBlock)
return Ledger
{ lContents = IOR.readIORef r
, lCommit = \_ (Block _ d) -> A.atomicModifyIORefCAS_ r $ \existing ->
let l = Seq.length existing
(Block ph pd) = Seq.index existing (l - 1)
h = calculateHash ph pd
in existing Seq.|> Block h d
, lModify = \i a -> A.atomicModifyIORefCAS_ r $ \existing -> Seq.update i a existing
, lCheck = do
c <- IOR.readIORef r
return $
case isValidBlockchain c of
Left err -> Just err
_ -> Nothing
, fromByteString = Block "0" . TE.decodeUtf8
}
runLedgerTamperEvident :: IO ()
runLedgerTamperEvident = do
l <- createLedgerWithBlocks
q <- Q.newQ
let e = defaultConfig
committer = lCommit l e
txHandler tx = do
Q.pushL q tx
Log.infoM lTAMPEREVIDENT ("POOLED: " <> show tx)
Async.replicateConcurrently_ (cNumMiners (getConfig e)) (miner q committer)
`Async.concurrently_`
runServerAndClients e l txHandler
| haroldcarr/learn-haskell-coq-ml-etc | haskell/playpen/blockchain/2018-06-maurice-herlihy-blockchains-from-distributed-computing-perspective/src/X03_LedgerTamperEvident.hs | unlicense | 3,572 | 4 | 19 | 1,009 | 913 | 492 | 421 | 82 | 2 |
#!/usr/bin/env stack
-- stack --install-ghc runghc --package bytestring --package text --package amqp --package uuid
{-# LANGUAGE OverloadedStrings #-}
import Control.Concurrent (MVar, newEmptyMVar, putMVar,
takeMVar)
import Control.Monad (when)
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.Maybe (fromJust)
import Data.Text (Text)
import Data.UUID (toText)
import Data.UUID.V4 (nextRandom)
import Network.AMQP
type QueueName = Text
main :: IO ()
main = do
conn <- openConnection "127.0.0.1" "/" "guest" "guest"
ch <- openChannel conn
putStrLn " [x] Requesting fib(30)"
res <- callFib ch rpcQueue 30
putStrLn $ " [.] Got '" ++ show res ++ "'"
closeConnection conn
where
rpcQueue = "rpc_queue"
callFib :: Channel -> QueueName -> Int -> IO Int
callFib ch queue n = do
cid <- genCorrelationId
rqn <- declareReplyQueue
let body = BL.pack . show $ n
let message = newMsg {msgCorrelationID = Just cid, msgReplyTo = Just rqn, msgBody = body}
publishMsg ch "" queue message
m <- newEmptyMVar
consumeMsgs ch rqn Ack $ handleResponse cid m
res <- takeMVar m
return res
where
genCorrelationId = toText <$> nextRandom
declareReplyQueue = do
let opts = newQueue {queueAutoDelete = True, queueExclusive = True}
(rqn, _, _) <- declareQueue ch opts
return rqn
handleResponse :: Text -> MVar Int -> (Message, Envelope) -> IO ()
handleResponse corrId m (msg, envelope) = do
let msgCorrId = fromJust (msgCorrelationID msg)
when (msgCorrId == corrId) $ do
res <- readIO (BL.unpack . msgBody $ msg)
putMVar m res
ackEnv envelope
| rabbitmq/rabbitmq-tutorials | haskell/rpcClient.hs | apache-2.0 | 1,869 | 0 | 15 | 579 | 528 | 267 | 261 | 43 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
module Main where
import Control.Concurrent (forkIO, threadDelay)
import Control.Concurrent.MVar
import Control.Lens
import Control.Wire hiding (unless)
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Data.String (fromString)
import Network
import Prelude hiding ((.), id)
import System.Timeout (timeout)
import Test.Hspec
import Test.Hspec.Runner
import Test.Hspec.QuickCheck
import Test.QuickCheck
import Test.QuickCheck.Monadic
import Control.Scrobbler.Announce
import Control.Scrobbler.Netwire (mkFixM)
import Control.Scrobbler.Network
import Control.Scrobbler.Types
instance Arbitrary Track where
arbitrary = Track
<$> (fmap fromString arbitrary)
<*> (fmap fromString arbitrary)
<*> (fmap fromString arbitrary)
<*> arbitrary
instance Arbitrary a => Arbitrary (Stamped a) where
arbitrary = Stamped
<$> arbitrary
<*> arbitrary
<*> arbitrary
main :: IO ()
main = do
b <- newEmptyMVar
hspecWith options $ do
describe "announcements" $
prop "does nothing with its argument" $ announcement_is_id
describe "communication" $ do
it "correctly maintains the queue of failures" $ do
let ds = ["AAAA", "BBBB", "CCCC", "DDDD"]
(_, w) <- stepWire (send (defaultNetworkSettings & port .~ PortNumber 4567)) mempty (Right "AAAA")
(_, w) <- stepWire w mempty (Right "BBBB")
(_, w) <- stepWire w mempty (Right "CCCC")
forkIO $ do
(r, _) <- stepWire (receiver (PortNumber 4567)) mempty (Right ())
case r of
Right rs -> putMVar b rs
Left _ -> return ()
threadDelay 100000
(_, _) <- stepWire w mempty (Right "DDDD")
ds' <- takeMVar b
ds' `shouldBe` ds
it "correctly does not maintain the queue of failures" $ do
let ds = ["DDDD"]
(_, w) <- stepWire (send (defaultNetworkSettings & port .~ PortNumber 4568 & failures .~ Drop)) mempty (Right "AAAA")
(_, w) <- stepWire w mempty (Right "BBBB")
(_, w) <- stepWire w mempty (Right "CCCC")
forkIO $ do
(r, _) <- stepWire (receiver (PortNumber 4568)) mempty (Right ())
case r of
Right rs -> putMVar b rs
Left _ -> return ()
threadDelay 100000
(_, _) <- stepWire w mempty (Right "DDDD")
ds' <- takeMVar b
ds' `shouldBe` ds
where
options = defaultConfig { configQuickCheckMaxSuccess = Just 500 }
-- Since 'announce' is sufficiently polymorphic
-- it's enough to check property on 'Track' only
announcement_is_id :: Stamped Track -> Property
announcement_is_id t = monadicIO $ do
x <- run $ do
(et, _) <- stepWire announce mempty (Right t)
return $ case et of
Right t' -> t == t'
_ -> False
assert x
receiver :: PortID -> Wire (Timed NominalDiffTime ()) e IO () [ByteString]
receiver pid = mkFixM $ \_dt () -> do
s <- listenOn pid
xs <- go s
return (Right xs)
where
go s = do
r <- timeout 1000000 (accept s)
case r of
Just (h, _, _) -> do
[n] <- B.unpack <$> B.hGet h 1
bs <- B.hGet h (fromIntegral n)
(bs :) <$> go s
Nothing -> return []
| supki/scrobblers | tests/behaviours.hs | bsd-2-clause | 3,480 | 0 | 24 | 1,024 | 1,153 | 583 | 570 | 91 | 3 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE PackageImports #-}
---------------------------------------------------------
--
-- Module : Control.Monad.Attempt
-- Copyright : Michael Snoyman
-- License : BSD3
--
-- Maintainer : Michael Snoyman <[email protected]>
-- Stability : Unstable
-- Portability : portable
--
---------------------------------------------------------
-- | Provide a monad transformer for the attempt monad, which allows the
-- reporting of errors using extensible exceptions.
module Control.Monad.Attempt
( AttemptT (..)
, evalAttemptT
, attemptT
, attemptTIO
, module Data.Attempt
) where
import Data.Attempt
import Control.Applicative
import Control.Monad
#if MIN_VERSION_transformers(0,2,0)
import "transformers" Control.Monad.Trans.Class
import "transformers" Control.Monad.IO.Class
#else
import "transformers" Control.Monad.Trans
#endif
import Control.Exception (Exception)
newtype AttemptT m v = AttemptT {
runAttemptT :: m (Attempt v)
}
instance Monad m => Functor (AttemptT m) where
fmap f = AttemptT . liftM (liftM f) . runAttemptT
instance Monad m => Applicative (AttemptT m) where
pure = return
(<*>) = ap
instance Monad m => Monad (AttemptT m) where
return = AttemptT . return . return
(AttemptT mv) >>= f = AttemptT $ do
v <- mv
attempt (return . failure) (runAttemptT . f) v
instance (Exception e, Monad m) => Failure e (AttemptT m) where
failure = AttemptT . return . failure
instance (Monad m, Exception e) => WrapFailure e (AttemptT m) where
wrapFailure f (AttemptT mv) = AttemptT $ liftM (wrapFailure f) mv
instance MonadTrans AttemptT where
lift = AttemptT . liftM return where
instance MonadIO m => MonadIO (AttemptT m) where
liftIO = AttemptT . liftM return . liftIO where
instance Monad m => FromAttempt (AttemptT m) where
fromAttempt = attempt failure return
-- | Instances of 'FromAttempt' specify a manner for embedding 'Attempt'
-- failures directly into the target data type. For example, the 'IO' instance
-- simply throws a runtime error. This is a convenience wrapper when you simply
-- want to use that default action.
--
-- So given a type 'AttemptT' 'IO' 'Int', this function will convert it to 'IO'
-- 'Int', throwing any exceptions in the original value.
evalAttemptT :: (Monad m, FromAttempt m)
=> AttemptT m v
-> m v
evalAttemptT = join . liftM fromAttempt . runAttemptT where
-- | The equivalent of 'attempt' for transformers. Given a success and failure
-- handler, eliminates the 'AttemptT' portion of the transformer stack.
attemptT :: Monad m
=> (forall e. Exception e => e -> b)
-> (a -> b)
-> AttemptT m a
-> m b
attemptT s f = liftM (attempt s f) . runAttemptT
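-- For example, a handler that swallows any failure and falls back to a
-- default value (names are illustrative):
--
-- > recover :: AttemptT IO Int -> IO Int
-- > recover = attemptT (const 0) id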
-- | Catches runtime (ie, IO) exceptions and represents them in an 'AttemptT'
-- transformer.
--
-- Like 'handle', the first argument to this function must explicitly state the
-- type of its input.
attemptTIO :: (Exception eIn, Exception eOut)
=> (eIn -> eOut)
-> IO v
-> AttemptT IO v
attemptTIO f = AttemptT . attemptIO f
| snoyberg/control-monad-attempt | Control/Monad/Attempt.hs | bsd-2-clause | 3,261 | 0 | 11 | 691 | 682 | 374 | 308 | 53 | 1 |
-- -*- Mode: Haskell; -*-
--
-- QuickCheck tests for Megaparsec's primitive parser combinators.
--
-- Copyright © 2015 Megaparsec contributors
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are
-- met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
--
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- This software is provided by the copyright holders "as is" and any
-- express or implied warranties, including, but not limited to, the implied
-- warranties of merchantability and fitness for a particular purpose are
-- disclaimed. In no event shall the copyright holders be liable for any
-- direct, indirect, incidental, special, exemplary, or consequential
-- damages (including, but not limited to, procurement of substitute goods
-- or services; loss of use, data, or profits; or business interruption)
-- however caused and on any theory of liability, whether in contract,
-- strict liability, or tort (including negligence or otherwise) arising in
-- any way out of the use of this software, even if advised of the
-- possibility of such damage.
{-# OPTIONS -fno-warn-orphans #-}
module Prim (tests) where
import Control.Applicative
import Control.Monad (guard)
import Data.Bool (bool)
import Data.Char (isLetter)
import Data.Foldable (asum)
import Data.List (isPrefixOf)
import Data.Maybe (maybeToList, fromMaybe)
import Test.Framework
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.QuickCheck
import Text.Megaparsec.Char
import Text.Megaparsec.Error (Message (..))
import Text.Megaparsec.Pos
import Text.Megaparsec.Prim
import Text.Megaparsec.String
import Pos ()
import Util
tests :: Test
tests = testGroup "Primitive parser combinators"
[ testProperty "ParsecT functor" prop_functor
, testProperty "ParsecT applicative (<*>)" prop_applicative_0
, testProperty "ParsecT applicative (*>)" prop_applicative_1
, testProperty "ParsecT applicative (<*)" prop_applicative_2
, testProperty "ParsecT alternative empty and (<|>)" prop_alternative_0
, testProperty "ParsecT alternative (<|>)" prop_alternative_1
, testProperty "ParsecT alternative (<|>) pos" prop_alternative_2
, testProperty "ParsecT alternative (<|>) hints" prop_alternative_3
, testProperty "ParsecT alternative many" prop_alternative_4
, testProperty "ParsecT alternative some" prop_alternative_5
, testProperty "ParsecT alternative optional" prop_alternative_6
, testProperty "ParsecT monad return" prop_monad_0
, testProperty "ParsecT monad (>>)" prop_monad_1
, testProperty "ParsecT monad (>>=)" prop_monad_2
, testProperty "ParsecT monad fail" prop_monad_3
, testProperty "combinator unexpected" prop_unexpected
, testProperty "combinator label" prop_label
, testProperty "combinator hidden hints" prop_hidden_0
, testProperty "combinator hidden error" prop_hidden_1
, testProperty "combinator try" prop_try
, testProperty "combinator lookAhead" prop_lookAhead_0
, testProperty "combinator lookAhead hints" prop_lookAhead_1
, testProperty "combinator lookAhead messages" prop_lookAhead_2
, testProperty "combinator notFollowedBy" prop_notFollowedBy_0
, testProperty "combinator notFollowedBy twice" prop_notFollowedBy_1
, testProperty "combinator notFollowedBy eof" prop_notFollowedBy_2
, testProperty "combinator token" prop_token
, testProperty "combinator tokens" prop_tokens
, testProperty "parser state position" prop_state_pos
, testProperty "parser state input" prop_state_input
, testProperty "parser state general" prop_state
, testProperty "user state set and get" prop_user_state
, testProperty "user state backtracking" prop_user_backtrack ]
instance Arbitrary u => Arbitrary (State String u) where
arbitrary = State <$> arbitrary <*> arbitrary <*> arbitrary
-- Functor instance
prop_functor :: Integer -> Integer -> Property
prop_functor n m =
((+ m) <$> return n) /=\ n + m .&&. ((* n) <$> return m) /=\ n * m
-- Applicative instance
prop_applicative_0 :: Integer -> Integer -> Property
prop_applicative_0 n m = ((+) <$> pure n <*> pure m) /=\ n + m
prop_applicative_1 :: Integer -> Integer -> Property
prop_applicative_1 n m = (pure n *> pure m) /=\ m
prop_applicative_2 :: Integer -> Integer -> Property
prop_applicative_2 n m = (pure n <* pure m) /=\ n
-- Alternative instance
prop_alternative_0 :: Integer -> Property
prop_alternative_0 n = (empty <|> return n) /=\ n
prop_alternative_1 :: String -> String -> Property
prop_alternative_1 s0 s1
| s0 == s1 = checkParser p (Right s0) s1
| null s0 = checkParser p (posErr 0 s1 [uneCh (head s1), exEof]) s1
| s0 `isPrefixOf` s1 =
checkParser p (posErr s0l s1 [uneCh (s1 !! s0l), exEof]) s1
| otherwise = checkParser p (Right s0) s0 .&&. checkParser p (Right s1) s1
where p = try (string s0) <|> string s1
s0l = length s0
prop_alternative_2 :: Char -> Char -> Char -> Bool -> Property
prop_alternative_2 a b c l = checkParser p r s
where p = char a <|> (char b >> char a)
r | l = Right a
| a == b = posErr 1 s [uneCh c, exEof]
| a == c = Right a
| otherwise = posErr 1 s [uneCh c, exCh a]
s = if l then [a] else [b,c]
prop_alternative_3 :: Property
prop_alternative_3 = checkParser p r s
where p = asum [empty, try (string ">>>"), empty, return "foo"] <?> "bar"
p' = bsum [empty, try (string ">>>"), empty, return "foo"] <?> "bar"
bsum = foldl (<|>) empty
r = simpleParse p' s
s = ">>"
prop_alternative_4 :: NonNegative Int -> NonNegative Int -> NonNegative Int ->
Property
prop_alternative_4 a' b' c' = checkParser p r s
where [a,b,c] = getNonNegative <$> [a',b',c']
p = (++) <$> many (char 'a') <*> many (char 'b')
r | null s = Right s
| c > 0 = posErr (a + b) s $ [uneCh 'c', exCh 'b', exEof]
++ [exCh 'a' | b == 0]
| otherwise = Right s
s = abcRow a b c
prop_alternative_5 :: NonNegative Int -> NonNegative Int -> NonNegative Int ->
Property
prop_alternative_5 a' b' c' = checkParser p r s
where [a,b,c] = getNonNegative <$> [a',b',c']
p = (++) <$> some (char 'a') <*> some (char 'b')
r | null s = posErr 0 s [uneEof, exCh 'a']
| a == 0 = posErr 0 s [uneCh (head s), exCh 'a']
| b == 0 = posErr a s $ [exCh 'a', exCh 'b'] ++
if c > 0 then [uneCh 'c'] else [uneEof]
| c > 0 = posErr (a + b) s [uneCh 'c', exCh 'b', exEof]
| otherwise = Right s
s = abcRow a b c
prop_alternative_6 :: Bool -> Bool -> Bool -> Property
prop_alternative_6 a b c = checkParser p r s
where p = f <$> optional (char 'a') <*> optional (char 'b')
f x y = maybe "" (:[]) x ++ maybe "" (:[]) y
r | c = posErr ab s $ [uneCh 'c', exEof] ++
[exCh 'a' | not a && not b] ++ [exCh 'b' | not b]
| otherwise = Right s
s = abcRow' a b c
ab = fromEnum a + fromEnum b
-- Monad instance
prop_monad_0 :: Integer -> Property
prop_monad_0 n = checkParser (return n) (Right n) ""
prop_monad_1 :: Char -> Char -> Maybe Char -> Property
prop_monad_1 a b c = checkParser p r s
where p = char a >> char b
r = simpleParse (char a *> char b) s
s = a : b : maybeToList c
prop_monad_2 :: Char -> Char -> Maybe Char -> Property
prop_monad_2 a b c = checkParser p r s
where p = char a >>= \x -> char b >> return x
r = simpleParse (char a <* char b) s
s = a : b : maybeToList c
prop_monad_3 :: String -> Property
prop_monad_3 m = checkParser p r s
where p = fail m :: Parser ()
r | null m = posErr 0 s []
| otherwise = posErr 0 s [msg m]
s = ""
-- TODO MonadReader instance
-- TODO MonadState instance
-- Primitive combinators
prop_unexpected :: String -> Property
prop_unexpected m = checkParser p r s
where p = unexpected m :: Parser ()
r | null m = posErr 0 s []
| otherwise = posErr 0 s [uneSpec m]
s = ""
prop_label :: NonNegative Int -> NonNegative Int -> NonNegative Int ->
String -> Property
prop_label a' b' c' l = checkParser p r s
where [a,b,c] = getNonNegative <$> [a',b',c']
p = (++) <$> many (char 'a') <*> (many (char 'b') <?> l)
r | null s = Right s
| c > 0 = posErr (a + b) s $ [uneCh 'c', exSpec l, exEof]
++ [exCh 'a' | b == 0]
| otherwise = Right s
s = abcRow a b c
prop_hidden_0 :: NonNegative Int -> NonNegative Int -> NonNegative Int ->
Property
prop_hidden_0 a' b' c' = checkParser p r s
where [a,b,c] = getNonNegative <$> [a',b',c']
p = (++) <$> many (char 'a') <*> hidden (many (char 'b'))
r | null s = Right s
| c > 0 = posErr (a + b) s $ [uneCh 'c', exEof]
++ [exCh 'a' | b == 0]
| otherwise = Right s
s = abcRow a b c
prop_hidden_1 :: String -> NonEmptyList Char -> String -> Property
prop_hidden_1 a c' s = checkParser p r s
where c = getNonEmpty c'
p = fromMaybe a <$> optional (hidden $ string c)
r | null s = Right a
| c == s = Right s
| head c /= head s = posErr 0 s [uneCh (head s), exEof]
| otherwise = simpleParse (string c) s
prop_try :: String -> String -> String -> Property
prop_try pre s1' s2' = checkParser p r s
where s1 = pre ++ s1'
s2 = pre ++ s2'
p = try (string s1) <|> string s2
r | s == s1 || s == s2 = Right s
| otherwise = posErr 0 s $ bool [uneStr pre] [uneEof] (null s)
++ [uneStr pre, exStr s1, exStr s2]
s = pre
prop_lookAhead_0 :: Bool -> Bool -> Bool -> Property
prop_lookAhead_0 a b c = checkParser p r s
where p = do
l <- lookAhead (oneOf "ab" <?> "label")
guard (l == h)
char 'a'
h = head s
r | null s = posErr 0 s [uneEof, exSpec "label"]
| s == "a" = Right 'a'
| h == 'b' = posErr 0 s [uneCh 'b', exCh 'a']
| h == 'c' = posErr 0 s [uneCh 'c', exSpec "label"]
| otherwise = posErr 1 s [uneCh (s !! 1), exEof]
s = abcRow' a b c
prop_lookAhead_1 :: String -> Property
prop_lookAhead_1 s = checkParser p r s
where p = lookAhead (some letterChar) >> fail "failed" :: Parser ()
h = head s
r | null s = posErr 0 s [uneEof, exSpec "letter"]
| isLetter h = posErr 0 s [msg "failed"]
| otherwise = posErr 0 s [uneCh h, exSpec "letter"]
prop_lookAhead_2 :: Bool -> Bool -> Bool -> Property
prop_lookAhead_2 a b c = checkParser p r s
where p = lookAhead (some (char 'a')) >> char 'b'
r | null s = posErr 0 s [uneEof, exCh 'a']
| a = posErr 0 s [uneCh 'a', exCh 'b']
| otherwise = posErr 0 s [uneCh (head s), exCh 'a']
s = abcRow' a b c
prop_notFollowedBy_0 :: NonNegative Int -> NonNegative Int -> NonNegative Int ->
Property
prop_notFollowedBy_0 a' b' c' = checkParser p r s
where [a,b,c] = getNonNegative <$> [a',b',c']
p = many (char 'a') <* notFollowedBy (char 'b') <* many (char 'c')
r | b > 0 = posErr a s [uneCh 'b', exCh 'a']
| otherwise = Right (replicate a 'a')
s = abcRow a b c
prop_notFollowedBy_1 :: NonNegative Int -> NonNegative Int -> NonNegative Int ->
Property
prop_notFollowedBy_1 a' b' c' = checkParser p r s
where [a,b,c] = getNonNegative <$> [a',b',c']
p = many (char 'a') <* f (char 'c') <* many (char 'c')
f = notFollowedBy . notFollowedBy -- = 'lookAhead' in this case
r | b == 0 && c > 0 = Right (replicate a 'a')
| b > 0 = posErr a s [uneCh 'b', exCh 'a']
| otherwise = posErr a s [uneEof, exCh 'a']
s = abcRow a b c
prop_notFollowedBy_2 :: NonNegative Int -> NonNegative Int -> NonNegative Int ->
Property
prop_notFollowedBy_2 a' b' c' = checkParser p r s
where [a,b,c] = getNonNegative <$> [a',b',c']
p = many (char 'a') <* notFollowedBy eof <* many anyChar
r | b > 0 || c > 0 = Right (replicate a 'a')
| otherwise = posErr a s [uneEof, exCh 'a']
s = abcRow a b c
-- We omit tests for 'eof' here because it's used virtually everywhere, it's
-- already thoroughly tested.
prop_token :: String -> Property
prop_token s = checkParser p r s
where p = token nextPos testChar
nextPos pos x _ = updatePosChar pos x
testChar x = if isLetter x
then Right x
else Left . pure . Unexpected . showToken $ x
h = head s
r | null s = posErr 0 s [uneEof]
| isLetter h && length s == 1 = Right (head s)
| isLetter h && length s > 1 = posErr 1 s [uneCh (s !! 1), exEof]
| otherwise = posErr 0 s [uneCh h]
prop_tokens :: String -> String -> Property
prop_tokens a = checkString p a (==) (showToken a)
where p = tokens updatePosString (==) a
-- Parser state combinators
prop_state_pos :: SourcePos -> Property
prop_state_pos pos = p /=\ pos
where p = setPosition pos >> getPosition
prop_state_input :: String -> Property
prop_state_input s = p /=\ s
where p = do
st0 <- getInput
guard (null st0)
setInput s
result <- string s
st1 <- getInput
guard (null st1)
return result
prop_state :: State String Integer -> State String Integer -> Property
prop_state s1 s2 = runParser p 0 "" "" === Right (f s2 s1)
where f (State s1' pos u1) (State s2' _ u2) =
State (max s1' s2' ) pos (u1 + u2)
p = do
st <- getParserState
guard (st == State "" (initialPos "") 0)
setParserState s1
updateParserState (f s2)
getParserState
-- User state combinators
prop_user_state :: Integer -> Integer -> Property
prop_user_state n m = runParser p 0 "" "" === Right (n + m)
where p = setState n >> modifyState (+ m) >> getState
prop_user_backtrack :: Integer -> Integer -> Property
prop_user_backtrack n m = runParser p 0 "" "" === Right n
where p = setState n >> lookAhead (setState m >> eof) >> getState
| omefire/megaparsec | tests/Prim.hs | bsd-2-clause | 14,783 | 0 | 14 | 4,140 | 5,020 | 2,505 | 2,515 | 280 | 2 |
{- re-export many things to simplify external use -}
module Database.Drasil (
-- ChunkDB
ChunkDB(defTable), RefbyMap, TraceMap, UMap, asOrderedList, cdb, collectUnits
, conceptMap, conceptinsLookup, conceptinsTable, dataDefnTable
, datadefnLookup, defResolve, gendefLookup, gendefTable, generateRefbyMap
, idMap, insmodelLookup, insmodelTable, labelledconLookup, labelledcontentTable
, refbyLookup, refbyTable, sectionLookup, sectionTable, symbResolve
, termResolve, termTable, theoryModelLookup, theoryModelTable, traceLookup
, traceMap, traceTable
-- ChunkDB.GetChunk
, ccss, ccss', combine, getIdeaDict, vars
-- SystemInformation
, Block(Parallel), RefMap, ReferenceDB, SystemInformation(..), citeDB, rdb, simpleMap
) where
import Database.Drasil.ChunkDB
import Database.Drasil.ChunkDB.GetChunk
import Database.Drasil.SystemInformation
| JacquesCarette/literate-scientific-software | code/drasil-database/Database/Drasil.hs | bsd-2-clause | 864 | 0 | 5 | 104 | 178 | 121 | 57 | 17 | 0 |
{-
The haskell base for the Agda implementation of a Relational Algebra
-- Toon Nolten
-}
module RelationalBase (connectSqlite3, read) where
import Prelude hiding (read)
import Database.HDBC.Sqlite3 (connectSqlite3)
import Database.HDBC
type DatabasePath = String
type TableName = String
-- Read : all s -> Handle s -> RA s
read :: IConnection conn => conn -> TableName -> IO [[SqlValue]]
read conn table =
  quickQuery' conn ("SELECT * FROM " ++ table) []
| toonn/haskell-casestt | relational_base.hs | bsd-2-clause | 455 | 1 | 10 | 102 | 92 | 52 | 40 | -1 | -1 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.ATI.ElementArray
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.ATI.ElementArray (
-- * Extension Support
glGetATIElementArray,
gl_ATI_element_array,
-- * Enums
pattern GL_ELEMENT_ARRAY_ATI,
pattern GL_ELEMENT_ARRAY_POINTER_ATI,
pattern GL_ELEMENT_ARRAY_TYPE_ATI,
-- * Functions
glDrawElementArrayATI,
glDrawRangeElementArrayATI,
glElementPointerATI
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
| haskell-opengl/OpenGLRaw | src/Graphics/GL/ATI/ElementArray.hs | bsd-3-clause | 848 | 0 | 5 | 115 | 73 | 53 | 20 | 13 | 0 |
{-# LANGUAGE CPP, MagicHash, ScopedTypeVariables #-}
{-# OPTIONS_GHC -optc-DNON_POSIX_SOURCE #-}
--
-- (c) The University of Glasgow 2002-2006
--
-- | ByteCodeItbls: Generate infotables for interpreter-made bytecodes
module ByteCodeItbls ( mkITbls ) where
#include "HsVersions.h"
import ByteCodeTypes
import GHCi
import DynFlags
import HscTypes
import Name ( Name, getName )
import NameEnv
import DataCon ( DataCon, dataConRepArgTys, dataConIdentity )
import TyCon ( TyCon, tyConFamilySize, isDataTyCon, tyConDataCons )
import RepType ( typePrimRep, repTypeArgs )
import StgCmmLayout ( mkVirtHeapOffsets )
import Util
import Panic
{-
Manufacturing of info tables for DataCons
-}
-- Make info tables for the data decls in this module
mkITbls :: HscEnv -> [TyCon] -> IO ItblEnv
mkITbls hsc_env tcs =
foldr plusNameEnv emptyNameEnv <$>
mapM (mkITbl hsc_env) (filter isDataTyCon tcs)
where
mkITbl :: HscEnv -> TyCon -> IO ItblEnv
mkITbl hsc_env tc
| dcs `lengthIs` n -- paranoia; this is an assertion.
= make_constr_itbls hsc_env dcs
where
dcs = tyConDataCons tc
n = tyConFamilySize tc
mkITbl _ _ = panic "mkITbl"
mkItblEnv :: [(Name,ItblPtr)] -> ItblEnv
mkItblEnv pairs = mkNameEnv [(n, (n,p)) | (n,p) <- pairs]
-- Assumes constructors are numbered from zero, not one
make_constr_itbls :: HscEnv -> [DataCon] -> IO ItblEnv
make_constr_itbls hsc_env cons =
mkItblEnv <$> mapM (uncurry mk_itbl) (zip cons [0..])
where
dflags = hsc_dflags hsc_env
mk_itbl :: DataCon -> Int -> IO (Name,ItblPtr)
mk_itbl dcon conNo = do
let rep_args = [ (typePrimRep rep_arg,rep_arg)
| arg <- dataConRepArgTys dcon
, rep_arg <- repTypeArgs arg ]
(tot_wds, ptr_wds, _) =
mkVirtHeapOffsets dflags False{-not a THUNK-} rep_args
ptrs' = ptr_wds
nptrs' = tot_wds - ptr_wds
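          -- Pad the non-pointer count so that the constructor's payload
          -- meets the runtime's minimum payload size (mIN_PAYLOAD_SIZE).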
nptrs_really
| ptrs' + nptrs' >= mIN_PAYLOAD_SIZE dflags = nptrs'
| otherwise = mIN_PAYLOAD_SIZE dflags - ptrs'
descr = dataConIdentity dcon
r <- iservCmd hsc_env (MkConInfoTable ptrs' nptrs_really conNo descr)
return (getName dcon, ItblPtr r)
| sgillespie/ghc | compiler/ghci/ByteCodeItbls.hs | bsd-3-clause | 2,245 | 0 | 16 | 553 | 558 | 301 | 257 | 47 | 2 |
-- | Helpers for setting up a TLS connection with the @HsOpenSSL@ package;
-- for further customization, please refer to the @HsOpenSSL@ package.
--
-- Note, functions in this module will throw an error if the certificates or the CA store cannot be loaded.
--
module Data.OpenSSLSetting
( -- * choose a CAStore
TrustedCAStore(..)
-- * make TLS settings
, makeClientSSLContext
, makeClientSSLContext'
, makeServerSSLContext
, makeServerSSLContext'
) where
import qualified OpenSSL.X509.SystemStore as X509
import qualified OpenSSL.Session as SSL
import OpenSSL (withOpenSSL)
import Data.TLSSetting (TrustedCAStore(..), mozillaCAStorePath)
makeCAStore :: TrustedCAStore -> SSL.SSLContext -> IO ()
makeCAStore SystemCAStore ctx = X509.contextLoadSystemCerts ctx
makeCAStore MozillaCAStore ctx = SSL.contextSetCAFile ctx =<< mozillaCAStorePath
makeCAStore (CustomCAStore fp) ctx = SSL.contextSetCAFile ctx fp
-- | make a simple 'SSL.SSLContext' that validates the server and uses a TLS connection
-- without providing the client's own certificate. Suitable for connecting to servers
-- that don't validate clients.
--
makeClientSSLContext :: TrustedCAStore -- ^ trusted certificates.
-> IO SSL.SSLContext
makeClientSSLContext tca = withOpenSSL $ do
let caStore = makeCAStore tca
ctx <- SSL.context
caStore ctx
SSL.contextSetDefaultCiphers ctx
SSL.contextSetVerificationMode ctx (SSL.VerifyPeer True True Nothing)
return ctx
-- | make a simple 'SSL.SSLContext' that validates the server and uses a TLS connection
-- while providing the client's own certificate. Suitable for connecting to servers
-- that validate clients.
--
-- The chain certificate must be in PEM format and must be sorted starting with the subject's certificate
-- (actual client or server certificate), followed by intermediate CA certificates if applicable,
-- and ending at the highest level (root) CA.
--
makeClientSSLContext' :: FilePath -- ^ public certificate (X.509 format).
-> [FilePath] -- ^ chain certificate (X.509 format).
-> FilePath -- ^ private key associated.
-> TrustedCAStore -- ^ server will use these certificates to validate clients.
-> IO SSL.SSLContext
makeClientSSLContext' pub certs priv tca = withOpenSSL $ do
let caStore = makeCAStore tca
ctx <- SSL.context
caStore ctx
SSL.contextSetDefaultCiphers ctx
SSL.contextSetCertificateFile ctx pub
SSL.contextSetPrivateKeyFile ctx priv
mapM_ (SSL.contextSetCertificateChainFile ctx) certs
SSL.contextSetVerificationMode ctx (SSL.VerifyPeer True True Nothing)
return ctx
-- | make a simple 'SSL.SSLContext' for servers, without validating clients' certificates.
--
makeServerSSLContext :: FilePath -- ^ public certificate (X.509 format).
-> [FilePath] -- ^ chain certificate (X.509 format).
-> FilePath -- ^ private key associated.
-> IO SSL.SSLContext
makeServerSSLContext pub certs priv = withOpenSSL $ do
ctx <- SSL.context
SSL.contextSetDefaultCiphers ctx
SSL.contextSetCertificateFile ctx pub
SSL.contextSetPrivateKeyFile ctx priv
mapM_ (SSL.contextSetCertificateChainFile ctx) certs
return ctx
-- | make a server 'SSL.SSLContext' that also validates the client's certificate.
--
-- This is an alias for 'makeClientSSLContext''.
--
makeServerSSLContext' :: FilePath -- ^ public certificate (X.509 format).
-> [FilePath] -- ^ chain certificates (X.509 format).
-> FilePath -- ^ private key associated.
-> TrustedCAStore -- ^ server will use these certificates to validate clients.
-> IO SSL.SSLContext
makeServerSSLContext' = makeClientSSLContext'
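-- A minimal usage sketch (kept unexported): build a client-side context that
-- trusts the bundled Mozilla CA store. How the resulting context is attached
-- to a socket is left to the caller and not shown here.
exampleClientContext :: IO SSL.SSLContext
exampleClientContext = makeClientSSLContext MozillaCAStore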
| didi-FP/tcp-streams | tcp-streams-openssl/Data/OpenSSLSetting.hs | bsd-3-clause | 3,932 | 0 | 11 | 955 | 552 | 288 | 264 | 56 | 1 |
-- | Basic operations on graphs.
--
module ETA.Utils.GraphOps (
addNode, delNode, getNode, lookupNode, modNode,
size,
union,
addConflict, delConflict, addConflicts,
addCoalesce, delCoalesce,
addExclusion, addExclusions,
addPreference,
coalesceNodes, coalesceGraph,
freezeNode, freezeOneInGraph, freezeAllInGraph,
scanGraph,
setColor,
validateGraph,
slurpNodeConflictCount
)
where
import ETA.Utils.GraphBase
import ETA.Utils.Outputable
import ETA.BasicTypes.Unique
import ETA.Utils.UniqSet
import ETA.Utils.UniqFM
import Data.List hiding (union)
import Data.Maybe
-- | Lookup a node from the graph.
lookupNode
:: Uniquable k
=> Graph k cls color
-> k -> Maybe (Node k cls color)
lookupNode graph k
= lookupUFM (graphMap graph) k
-- | Get a node from the graph, throwing an error if it's not there
getNode
:: Uniquable k
=> Graph k cls color
-> k -> Node k cls color
getNode graph k
= case lookupUFM (graphMap graph) k of
Just node -> node
Nothing -> panic "ColorOps.getNode: not found"
-- | Add a node to the graph, linking up its edges
addNode :: Uniquable k
=> k -> Node k cls color
-> Graph k cls color -> Graph k cls color
addNode k node graph
= let
-- add back conflict edges from other nodes to this one
map_conflict
= foldUniqSet
(adjustUFM_C (\n -> n { nodeConflicts = addOneToUniqSet (nodeConflicts n) k}))
(graphMap graph)
(nodeConflicts node)
-- add back coalesce edges from other nodes to this one
map_coalesce
= foldUniqSet
(adjustUFM_C (\n -> n { nodeCoalesce = addOneToUniqSet (nodeCoalesce n) k}))
map_conflict
(nodeCoalesce node)
in graph
{ graphMap = addToUFM map_coalesce k node}
-- | Delete a node and all its edges from the graph.
delNode :: (Uniquable k, Outputable k)
=> k -> Graph k cls color -> Maybe (Graph k cls color)
delNode k graph
| Just node <- lookupNode graph k
= let -- delete conflict edges from other nodes to this one.
graph1 = foldl' (\g k1 -> let Just g' = delConflict k1 k g in g') graph
$ uniqSetToList (nodeConflicts node)
-- delete coalesce edge from other nodes to this one.
graph2 = foldl' (\g k1 -> let Just g' = delCoalesce k1 k g in g') graph1
$ uniqSetToList (nodeCoalesce node)
-- delete the node
graph3 = graphMapModify (\fm -> delFromUFM fm k) graph2
in Just graph3
| otherwise
= Nothing
-- | Modify a node in the graph.
-- returns Nothing if the node isn't present.
--
modNode :: Uniquable k
=> (Node k cls color -> Node k cls color)
-> k -> Graph k cls color -> Maybe (Graph k cls color)
modNode f k graph
= case lookupNode graph k of
Just Node{}
-> Just
$ graphMapModify
(\fm -> let Just node = lookupUFM fm k
node' = f node
in addToUFM fm k node')
graph
Nothing -> Nothing
-- | Get the size of the graph, O(n)
size :: Uniquable k
=> Graph k cls color -> Int
size graph
= sizeUFM $ graphMap graph
-- | Union two graphs together.
union :: Uniquable k
=> Graph k cls color -> Graph k cls color -> Graph k cls color
union graph1 graph2
= Graph
{ graphMap = plusUFM (graphMap graph1) (graphMap graph2) }
-- | Add a conflict between nodes to the graph, creating the nodes required.
-- Conflicts are virtual regs which need to be colored differently.
addConflict
:: Uniquable k
=> (k, cls) -> (k, cls)
-> Graph k cls color -> Graph k cls color
addConflict (u1, c1) (u2, c2)
= let addNeighbor u c u'
= adjustWithDefaultUFM
(\node -> node { nodeConflicts = addOneToUniqSet (nodeConflicts node) u' })
(newNode u c) { nodeConflicts = unitUniqSet u' }
u
in graphMapModify
( addNeighbor u1 c1 u2
. addNeighbor u2 c2 u1)
-- | Delete a conflict edge. k1 -> k2
-- returns Nothing if the node isn't in the graph
delConflict
:: Uniquable k
=> k -> k
-> Graph k cls color -> Maybe (Graph k cls color)
delConflict k1 k2
= modNode
(\node -> node { nodeConflicts = delOneFromUniqSet (nodeConflicts node) k2 })
k1
-- | Add some conflicts to the graph, creating nodes if required.
-- All the nodes in the set are taken to conflict with each other.
addConflicts
:: Uniquable k
=> UniqSet k -> (k -> cls)
-> Graph k cls color -> Graph k cls color
addConflicts conflicts getClass
-- just a single node, but no conflicts, create the node anyway.
| (u : []) <- uniqSetToList conflicts
= graphMapModify
$ adjustWithDefaultUFM
id
(newNode u (getClass u))
u
| otherwise
= graphMapModify
$ (\fm -> foldl' (\g u -> addConflictSet1 u getClass conflicts g) fm
$ uniqSetToList conflicts)
addConflictSet1 :: Uniquable k
=> k -> (k -> cls) -> UniqSet k
-> UniqFM (Node k cls color)
-> UniqFM (Node k cls color)
addConflictSet1 u getClass set
= case delOneFromUniqSet set u of
set' -> adjustWithDefaultUFM
(\node -> node { nodeConflicts = unionUniqSets set' (nodeConflicts node) } )
(newNode u (getClass u)) { nodeConflicts = set' }
u
-- | Add an exclusion to the graph, creating nodes if required.
-- These are extra colors that the node cannot use.
addExclusion
:: (Uniquable k, Uniquable color)
=> k -> (k -> cls) -> color
-> Graph k cls color -> Graph k cls color
addExclusion u getClass color
= graphMapModify
$ adjustWithDefaultUFM
(\node -> node { nodeExclusions = addOneToUniqSet (nodeExclusions node) color })
(newNode u (getClass u)) { nodeExclusions = unitUniqSet color }
u
addExclusions
:: (Uniquable k, Uniquable color)
=> k -> (k -> cls) -> [color]
-> Graph k cls color -> Graph k cls color
addExclusions u getClass colors graph
= foldr (addExclusion u getClass) graph colors
-- | Add a coalescence edge to the graph, creating nodes if required.
--      It is considered advantageous to assign the same color to nodes in a coalescence.
addCoalesce
:: Uniquable k
=> (k, cls) -> (k, cls)
-> Graph k cls color -> Graph k cls color
addCoalesce (u1, c1) (u2, c2)
= let addCoalesce u c u'
= adjustWithDefaultUFM
(\node -> node { nodeCoalesce = addOneToUniqSet (nodeCoalesce node) u' })
(newNode u c) { nodeCoalesce = unitUniqSet u' }
u
in graphMapModify
( addCoalesce u1 c1 u2
. addCoalesce u2 c2 u1)
-- | Delete a coalescence edge (k1 -> k2) from the graph.
delCoalesce
:: Uniquable k
=> k -> k
-> Graph k cls color -> Maybe (Graph k cls color)
delCoalesce k1 k2
= modNode (\node -> node { nodeCoalesce = delOneFromUniqSet (nodeCoalesce node) k2 })
k1
-- | Add a color preference to the graph, creating nodes if required.
--      The most recently added preference is the most preferred.
--      The algorithm tries to assign a node its preferred color if possible.
--
addPreference
:: Uniquable k
=> (k, cls) -> color
-> Graph k cls color -> Graph k cls color
addPreference (u, c) color
= graphMapModify
$ adjustWithDefaultUFM
(\node -> node { nodePreference = color : (nodePreference node) })
(newNode u c) { nodePreference = [color] }
u
-- | Do aggressive coalescing on this graph.
--      returns the new graph and the list of pairs of nodes that got coalesced together.
-- for each pair, the resulting node will have the least key and be second in the pair.
--
coalesceGraph
:: (Uniquable k, Ord k, Eq cls, Outputable k)
=> Bool -- ^ If True, coalesce nodes even if this might make the graph
-- less colorable (aggressive coalescing)
-> Triv k cls color
-> Graph k cls color
-> ( Graph k cls color
, [(k, k)]) -- pairs of nodes that were coalesced, in the order that the
-- coalescing was applied.
coalesceGraph aggressive triv graph
= coalesceGraph' aggressive triv graph []
coalesceGraph'
:: (Uniquable k, Ord k, Eq cls, Outputable k)
=> Bool
-> Triv k cls color
-> Graph k cls color
-> [(k, k)]
-> ( Graph k cls color
, [(k, k)])
coalesceGraph' aggressive triv graph kkPairsAcc
= let
-- find all the nodes that have coalescence edges
cNodes = filter (\node -> not $ isEmptyUniqSet (nodeCoalesce node))
$ eltsUFM $ graphMap graph
        -- build a list of pairs of keys for nodes we'll try and coalesce
-- every pair of nodes will appear twice in this list
-- ie [(k1, k2), (k2, k1) ... ]
        --      This is ok; GraphOps.coalesceNodes handles this, and it's convenient for
        --      building a list of what nodes get coalesced together for later on.
--
cList = [ (nodeId node1, k2)
| node1 <- cNodes
, k2 <- uniqSetToList $ nodeCoalesce node1 ]
-- do the coalescing, returning the new graph and a list of pairs of keys
-- that got coalesced together.
(graph', mPairs)
= mapAccumL (coalesceNodes aggressive triv) graph cList
        -- keep running until no more coalesces can be found
in case catMaybes mPairs of
[] -> (graph', reverse kkPairsAcc)
pairs -> coalesceGraph' aggressive triv graph' (reverse pairs ++ kkPairsAcc)
-- | Coalesce this pair of nodes unconditionally \/ aggressively.
-- The resulting node is the one with the least key.
--
-- returns: Just the pair of keys if the nodes were coalesced
-- the second element of the pair being the least one
--
-- Nothing if either of the nodes weren't in the graph
coalesceNodes
:: (Uniquable k, Ord k, Eq cls, Outputable k)
=> Bool -- ^ If True, coalesce nodes even if this might make the graph
-- less colorable (aggressive coalescing)
-> Triv k cls color
-> Graph k cls color
-> (k, k) -- ^ keys of the nodes to be coalesced
-> (Graph k cls color, Maybe (k, k))
coalesceNodes aggressive triv graph (k1, k2)
| (kMin, kMax) <- if k1 < k2
then (k1, k2)
else (k2, k1)
-- the nodes being coalesced must be in the graph
, Just nMin <- lookupNode graph kMin
, Just nMax <- lookupNode graph kMax
        -- can't coalesce conflicting nodes
, not $ elementOfUniqSet kMin (nodeConflicts nMax)
, not $ elementOfUniqSet kMax (nodeConflicts nMin)
-- can't coalesce the same node
, nodeId nMin /= nodeId nMax
= coalesceNodes_merge aggressive triv graph kMin kMax nMin nMax
-- don't do the coalescing after all
| otherwise
= (graph, Nothing)
coalesceNodes_merge
:: (Uniquable k, Ord k, Eq cls, Outputable k)
=> Bool
-> Triv k cls color
-> Graph k cls color
-> k -> k
-> Node k cls color
-> Node k cls color
-> (Graph k cls color, Maybe (k, k))
coalesceNodes_merge aggressive triv graph kMin kMax nMin nMax
-- sanity checks
| nodeClass nMin /= nodeClass nMax
= error "GraphOps.coalesceNodes: can't coalesce nodes of different classes."
| not (isNothing (nodeColor nMin) && isNothing (nodeColor nMax))
= error "GraphOps.coalesceNodes: can't coalesce colored nodes."
---
| otherwise
= let
-- the new node gets all the edges from its two components
node =
Node { nodeId = kMin
, nodeClass = nodeClass nMin
, nodeColor = Nothing
-- nodes don't conflict with themselves..
, nodeConflicts
= (unionUniqSets (nodeConflicts nMin) (nodeConflicts nMax))
`delOneFromUniqSet` kMin
`delOneFromUniqSet` kMax
, nodeExclusions = unionUniqSets (nodeExclusions nMin) (nodeExclusions nMax)
, nodePreference = nodePreference nMin ++ nodePreference nMax
-- nodes don't coalesce with themselves..
, nodeCoalesce
= (unionUniqSets (nodeCoalesce nMin) (nodeCoalesce nMax))
`delOneFromUniqSet` kMin
`delOneFromUniqSet` kMax
}
in coalesceNodes_check aggressive triv graph kMin kMax node
coalesceNodes_check
:: (Uniquable k, Ord k, Eq cls, Outputable k)
=> Bool
-> Triv k cls color
-> Graph k cls color
-> k -> k
-> Node k cls color
-> (Graph k cls color, Maybe (k, k))
coalesceNodes_check aggressive triv graph kMin kMax node
-- Unless we're coalescing aggressively, if the result node is not trivially
-- colorable then don't do the coalescing.
| not aggressive
, not $ triv (nodeClass node) (nodeConflicts node) (nodeExclusions node)
= (graph, Nothing)
| otherwise
= let -- delete the old nodes from the graph and add the new one
Just graph1 = delNode kMax graph
Just graph2 = delNode kMin graph1
graph3 = addNode kMin node graph2
in (graph3, Just (kMax, kMin))
-- | Freeze a node
-- This is for the iterative coalescer.
-- By freezing a node we give up on ever coalescing it.
-- Move all its coalesce edges into the frozen set - and update
-- back edges from other nodes.
--
freezeNode
:: Uniquable k
=> k -- ^ key of the node to freeze
-> Graph k cls color -- ^ the graph
-> Graph k cls color -- ^ graph with that node frozen
freezeNode k
= graphMapModify
$ \fm ->
let -- freeze all the edges in the node to be frozen
Just node = lookupUFM fm k
node' = node
{ nodeCoalesce = emptyUniqSet }
fm1 = addToUFM fm k node'
-- update back edges pointing to this node
freezeEdge k node
= if elementOfUniqSet k (nodeCoalesce node)
then node { nodeCoalesce = delOneFromUniqSet (nodeCoalesce node) k }
else node -- panic "GraphOps.freezeNode: edge to freeze wasn't in the coalesce set"
                        -- If the edge isn't actually in the coalesce set then just ignore it.
fm2 = foldUniqSet (adjustUFM_C (freezeEdge k)) fm1
$ nodeCoalesce node
in fm2
-- | Freeze one node in the graph
--      This is for the iterative coalescer.
-- Look for a move related node of low degree and freeze it.
--
-- We probably don't need to scan the whole graph looking for the node of absolute
-- lowest degree. Just sample the first few and choose the one with the lowest
-- degree out of those. Also, we don't make any distinction between conflicts of different
-- classes.. this is just a heuristic, after all.
--
-- IDEA: freezing a node might free it up for Simplify.. would be good to check for triv
-- right here, and add it to a worklist if known triv\/non-move nodes.
--
freezeOneInGraph
:: (Uniquable k, Outputable k)
=> Graph k cls color
-> ( Graph k cls color -- the new graph
, Bool ) -- whether we found a node to freeze
freezeOneInGraph graph
= let compareNodeDegree n1 n2
= compare (sizeUniqSet $ nodeConflicts n1) (sizeUniqSet $ nodeConflicts n2)
candidates
= sortBy compareNodeDegree
$ take 5 -- 5 isn't special, it's just a small number.
$ scanGraph (\node -> not $ isEmptyUniqSet (nodeCoalesce node)) graph
in case candidates of
-- there wasn't anything available to freeze
[] -> (graph, False)
-- we found something to freeze
(n : _)
-> ( freezeNode (nodeId n) graph
, True)
-- | Freeze all the nodes in the graph
-- for debugging the iterative allocator.
--
freezeAllInGraph
:: (Uniquable k, Outputable k)
=> Graph k cls color
-> Graph k cls color
freezeAllInGraph graph
= foldr freezeNode graph
$ map nodeId
$ eltsUFM $ graphMap graph
-- | Find all the nodes in the graph that meet some criteria
--
scanGraph
:: Uniquable k
=> (Node k cls color -> Bool)
-> Graph k cls color
-> [Node k cls color]
scanGraph match graph
= filter match $ eltsUFM $ graphMap graph
-- | validate the internal structure of a graph
-- all its edges should point to valid nodes
-- If they don't then throw an error
--
validateGraph
:: (Uniquable k, Outputable k, Eq color)
=> SDoc -- ^ extra debugging info to display on error
-> Bool -- ^ whether this graph is supposed to be colored.
-> Graph k cls color -- ^ graph to validate
-> Graph k cls color -- ^ validated graph
validateGraph doc isColored graph
-- Check that all edges point to valid nodes.
| edges <- unionManyUniqSets
( (map nodeConflicts $ eltsUFM $ graphMap graph)
++ (map nodeCoalesce $ eltsUFM $ graphMap graph))
, nodes <- mkUniqSet $ map nodeId $ eltsUFM $ graphMap graph
, badEdges <- minusUniqSet edges nodes
, not $ isEmptyUniqSet badEdges
= pprPanic "GraphOps.validateGraph"
                ( text "Graph has edges that point to non-existent nodes"
$$ text " bad edges: " <> vcat (map ppr $ uniqSetToList badEdges)
$$ doc )
-- Check that no conflicting nodes have the same color
| badNodes <- filter (not . (checkNode graph))
$ eltsUFM $ graphMap graph
, not $ null badNodes
= pprPanic "GraphOps.validateGraph"
                ( text "Node has same color as one of its conflicts"
$$ text " bad nodes: " <> hcat (map (ppr . nodeId) badNodes)
$$ doc)
-- If this is supposed to be a colored graph,
-- check that all nodes have a color.
| isColored
, badNodes <- filter (\n -> isNothing $ nodeColor n)
$ eltsUFM $ graphMap graph
, not $ null badNodes
= pprPanic "GraphOps.validateGraph"
                ( text "Supposedly colored graph has uncolored nodes."
$$ text " uncolored nodes: " <> hcat (map (ppr . nodeId) badNodes)
$$ doc )
-- graph looks ok
| otherwise
= graph
-- | If this node is colored, check that all the nodes which
-- conflict with it have different colors.
checkNode
:: (Uniquable k, Eq color)
=> Graph k cls color
-> Node k cls color
-> Bool -- ^ True if this node is ok
checkNode graph node
| Just color <- nodeColor node
, Just neighbors <- sequence $ map (lookupNode graph)
$ uniqSetToList $ nodeConflicts node
, neighbourColors <- catMaybes $ map nodeColor neighbors
, elem color neighbourColors
= False
| otherwise
= True
-- | Slurp out a map of how many nodes had a certain number of conflict neighbours
slurpNodeConflictCount
:: Uniquable k
=> Graph k cls color
-> UniqFM (Int, Int) -- ^ (conflict neighbours, num nodes with that many conflicts)
slurpNodeConflictCount graph
= addListToUFM_C
(\(c1, n1) (_, n2) -> (c1, n1 + n2))
emptyUFM
$ map (\node
-> let count = sizeUniqSet $ nodeConflicts node
in (count, (count, 1)))
$ eltsUFM
$ graphMap graph
-- | Set the color of a certain node
setColor
:: Uniquable k
=> k -> color
-> Graph k cls color -> Graph k cls color
setColor u color
= graphMapModify
$ adjustUFM_C
(\n -> n { nodeColor = Just color })
u
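-- Apply f to the value stored at the given key, or insert the supplied
-- default if the key is absent.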
{-# INLINE adjustWithDefaultUFM #-}
adjustWithDefaultUFM
:: Uniquable k
=> (a -> a) -> a -> k
-> UniqFM a -> UniqFM a
adjustWithDefaultUFM f def k map
= addToUFM_C
(\old _ -> f old)
map
k def
-- Argument order different from UniqFM's adjustUFM
{-# INLINE adjustUFM_C #-}
adjustUFM_C
:: Uniquable k
=> (a -> a)
-> k -> UniqFM a -> UniqFM a
adjustUFM_C f k map
= case lookupUFM map k of
Nothing -> map
Just a -> addToUFM map k (f a)
| alexander-at-github/eta | compiler/ETA/Utils/GraphOps.hs | bsd-3-clause | 22,572 | 0 | 18 | 8,469 | 4,884 | 2,511 | 2,373 | 417 | 2 |
module Idris.ElabQuasiquote (extractUnquotes) where
import Idris.Core.Elaborate hiding (Tactic(..))
import Idris.Core.TT
import Idris.AbsSyntax
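-- Helpers that rebuild a term from one or two processed subterms while
-- collecting any unquote bindings found in them.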
extract1 :: Int -> (PTerm -> a) -> PTerm -> Elab' aux (a, [(Name, PTerm)])
extract1 n c tm = do (tm', ex) <- extractUnquotes n tm
return (c tm', ex)
extract2 :: Int -> (PTerm -> PTerm -> a) -> PTerm -> PTerm -> Elab' aux (a, [(Name, PTerm)])
extract2 n c a b = do (a', ex1) <- extractUnquotes n a
(b', ex2) <- extractUnquotes n b
return (c a' b', ex1 ++ ex2)
extractTUnquotes :: Int -> PTactic -> Elab' aux (PTactic, [(Name, PTerm)])
extractTUnquotes n (Rewrite t) = extract1 n Rewrite t
extractTUnquotes n (Induction t) = extract1 n Induction t
extractTUnquotes n (LetTac name t) = extract1 n (LetTac name) t
extractTUnquotes n (LetTacTy name t1 t2) = extract2 n (LetTacTy name) t1 t2
extractTUnquotes n (Exact tm) = extract1 n Exact tm
extractTUnquotes n (Try tac1 tac2)
= do (tac1', ex1) <- extractTUnquotes n tac1
(tac2', ex2) <- extractTUnquotes n tac2
return (Try tac1' tac2', ex1 ++ ex2)
extractTUnquotes n (TSeq tac1 tac2)
= do (tac1', ex1) <- extractTUnquotes n tac1
(tac2', ex2) <- extractTUnquotes n tac2
return (TSeq tac1' tac2', ex1 ++ ex2)
extractTUnquotes n (ApplyTactic t) = extract1 n ApplyTactic t
extractTUnquotes n (ByReflection t) = extract1 n ByReflection t
extractTUnquotes n (Reflect t) = extract1 n Reflect t
extractTUnquotes n (GoalType s tac)
= do (tac', ex) <- extractTUnquotes n tac
return (GoalType s tac', ex)
extractTUnquotes n (TCheck t) = extract1 n TCheck t
extractTUnquotes n (TEval t) = extract1 n TEval t
extractTUnquotes n (Claim name t) = extract1 n (Claim name) t
extractTUnquotes n tac = return (tac, []) -- the rest don't contain PTerms, or have been desugared away
extractPArgUnquotes :: Int -> PArg -> Elab' aux (PArg, [(Name, PTerm)])
extractPArgUnquotes d (PImp p m opts n t) =
do (t', ex) <- extractUnquotes d t
return (PImp p m opts n t', ex)
extractPArgUnquotes d (PExp p opts n t) =
do (t', ex) <- extractUnquotes d t
return (PExp p opts n t', ex)
extractPArgUnquotes d (PConstraint p opts n t) =
do (t', ex) <- extractUnquotes d t
return (PConstraint p opts n t', ex)
extractPArgUnquotes d (PTacImplicit p opts n scpt t) =
do (scpt', ex1) <- extractUnquotes d scpt
(t', ex2) <- extractUnquotes d t
return (PTacImplicit p opts n scpt' t', ex1 ++ ex2)
extractDoUnquotes :: Int -> PDo -> Elab' aux (PDo, [(Name, PTerm)])
extractDoUnquotes d (DoExp fc tm)
= do (tm', ex) <- extractUnquotes d tm
return (DoExp fc tm', ex)
extractDoUnquotes d (DoBind fc n nfc tm)
= do (tm', ex) <- extractUnquotes d tm
return (DoBind fc n nfc tm', ex)
extractDoUnquotes d (DoBindP fc t t' alts)
= fail "Pattern-matching binds cannot be quasiquoted"
extractDoUnquotes d (DoLet fc n nfc v b)
= do (v', ex1) <- extractUnquotes d v
(b', ex2) <- extractUnquotes d b
return (DoLet fc n nfc v' b', ex1 ++ ex2)
extractDoUnquotes d (DoLetP fc t t') = fail "Pattern-matching lets cannot be quasiquoted"
extractUnquotes :: Int -> PTerm -> Elab' aux (PTerm, [(Name, PTerm)])
extractUnquotes n (PLam fc name nfc ty body)
= do (ty', ex1) <- extractUnquotes n ty
(body', ex2) <- extractUnquotes n body
return (PLam fc name nfc ty' body', ex1 ++ ex2)
extractUnquotes n (PPi plicity name fc ty body)
= do (ty', ex1) <- extractUnquotes n ty
(body', ex2) <- extractUnquotes n body
return (PPi plicity name fc ty' body', ex1 ++ ex2)
extractUnquotes n (PLet fc name nfc ty val body)
= do (ty', ex1) <- extractUnquotes n ty
(val', ex2) <- extractUnquotes n val
(body', ex3) <- extractUnquotes n body
return (PLet fc name nfc ty' val' body', ex1 ++ ex2 ++ ex3)
extractUnquotes n (PTyped tm ty)
= do (tm', ex1) <- extractUnquotes n tm
(ty', ex2) <- extractUnquotes n ty
return (PTyped tm' ty', ex1 ++ ex2)
extractUnquotes n (PApp fc f args)
= do (f', ex1) <- extractUnquotes n f
args' <- mapM (extractPArgUnquotes n) args
let (args'', exs) = unzip args'
return (PApp fc f' args'', ex1 ++ concat exs)
extractUnquotes n (PAppBind fc f args)
= do (f', ex1) <- extractUnquotes n f
args' <- mapM (extractPArgUnquotes n) args
let (args'', exs) = unzip args'
return (PAppBind fc f' args'', ex1 ++ concat exs)
extractUnquotes n (PCase fc expr cases)
= do (expr', ex1) <- extractUnquotes n expr
let (pats, rhss) = unzip cases
(pats', exs1) <- fmap unzip $ mapM (extractUnquotes n) pats
(rhss', exs2) <- fmap unzip $ mapM (extractUnquotes n) rhss
return (PCase fc expr' (zip pats' rhss'), ex1 ++ concat exs1 ++ concat exs2)
extractUnquotes n (PIfThenElse fc c t f)
= do (c', ex1) <- extractUnquotes n c
(t', ex2) <- extractUnquotes n t
(f', ex3) <- extractUnquotes n f
return (PIfThenElse fc c' t' f', ex1 ++ ex2 ++ ex3)
extractUnquotes n (PRewrite fc x y z)
= do (x', ex1) <- extractUnquotes n x
(y', ex2) <- extractUnquotes n y
case z of
Just zz -> do (z', ex3) <- extractUnquotes n zz
return (PRewrite fc x' y' (Just z'), ex1 ++ ex2 ++ ex3)
Nothing -> return (PRewrite fc x' y' Nothing, ex1 ++ ex2)
extractUnquotes n (PPair fc info l r)
= do (l', ex1) <- extractUnquotes n l
(r', ex2) <- extractUnquotes n r
return (PPair fc info l' r', ex1 ++ ex2)
extractUnquotes n (PDPair fc info a b c)
= do (a', ex1) <- extractUnquotes n a
(b', ex2) <- extractUnquotes n b
(c', ex3) <- extractUnquotes n c
return (PDPair fc info a' b' c', ex1 ++ ex2 ++ ex3)
extractUnquotes n (PAlternative b alts)
= do alts' <- mapM (extractUnquotes n) alts
let (alts'', exs) = unzip alts'
return (PAlternative b alts'', concat exs)
extractUnquotes n (PHidden tm)
= do (tm', ex) <- extractUnquotes n tm
return (PHidden tm', ex)
extractUnquotes n (PGoal fc a name b)
= do (a', ex1) <- extractUnquotes n a
(b', ex2) <- extractUnquotes n b
return (PGoal fc a' name b', ex1 ++ ex2)
extractUnquotes n (PDoBlock steps)
= do steps' <- mapM (extractDoUnquotes n) steps
let (steps'', exs) = unzip steps'
return (PDoBlock steps'', concat exs)
extractUnquotes n (PIdiom fc tm)
= fmap (\(tm', ex) -> (PIdiom fc tm', ex)) $ extractUnquotes n tm
extractUnquotes n (PProof tacs)
= do (tacs', exs) <- fmap unzip $ mapM (extractTUnquotes n) tacs
return (PProof tacs', concat exs)
extractUnquotes n (PTactics tacs)
= do (tacs', exs) <- fmap unzip $ mapM (extractTUnquotes n) tacs
return (PTactics tacs', concat exs)
extractUnquotes n (PElabError err) = fail "Can't quasiquote an error"
extractUnquotes n (PCoerced tm)
= do (tm', ex) <- extractUnquotes n tm
return (PCoerced tm', ex)
extractUnquotes n (PDisamb ns tm)
= do (tm', ex) <- extractUnquotes n tm
return (PDisamb ns tm', ex)
extractUnquotes n (PUnifyLog tm)
= fmap (\(tm', ex) -> (PUnifyLog tm', ex)) $ extractUnquotes n tm
extractUnquotes n (PNoImplicits tm)
= fmap (\(tm', ex) -> (PNoImplicits tm', ex)) $ extractUnquotes n tm
extractUnquotes n (PQuasiquote tm goal)
= fmap (\(tm', ex) -> (PQuasiquote tm' goal, ex)) $ extractUnquotes (n+1) tm
extractUnquotes n (PUnquote tm)
| n == 0 = do n <- getNameFrom (sMN 0 "unquotation")
return (PRef (fileFC "(unquote)") n, [(n, tm)])
| otherwise = fmap (\(tm', ex) -> (PUnquote tm', ex)) $
extractUnquotes (n-1) tm
extractUnquotes n (PRunElab fc tm ns)
= fmap (\(tm', ex) -> (PRunElab fc tm' ns, ex)) $ extractUnquotes n tm
extractUnquotes n x = return (x, []) -- no subterms!
| bkoropoff/Idris-dev | src/Idris/ElabQuasiquote.hs | bsd-3-clause | 7,770 | 0 | 16 | 1,854 | 3,543 | 1,768 | 1,775 | 162 | 2 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
{-# OPTIONS_GHC -F -pgmF htfpp #-}
-- | test cards
module Web.MangoPay.CardsTest where
import Web.MangoPay
import Web.MangoPay.TestUtils
import Data.Default
import Data.Maybe (isJust, isNothing, fromJust)
import Test.Framework
import Test.HUnit (Assertion)
import qualified Data.Text as T
import qualified Control.Exception.Lifted as L
-- | test a card registration using euro currency
test_CardEUR :: Assertion
test_CardEUR = doTestCard "EUR"
-- | test a card registration using dollar currency
test_CardUSD :: Assertion
test_CardUSD = doTestCard "USD"
-- | perform the actual test of card registration in the provided currency
doTestCard :: T.Text->Assertion
doTestCard curr=L.handle (\(e::MpException)->assertFailure (show e)) $ do
usL<-testMP $ listUsers def (Just $ Pagination 1 1)
assertEqual 1 (length $ plData usL)
let uid=urId $ head $ plData usL
let cr1=mkCardRegistration uid curr
cr2<-testMP $ createCardRegistration cr1
assertBool (isJust $ crId cr2)
assertBool (isJust $ crCreationDate cr2)
assertBool (isJust $ crCardRegistrationURL cr2)
assertBool (isJust $ crAccessKey cr2)
assertBool (isJust $ crPreregistrationData cr2)
assertBool (isNothing $ crRegistrationData cr2)
assertBool (isNothing $ crCardId cr2)
cr3<-unsafeRegisterCard testCardInfo1 cr2
assertBool (isJust $ crRegistrationData cr3)
cr4<-testMP $ modifyCardRegistration cr3
assertBool (isJust $ crCardId cr4)
let cid=fromJust $ crCardId cr4
c<-testMP $ fetchCard cid
assertEqual cid $ cId c
assertBool $ not $ T.null $ cAlias c
assertBool $ not $ T.null $ cCardProvider c
assertEqual (ciExpire testCardInfo1) (cExpirationDate c)
--assertBool $ not $ T.null $ cExpirationDate c
assertEqual UNKNOWN $ cValidity c
assertBool $ cActive c
assertEqual uid $ cUserId c
assertEqual "CB_VISA_MASTERCARD" $ cCardType c
cs<-testMP $ getAll $ listCards uid def
assertBool $ not $ null cs
assertBool $ any (\ c1 -> cId c1 == cid) cs
| prowdsponsor/mangopay | mangopay/test/Web/MangoPay/CardsTest.hs | bsd-3-clause | 2,021 | 0 | 13 | 333 | 633 | 303 | 330 | 46 | 1 |
-- |
-- Module: Network.Protocool.ZigBee.ZNet25
-- Copyright: (c) 2012 David Joyner
-- License: BSD3
-- Maintainer: David Joyner <[email protected]>
-- Stability: experimental
-- Portability: portable
--
-- A protocol library enabling wireless communications via
-- XBee/XBee-PRO (ZibBee) ZNet 2.5 RF modules.
--
-- The library performs binary encoding/decoding, on-the-wire framing,
-- and error checking of control and data "API" frames. In other words
-- the library expects to communicate with an XBee modem that has AP
-- parameter = 2, indicating API frames with HDLC-like escape characters.
-- This mode provides for 8-bit transparent operation and is described in
-- Section 6 of <ftp://ftp1.digi.com/support/documentation/90000866_C.pdf>.
--
-- The library code is 100% pure; nothing here depends on the
-- IO monad and no particular modem device is assumed. The code
-- depends on the @cereal@ package for serialization but is otherwise
-- pretty standard. The test suite is based on @QuickCheck@ properties.
module Network.Protocol.ZigBee.ZNet25 (
module Network.Protocol.ZigBee.ZNet25.Encoder
, module Network.Protocol.ZigBee.ZNet25.Frame
) where
import Network.Protocol.ZigBee.ZNet25.Encoder
import Network.Protocol.ZigBee.ZNet25.Frame
| djoyner/zigbee-znet25 | src/Network/Protocol/ZigBee/ZNet25.hs | bsd-3-clause | 1,278 | 0 | 5 | 191 | 66 | 55 | 11 | 5 | 0 |
module Milib.Geometry
( cross
, dot
, norm2
, ccw
, convex_hull
) where
cross :: (Num a) => (a, a) -> (a,a) -> a
cross (x1,y1) (x2,y2) = x1 * y2 - y1 * x2
dot :: (Num a) => (a, a) -> (a,a) -> a
dot (x1,y1) (x2,y2) = x1 * x2 + y1 * y2
norm2 :: (Num a) => (a, a) -> a
norm2 (x1,y1) = x1 * x1 + y1 * y1
data Turn = CounterClockwise
| Clockwise
| Straight
| OverBackward
| Backward
ccw :: (Num a, Ord a) => (a, a) -> (a, a) -> (a, a) -> Turn
ccw (xa,ya) (xb,yb) (xc,yc)
| cross p q > 0 = CounterClockwise
| cross p q < 0 = Clockwise
| dot p q < 0 = OverBackward
| norm2 p < norm2 q = Straight
| otherwise = Backward
where
p = (xb - xa, yb - ya)
q = (xc - xa, yc - ya)
convex_hull :: (Num a, Ord a) => [(a, a)] -> [(a, a)]
convex_hull x = lower ++ upper
where
lower = tail $ convex_hull' x []
upper = tail $ convex_hull' (reverse x) []
convex_hull' [] ys = ys
convex_hull' (x:xs) [] = convex_hull' xs [x]
convex_hull' (x:xs) [y] = convex_hull' xs [x,y]
convex_hull' (x:xs) (y1:y2:ys) = case ccw y2 y1 x of
Clockwise -> convex_hull' (x:xs) (y2:ys)
CounterClockwise -> convex_hull' xs (x:y1:y2:ys)
Straight -> convex_hull' xs (x:y1:y2:ys)
otherwise -> [] -- err
-- vim: set expandtab:
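-- A small worked example (kept unexported), assuming the input points are
-- already sorted lexicographically as the lower/upper chain scan expects;
-- the interior point (1,1) should be dropped from the hull.
exampleHull :: [(Int, Int)]
exampleHull = convex_hull [(0,0), (0,2), (1,1), (2,0), (2,3)]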
| mkut/milib_haskell | src/Milib/Geometry.hs | bsd-3-clause | 1,333 | 0 | 12 | 405 | 717 | 390 | 327 | 38 | 4 |
-- We refer to otherwise unused modules in documentation.
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module System.Win32.SystemServices.Services.SERVICE_ACCEPT
( SERVICE_ACCEPT (..)
, pokeServiceAccept
, peekServiceAccept
) where
import Data.Bits
import Data.Maybe
import Text.Printf
import Import
-- Imported for haddocks
import qualified System.Win32.SystemServices.Services.SERVICE_CONTROL as C
-- | The control codes the service accepts and processes in its handler
-- function (See 'HandlerFunction'). By default, all services accept the
-- 'C.INTERROGATE' value. To accept the 'DEVICEEVENT' value, the service must
-- register to receive device events by using the
-- 'registerDeviceNotification' function.
data SERVICE_ACCEPT
-- | The service is a network component that can accept changes in its
-- binding without being stopped and restarted. This control code allows
-- the service to receive 'C.NETBINDADD', 'C.NETBINDREMOVE',
-- 'C.NETBINDENABLE', and 'C.NETBINDDISABLE' notifications.
= ACCEPT_NETBINDCHANGE
-- | The service can reread its startup parameters without being stopped
-- and restarted. This control code allows the service to receive
-- 'C.PARAMCHANGE' notifications.
| ACCEPT_PARAMCHANGE
-- | The service can be paused and continued. This control code allows the
-- service to receive 'C.PAUSE' and 'C.CONTINUE' notifications.
| ACCEPT_PAUSE_CONTINUE
-- | MSDN documentation says that this function is not supported on
-- Windows Server 2003 or Windows XP/2000. The support status on other
-- versions is unknown to me.
--
-- The service can perform preshutdown tasks. This control code enables
-- the service to receive 'C.PRESHUTDOWN' notifications.
-- Note that only the system can send it.
| ACCEPT_PRESHUTDOWN
-- | The service is notified when system shutdown occurs. This control
-- code allows the service to receive 'C.SHUTDOWN' notifications. Note
-- that only the system can send it.
| ACCEPT_SHUTDOWN
-- | The service can be stopped. This control code allows the service to
-- receive 'C.STOP' notifications.
| ACCEPT_STOP
deriving (Show)
peekServiceAccept :: Ptr DWORD -> IO [SERVICE_ACCEPT]
peekServiceAccept ptr = unflag <$> peek ptr
pokeServiceAccept :: Ptr DWORD -> [SERVICE_ACCEPT] -> IO ()
pokeServiceAccept ptr sas = poke ptr . flag $ sas
toDWORD :: SERVICE_ACCEPT -> DWORD
toDWORD ACCEPT_NETBINDCHANGE = 0x00000010
toDWORD ACCEPT_PARAMCHANGE = 0x00000008
toDWORD ACCEPT_PAUSE_CONTINUE = 0x00000002
toDWORD ACCEPT_PRESHUTDOWN = 0x00000100
toDWORD ACCEPT_SHUTDOWN = 0x00000004
toDWORD ACCEPT_STOP = 0x00000001
fromDWORD :: DWORD -> Either String SERVICE_ACCEPT
fromDWORD 0x00000010 = Right ACCEPT_NETBINDCHANGE
fromDWORD 0x00000008 = Right ACCEPT_PARAMCHANGE
fromDWORD 0x00000002 = Right ACCEPT_PAUSE_CONTINUE
fromDWORD 0x00000100 = Right ACCEPT_PRESHUTDOWN
fromDWORD 0x00000004 = Right ACCEPT_SHUTDOWN
fromDWORD 0x00000001 = Right ACCEPT_STOP
fromDWORD 0x00000020 = unsupported "SERVICE_ACCEPT_HARDWAREPROFILECHANGE"
fromDWORD 0x00000040 = unsupported "SERVICE_ACCEPT_POWEREVENT"
fromDWORD 0x00000080 = unsupported "SERVICE_ACCEPT_SESSIONCHANGE"
fromDWORD 0x00000200 = unsupported "SERVICE_ACCEPT_TIMECHANGE"
fromDWORD 0x00000400 = unsupported "SERVICE_ACCEPT_TRIGGEREVENT"
fromDWORD 0x00000800 = unsupported "SERVICE_ACCEPT_USERMODEREBOOT"
fromDWORD x = Left $ "The " ++ printf "%x" x ++ " control code is undocumented."
unsupported :: String -> Either String a
unsupported name = Left $ "The " ++ name ++ " control code is unsupported by this binding."
-- | This function takes a 'DWORD' and assumes it is a flagfield. Each bit
-- is masked off and converted into a value. Any failures are silently
-- discarded.
unflag :: DWORD -> [SERVICE_ACCEPT]
unflag f = mapMaybe (hush . fromDWORD . (.&. f)) masks
where
masks = take 32 $ iterate (`shiftL` 1) 1
flag :: [SERVICE_ACCEPT] -> DWORD
flag fs = foldl (\flag' f -> flag' .|. toDWORD f) 0 fs
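-- A small illustrative value (kept unexported): the flag field a service
-- would advertise if it accepts stop, pause/continue and shutdown controls.
exampleAccepted :: DWORD
exampleAccepted = flag [ACCEPT_STOP, ACCEPT_PAUSE_CONTINUE, ACCEPT_SHUTDOWN]  -- == 0x00000007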
| nick0x01/Win32-services | src/System/Win32/SystemServices/Services/SERVICE_ACCEPT.hs | bsd-3-clause | 4,099 | 0 | 9 | 748 | 563 | 309 | 254 | 50 | 1 |
module Main where
import Queue as Q
import Covariance
import Data.Time.Clock
import Data.IORef
import Control.Monad
import System.Random
import qualified Data.Vector.Unboxed as U
import qualified Data.List as L
import Control.DeepSeq
import Control.Exception
import Statistics.Sample as S
summer :: Queue Int
summer = queue (+)
cov :: Queue Covariance
cov = queue updateCovariance
pushTrace :: Show a => a -> IORef (Queue a) -> IO ()
pushTrace a xs = do
modifyIORef xs (push a)
xs' <- readIORef xs
print $ runFold xs'
popTrace :: Show a => IORef (Queue a) -> IO ()
popTrace xs = do
modifyIORef xs (\xs -> maybe xs snd $ pop xs)
xs' <- readIORef xs
print $ runFold xs'
showCov = show . getCovariance
-- Generates a lazy list of windows
windows :: Int -> U.Vector Double -> U.Vector Double -> [U.Vector (Double, Double)]
windows k xs ys = let
zs = pair xs ys
in flip L.unfoldr zs $ \vec -> if U.length vec >= k
then Just (U.take k vec, U.drop 1 vec)
else Nothing
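-- Rolling covariance via a functional queue: push each new pair's summary
-- and, once the window holds k elements, pop the oldest so the fold always
-- covers exactly the last k points.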
queues :: Int -> U.Vector Double -> U.Vector Double -> [Queue Covariance]
queues k xs ys = let
zs = uncurry mkCovariance <$> U.toList (pair xs ys)
pushPop x d = case Q.pop (Q.push x d) of
Nothing -> error "Impossible situation: push resulted in empty queue"
Just d' -> snd d'
go d x = if Q.size d == k
then pushPop x d
else push x d
in dropWhile (\d -> Q.size d < k) $ L.scanl' go cov zs
time :: String -> IO a -> IO a
time msg action = do
t0 <- getCurrentTime
r <- action
t1 <- getCurrentTime
putStrLn $ msg ++ " took " ++ show (t1 `diffUTCTime` t0)
return r
covariance' :: Queue Covariance -> Double
covariance' = maybe (0/0) getCovariance . runFold
-- simple test suite
main :: IO ()
main = do
  -- Find the rolling covariance of two randomly generated, 10,000-element vectors
xs <- evaluate . force =<< U.fromList <$> replicateM 10000 randomIO
ys <- evaluate . force =<< U.fromList <$> replicateM 10000 randomIO
time "Naive 10" $ print $ sum $ covariance <$> windows 10 xs ys
time "Window 10" $ print $ sum $ covariance' <$> queues 10 xs ys
time "Naive 100" $ print $ sum $ covariance <$> windows 100 xs ys
time "Window 100" $ print $ sum $ covariance' <$> queues 100 xs ys
time "Naive 1000" $ print $ sum $ covariance <$> windows 1000 xs ys
time "Window 1000" $ print $ sum $ covariance' <$> queues 1000 xs ys
| charles-cooper/hroll | src/Main.hs | bsd-3-clause | 2,378 | 0 | 13 | 536 | 955 | 467 | 488 | 62 | 3 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1998
\section[DataCon]{@DataCon@: Data Constructors}
-}
{-# LANGUAGE CPP, DeriveDataTypeable #-}
module DataCon (
-- * Main data types
DataCon, DataConRep(..),
HsBang(..), HsSrcBang, HsImplBang,
StrictnessMark(..),
ConTag,
-- ** Type construction
mkDataCon, fIRST_TAG,
buildAlgTyCon,
-- ** Type deconstruction
dataConRepType, dataConSig, dataConFullSig,
dataConName, dataConIdentity, dataConTag, dataConTyCon,
dataConOrigTyCon, dataConUserType,
dataConUnivTyVars, dataConExTyVars, dataConAllTyVars,
dataConEqSpec, eqSpecPreds, dataConTheta,
dataConStupidTheta,
dataConInstArgTys, dataConOrigArgTys, dataConOrigResTy,
dataConInstOrigArgTys, dataConRepArgTys,
dataConFieldLabels, dataConFieldType,
dataConSrcBangs,
dataConSourceArity, dataConRepArity, dataConRepRepArity,
dataConIsInfix,
dataConWorkId, dataConWrapId, dataConWrapId_maybe, dataConImplicitIds,
dataConRepStrictness, dataConImplBangs, dataConBoxer,
splitDataProductType_maybe,
-- ** Predicates on DataCons
isNullarySrcDataCon, isNullaryRepDataCon, isTupleDataCon, isUnboxedTupleCon,
isVanillaDataCon, classDataCon, dataConCannotMatch,
isBanged, isMarkedStrict, eqHsBang,
-- ** Promotion related functions
promoteKind, promoteDataCon, promoteDataCon_maybe
) where
#include "HsVersions.h"
import {-# SOURCE #-} MkId( DataConBoxer )
import Type
import TypeRep( Type(..) ) -- Used in promoteType
import PrelNames( liftedTypeKindTyConKey )
import ForeignCall( CType )
import Coercion
import Kind
import Unify
import TyCon
import Class
import Name
import Var
import Outputable
import Unique
import ListSetOps
import Util
import BasicTypes
import FastString
import Module
import VarEnv
import qualified Data.Data as Data
import qualified Data.Typeable
import Data.Maybe
import Data.Char
import Data.Word
{-
Data constructor representation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider the following Haskell data type declaration
data T = T !Int ![Int]
Using the strictness annotations, GHC will represent this as
data T = T Int# [Int]
That is, the Int has been unboxed. Furthermore, the Haskell source construction
T e1 e2
is translated to
case e1 of { I# x ->
case e2 of { r ->
T x r }}
That is, the first argument is unboxed, and the second is evaluated. Finally,
pattern matching is translated too:
case e of { T a b -> ... }
becomes
case e of { T a' b -> let a = I# a' in ... }
To keep ourselves sane, we name the different versions of the data constructor
differently, as follows.
Note [Data Constructor Naming]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Each data constructor C has two, and possibly up to four, Names associated with it:
OccName Name space Name of Notes
---------------------------------------------------------------------------
The "data con itself" C DataName DataCon In dom( GlobalRdrEnv )
The "worker data con" C VarName Id The worker
The "wrapper data con" $WC VarName Id The wrapper
The "newtype coercion" :CoT TcClsName TyCon
EVERY data constructor (incl for newtypes) has the former two (the
data con itself, and its worker), but only some data constructors have a
wrapper (see Note [The need for a wrapper]).
Each of these three has a distinct Unique. The "data con itself" name
appears in the output of the renamer, and names the Haskell-source
data constructor. The type checker translates it into either the wrapper Id
(if it exists) or worker Id (otherwise).
The data con has one or two Ids associated with it:
The "worker Id", is the actual data constructor.
* Every data constructor (newtype or data type) has a worker
* The worker is very like a primop, in that it has no binding.
* For a *data* type, the worker *is* the data constructor;
it has no unfolding
* For a *newtype*, the worker has a compulsory unfolding which
does a cast, e.g.
newtype T = MkT Int
The worker for MkT has unfolding
\\(x:Int). x `cast` sym CoT
Here CoT is the type constructor, witnessing the FC axiom
axiom CoT : T = Int
The "wrapper Id", \$WC, goes as follows
* Its type is exactly what it looks like in the source program.
* It is an ordinary function, and it gets a top-level binding
like any other function.
* The wrapper Id isn't generated for a data type if there is
nothing for the wrapper to do. That is, if its defn would be
\$wC = C
Note [The need for a wrapper]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Why might the wrapper have anything to do? Two reasons:
* Unboxing strict fields (with -funbox-strict-fields)
data T = MkT !(Int,Int)
\$wMkT :: (Int,Int) -> T
\$wMkT (x,y) = MkT x y
   Notice that the worker has two fields where the wrapper has
just one. That is, the worker has type
MkT :: Int -> Int -> T
* Equality constraints for GADTs
data T a where { MkT :: a -> T [a] }
The worker gets a type with explicit equality
constraints, thus:
MkT :: forall a b. (a=[b]) => b -> T a
The wrapper has the programmer-specified type:
\$wMkT :: a -> T [a]
\$wMkT a x = MkT [a] a [a] x
      The third argument is a coercion
[a] :: [a]~[a]
INVARIANT: the dictionary constructor for a class
never has a wrapper.
A note about the stupid context
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Data types can have a context:
data (Eq a, Ord b) => T a b = T1 a b | T2 a
and that makes the constructors have a context too
(notice that T2's context is "thinned"):
T1 :: (Eq a, Ord b) => a -> b -> T a b
T2 :: (Eq a) => a -> T a b
Furthermore, this context pops up when pattern matching
(though GHC hasn't implemented this, but it is in H98, and
I've fixed GHC so that it now does):
f (T2 x) = x
gets inferred type
f :: Eq a => T a b -> a
I say the context is "stupid" because the dictionaries passed
are immediately discarded -- they do nothing and have no benefit.
It's a flaw in the language.
Up to now [March 2002] I have put this stupid context into the
type of the "wrapper" constructors functions, T1 and T2, but
that turned out to be jolly inconvenient for generics, and
record update, and other functions that build values of type T
(because they don't have suitable dictionaries available).
So now I've taken the stupid context out. I simply deal with
it separately in the type checker on occurrences of a
constructor, either in an expression or in a pattern.
[May 2003: actually I think this decision could easily be
reversed now, and probably should be. Generics could be
disabled for types with a stupid context; record updates now
(H98) needs the context too; etc. It's an unforced change, so
I'm leaving it for now --- but it does seem odd that the
wrapper doesn't include the stupid context.]
[July 04] With the advent of generalised data types, it's less obvious
what the "stupid context" is. Consider
C :: forall a. Ord a => a -> a -> T (Foo a)
Does the C constructor in Core contain the Ord dictionary? Yes, it must:
f :: T b -> Ordering
f = /\b. \x:T b.
case x of
C a (d:Ord a) (p:a) (q:a) -> compare d p q
Note that (Foo a) might not be an instance of Ord.
************************************************************************
* *
\subsection{Data constructors}
* *
************************************************************************
-}
-- | A data constructor
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnOpen',
-- 'ApiAnnotation.AnnClose','ApiAnnotation.AnnComma'
-- For details on above see note [Api annotations] in ApiAnnotation
data DataCon
= MkData {
dcName :: Name, -- This is the name of the *source data con*
-- (see "Note [Data Constructor Naming]" above)
dcUnique :: Unique, -- Cached from Name
dcTag :: ConTag, -- ^ Tag, used for ordering 'DataCon's
-- Running example:
--
-- *** As declared by the user
-- data T a where
-- MkT :: forall x y. (x~y,Ord x) => x -> y -> T (x,y)
-- *** As represented internally
-- data T a where
-- MkT :: forall a. forall x y. (a~(x,y),x~y,Ord x) => x -> y -> T a
--
-- The next six fields express the type of the constructor, in pieces
-- e.g.
--
-- dcUnivTyVars = [a]
-- dcExTyVars = [x,y]
-- dcEqSpec = [a~(x,y)]
-- dcOtherTheta = [x~y, Ord x]
-- dcOrigArgTys = [x,y]
-- dcRepTyCon = T
dcVanilla :: Bool, -- True <=> This is a vanilla Haskell 98 data constructor
-- Its type is of form
-- forall a1..an . t1 -> ... tm -> T a1..an
-- No existentials, no coercions, nothing.
-- That is: dcExTyVars = dcEqSpec = dcOtherTheta = []
-- NB 1: newtypes always have a vanilla data con
-- NB 2: a vanilla constructor can still be declared in GADT-style
-- syntax, provided its type looks like the above.
-- The declaration format is held in the TyCon (algTcGadtSyntax)
dcUnivTyVars :: [TyVar], -- Universally-quantified type vars [a,b,c]
-- INVARIANT: length matches arity of the dcRepTyCon
--- result type of (rep) data con is exactly (T a b c)
dcExTyVars :: [TyVar], -- Existentially-quantified type vars
-- In general, the dcUnivTyVars are NOT NECESSARILY THE SAME AS THE TYVARS
-- FOR THE PARENT TyCon. With GADTs the data con might not even have
-- the same number of type variables.
-- [This is a change (Oct05): previously, vanilla datacons guaranteed to
-- have the same type variables as their parent TyCon, but that seems ugly.]
-- INVARIANT: the UnivTyVars and ExTyVars all have distinct OccNames
-- Reason: less confusing, and easier to generate IfaceSyn
dcEqSpec :: [(TyVar,Type)], -- Equalities derived from the result type,
-- _as written by the programmer_
-- This field allows us to move conveniently between the two ways
-- of representing a GADT constructor's type:
-- MkT :: forall a b. (a ~ [b]) => b -> T a
-- MkT :: forall b. b -> T [b]
-- Each equality is of the form (a ~ ty), where 'a' is one of
-- the universally quantified type variables
-- The next two fields give the type context of the data constructor
-- (aside from the GADT constraints,
                --                  which are given by the dcEqSpec)
-- In GADT form, this is *exactly* what the programmer writes, even if
-- the context constrains only universally quantified variables
-- MkT :: forall a b. (a ~ b, Ord b) => a -> T a b
dcOtherTheta :: ThetaType, -- The other constraints in the data con's type
-- other than those in the dcEqSpec
dcStupidTheta :: ThetaType, -- The context of the data type declaration
-- data Eq a => T a = ...
-- or, rather, a "thinned" version thereof
-- "Thinned", because the Report says
-- to eliminate any constraints that don't mention
-- tyvars free in the arg types for this constructor
--
-- INVARIANT: the free tyvars of dcStupidTheta are a subset of dcUnivTyVars
                -- Reason: dcStupidTheta is gotten by thinning the stupid theta from the tycon
--
-- "Stupid", because the dictionaries aren't used for anything.
-- Indeed, [as of March 02] they are no longer in the type of
-- the wrapper Id, because that makes it harder to use the wrap-id
-- to rebuild values after record selection or in generics.
dcOrigArgTys :: [Type], -- Original argument types
-- (before unboxing and flattening of strict fields)
dcOrigResTy :: Type, -- Original result type, as seen by the user
-- NB: for a data instance, the original user result type may
-- differ from the DataCon's representation TyCon. Example
-- data instance T [a] where MkT :: a -> T [a]
-- The OrigResTy is T [a], but the dcRepTyCon might be :T123
-- Now the strictness annotations and field labels of the constructor
dcSrcBangs :: [HsBang],
-- See Note [Bangs on data constructor arguments]
-- For DataCons defined in this module:
-- the [HsSrcBang] as written by the programmer.
-- For DataCons imported from an interface file:
-- the [HsImplBang] determined when compiling the
-- defining module
--
-- Matches 1-1 with dcOrigArgTys
-- Hence length = dataConSourceArity dataCon
dcFields :: [FieldLabel],
-- Field labels for this constructor, in the
-- same order as the dcOrigArgTys;
-- length = 0 (if not a record) or dataConSourceArity.
-- The curried worker function that corresponds to the constructor:
-- It doesn't have an unfolding; the code generator saturates these Ids
-- and allocates a real constructor when it finds one.
dcWorkId :: Id,
-- Constructor representation
dcRep :: DataConRep,
-- Cached
dcRepArity :: Arity, -- == length dataConRepArgTys
dcSourceArity :: Arity, -- == length dcOrigArgTys
-- Result type of constructor is T t1..tn
dcRepTyCon :: TyCon, -- Result tycon, T
dcRepType :: Type, -- Type of the constructor
-- forall a x y. (a~(x,y), x~y, Ord x) =>
-- x -> y -> T a
-- (this is *not* the type of the constructor wrapper Id:
-- see Note [Data con representation] below)
-- Notice that the existential type parameters come *second*.
-- Reason: in a case expression we may find:
-- case (e :: T t) of
-- MkT x y co1 co2 (d:Ord x) (v:r) (w:F s) -> ...
-- It's convenient to apply the rep-type of MkT to 't', to get
-- forall x y. (t~(x,y), x~y, Ord x) => x -> y -> T t
-- and use that to check the pattern. Mind you, this is really only
-- used in CoreLint.
dcInfix :: Bool, -- True <=> declared infix
-- Used for Template Haskell and 'deriving' only
-- The actual fixity is stored elsewhere
dcPromoted :: Maybe TyCon -- The promoted TyCon if this DataCon is promotable
-- See Note [Promoted data constructors] in TyCon
}
deriving Data.Typeable.Typeable
data DataConRep
= NoDataConRep -- No wrapper
| DCR { dcr_wrap_id :: Id -- Takes src args, unboxes/flattens,
-- and constructs the representation
, dcr_boxer :: DataConBoxer
, dcr_arg_tys :: [Type] -- Final, representation argument types,
-- after unboxing and flattening,
-- and *including* all evidence args
, dcr_stricts :: [StrictnessMark] -- 1-1 with dcr_arg_tys
-- See also Note [Data-con worker strictness] in MkId.hs
, dcr_bangs :: [HsImplBang] -- The actual decisions made (including failures)
-- about the original arguments; 1-1 with orig_arg_tys
-- See Note [Bangs on data constructor arguments]
}
-- Algebraic data types always have a worker, and
-- may or may not have a wrapper, depending on whether
-- the wrapper does anything.
--
-- Data types have a worker with no unfolding
-- Newtypes just have a worker, which has a compulsory unfolding (just a cast)
-- _Neither_ the worker _nor_ the wrapper take the dcStupidTheta dicts as arguments
-- The wrapper (if it exists) takes dcOrigArgTys as its arguments
-- The worker takes dataConRepArgTys as its arguments
-- If the worker is absent, dataConRepArgTys is the same as dcOrigArgTys
-- The 'NoDataConRep' case is important
-- Not only is this efficient,
-- but it also ensures that the wrapper is replaced
-- by the worker (because it *is* the worker)
-- even when there are no args. E.g. in
-- f (:) x
-- the (:) *is* the worker.
-- This is really important in rule matching,
-- (We could match on the wrappers,
-- but that makes it less likely that rules will match
-- when we bring bits of unfoldings together.)
-------------------------
-- HsBang describes the strictness/unpack status of one
-- of the original data constructor arguments (i.e. *not*
-- of the representation data constructor which may have
-- more arguments after the originals have been unpacked)
-- See Note [Bangs on data constructor arguments]
data HsBang
= HsNoBang -- Equivalent to (HsSrcBang Nothing Nothing False)
| HsSrcBang -- What the user wrote in the source code
(Maybe SourceText) -- Note [Pragma source text] in BasicTypes
(Maybe Bool) -- Just True {-# UNPACK #-}
-- Just False {-# NOUNPACK #-}
-- Nothing no pragma
Bool -- True <=> '!' specified
-- (HsSrcBang _ (Just True) False) makes no sense
-- We emit a warning (in checkValidDataCon) and treat it
-- just like (HsSrcBang Nothing Nothing False)
-- Definite implementation commitments, generated by the compiler
-- after consulting HsSrcBang (if any), flags, etc
| HsUnpack -- Definite commitment: this field is strict and unboxed
(Maybe Coercion) -- co :: arg-ty ~ product-ty
| HsStrict -- Definite commitment: this field is strict but not unboxed
deriving (Data.Data, Data.Typeable)
-- Two type-insecure, but useful, synonyms
type HsSrcBang = HsBang -- What the user wrote; hence always HsNoBang or HsSrcBang
type HsImplBang = HsBang -- A HsBang implementation decision,
-- as determined by the compiler
-- Never HsSrcBang
-------------------------
-- StrictnessMark is internal only, used to indicate strictness
-- of the DataCon *worker* fields
data StrictnessMark = MarkedStrict | NotMarkedStrict
{- Note [Bangs on data constructor arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data T = MkT !Int {-# UNPACK #-} !Int Bool
When compiling the module, GHC will decide how to represent
MkT, depending on the optimisation level, and settings of
flags like -funbox-small-strict-fields.
Terminology:
* HsSrcBang: What the user wrote
Constructors: HsNoBang, HsSrcBang
* HsImplBang: What GHC decided
Constructors: HsNoBang, HsStrict, HsUnpack
* If T was defined in this module, MkT's dcSrcBangs field
records the [HsSrcBang] of what the user wrote; in the example
[ HsSrcBang _ Nothing     True
, HsSrcBang _ (Just True) True
, HsNoBang]
* However, if T was defined in an imported module, MkT's dcSrcBangs
field gives the [HsImplBang] recording the decisions of the
defining module. The importing module must follow those decisions,
regardless of the flag settings in the importing module.
* The dcr_bangs field of the dcRep field records the [HsImplBang]
If T was defined in this module, without -O the dcr_bangs might be
[HsStrict, HsStrict, HsNoBang]
With -O it might be
[HsStrict, HsUnpack, HsNoBang]
With -funbox-small-strict-fields it might be
[HsUnpack, HsUnpack, HsNoBang]
Note [Data con representation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The dcRepType field contains the type of the representation of a constructor.
This may differ from the type of the constructor *Id* (built
by MkId.mkDataConId) for two reasons:
a) the constructor Id may be overloaded, but the dictionary isn't stored
e.g. data Eq a => T a = MkT a a
b) the constructor may store an unboxed version of a strict field.
Here's an example illustrating both:
data Ord a => T a = MkT !Int a
Here
T :: Ord a => Int -> a -> T a
but the rep type is
Trep :: Int# -> a -> T a
Actually, the unboxed part isn't implemented yet!
************************************************************************
* *
\subsection{Instances}
* *
************************************************************************
-}
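-- A worked example tying the Notes above together (illustrative only).
-- For the GADT constructor used in the comments on dcEqSpec,
--     MkT :: forall b. b -> T [b]
-- the fields are roughly
--     dcUnivTyVars = [a]          dcExTyVars   = [b]
--     dcEqSpec     = [(a, [b])]   dcOtherTheta = []
--     dcOrigArgTys = [b]          dcOrigResTy  = T [b]
-- so dataConUserType yields    forall b. b -> T [b]
-- while dcRepType is roughly   forall a b. (a ~ [b]) => b -> T a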
instance Eq DataCon where
a == b = getUnique a == getUnique b
a /= b = getUnique a /= getUnique b
instance Ord DataCon where
a <= b = getUnique a <= getUnique b
a < b = getUnique a < getUnique b
a >= b = getUnique a >= getUnique b
a > b = getUnique a > getUnique b
compare a b = getUnique a `compare` getUnique b
instance Uniquable DataCon where
getUnique = dcUnique
instance NamedThing DataCon where
getName = dcName
instance Outputable DataCon where
ppr con = ppr (dataConName con)
instance OutputableBndr DataCon where
pprInfixOcc con = pprInfixName (dataConName con)
pprPrefixOcc con = pprPrefixName (dataConName con)
instance Data.Data DataCon where
-- don't traverse?
toConstr _ = abstractConstr "DataCon"
gunfold _ _ = error "gunfold"
dataTypeOf _ = mkNoRepType "DataCon"
instance Outputable HsBang where
ppr HsNoBang = empty
ppr (HsSrcBang _ prag bang) = pp_unpk prag <+> ppWhen bang (char '!')
ppr (HsUnpack Nothing) = ptext (sLit "Unpk")
ppr (HsUnpack (Just co)) = ptext (sLit "Unpk") <> parens (ppr co)
ppr HsStrict = ptext (sLit "StrictNotUnpacked")
pp_unpk :: Maybe Bool -> SDoc
pp_unpk Nothing = empty
pp_unpk (Just True) = ptext (sLit "{-# UNPACK #-}")
pp_unpk (Just False) = ptext (sLit "{-# NOUNPACK #-}")
instance Outputable StrictnessMark where
ppr MarkedStrict = ptext (sLit "!")
ppr NotMarkedStrict = empty
eqHsBang :: HsBang -> HsBang -> Bool
eqHsBang HsNoBang HsNoBang = True
eqHsBang HsStrict HsStrict = True
eqHsBang (HsSrcBang _ u1 b1) (HsSrcBang _ u2 b2) = u1==u2 && b1==b2
eqHsBang (HsUnpack Nothing) (HsUnpack Nothing) = True
eqHsBang (HsUnpack (Just c1)) (HsUnpack (Just c2)) = eqType (coercionType c1) (coercionType c2)
eqHsBang _ _ = False
isBanged :: HsBang -> Bool
isBanged HsNoBang = False
isBanged (HsSrcBang _ _ bang) = bang
isBanged (HsUnpack {}) = True
isBanged (HsStrict {}) = True
isMarkedStrict :: StrictnessMark -> Bool
isMarkedStrict NotMarkedStrict = False
isMarkedStrict _ = True -- All others are strict
{-
************************************************************************
* *
\subsection{Construction}
* *
************************************************************************
-}
-- | Build a new data constructor
mkDataCon :: Name
-> Bool -- ^ Is the constructor declared infix?
-> [HsBang] -- ^ Strictness/unpack annotations, from user;
-- or, for imported DataCons, from the interface file
-> [FieldLabel] -- ^ Field labels for the constructor, if it is a record,
-- otherwise empty
-> [TyVar] -- ^ Universally quantified type variables
-> [TyVar] -- ^ Existentially quantified type variables
-> [(TyVar,Type)] -- ^ GADT equalities
-> ThetaType -- ^ Theta-type occurring before the arguments proper
-> [Type] -- ^ Original argument types
-> Type -- ^ Original result type
-> TyCon -- ^ Representation type constructor
-> ThetaType -- ^ The "stupid theta", context of the data declaration
-- e.g. @data Eq a => T a ...@
-> Id -- ^ Worker Id
-> DataConRep -- ^ Representation
-> DataCon
-- Can get the tag from the TyCon
mkDataCon name declared_infix
arg_stricts -- Must match orig_arg_tys 1-1
fields
univ_tvs ex_tvs
eq_spec theta
orig_arg_tys orig_res_ty rep_tycon
stupid_theta work_id rep
-- Warning: mkDataCon is not a good place to check invariants.
-- If the programmer writes the wrong result type in the decl, thus:
-- data T a where { MkT :: S }
-- then it's possible that the univ_tvs may hit an assertion failure
-- if you pull on univ_tvs. This case is checked by checkValidDataCon,
-- so the error is detected properly... it's just that assertions here
-- are a little dodgy.
= con
where
is_vanilla = null ex_tvs && null eq_spec && null theta
con = MkData {dcName = name, dcUnique = nameUnique name,
dcVanilla = is_vanilla, dcInfix = declared_infix,
dcUnivTyVars = univ_tvs, dcExTyVars = ex_tvs,
dcEqSpec = eq_spec,
dcOtherTheta = theta,
dcStupidTheta = stupid_theta,
dcOrigArgTys = orig_arg_tys, dcOrigResTy = orig_res_ty,
dcRepTyCon = rep_tycon,
dcSrcBangs = arg_stricts,
dcFields = fields, dcTag = tag, dcRepType = rep_ty,
dcWorkId = work_id,
dcRep = rep,
dcSourceArity = length orig_arg_tys,
dcRepArity = length rep_arg_tys,
dcPromoted = mb_promoted }
-- The 'arg_stricts' passed to mkDataCon are simply those for the
-- source-language arguments. We add extra ones for the
-- dictionary arguments right here.
tag = assoc "mkDataCon" (tyConDataCons rep_tycon `zip` [fIRST_TAG..]) con
rep_arg_tys = dataConRepArgTys con
rep_ty = mkForAllTys univ_tvs $ mkForAllTys ex_tvs $
mkFunTys rep_arg_tys $
mkTyConApp rep_tycon (mkTyVarTys univ_tvs)
mb_promoted -- See Note [Promoted data constructors] in TyCon
| isJust (promotableTyCon_maybe rep_tycon)
-- The TyCon is promotable only if all its datacons
-- are, so the promoteType for prom_kind should succeed
= Just (mkPromotedDataCon con name (getUnique name) prom_kind roles)
| otherwise
= Nothing
prom_kind = promoteType (dataConUserType con)
roles = map (const Nominal) (univ_tvs ++ ex_tvs) ++
map (const Representational) orig_arg_tys
eqSpecPreds :: [(TyVar,Type)] -> ThetaType
eqSpecPreds spec = [ mkEqPred (mkTyVarTy tv) ty | (tv,ty) <- spec ]
{-
Note [Unpack equality predicates]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have a GADT with a constructor C :: (a~[b]) => b -> T a
we definitely want that equality predicate *unboxed* so that it
takes no space at all. This is easily done: just give it
an UNPACK pragma. The rest of the unpack/repack code does the
heavy lifting. This one line makes every GADT take a word less
space for each equality predicate, so it's pretty important!
-}
-- | The 'Name' of the 'DataCon', giving it a unique, rooted identification
dataConName :: DataCon -> Name
dataConName = dcName
-- | The tag used for ordering 'DataCon's
dataConTag :: DataCon -> ConTag
dataConTag = dcTag
-- | The type constructor that we are building via this data constructor
dataConTyCon :: DataCon -> TyCon
dataConTyCon = dcRepTyCon
-- | The original type constructor used in the definition of this data
-- constructor. In case of a data family instance, that will be the family
-- type constructor.
dataConOrigTyCon :: DataCon -> TyCon
dataConOrigTyCon dc
| Just (tc, _) <- tyConFamInst_maybe (dcRepTyCon dc) = tc
| otherwise = dcRepTyCon dc
-- | The representation type of the data constructor, i.e. the sort of
-- type that will represent values of this type at runtime
dataConRepType :: DataCon -> Type
dataConRepType = dcRepType
-- | Should the 'DataCon' be presented infix?
dataConIsInfix :: DataCon -> Bool
dataConIsInfix = dcInfix
-- | The universally-quantified type variables of the constructor
dataConUnivTyVars :: DataCon -> [TyVar]
dataConUnivTyVars = dcUnivTyVars
-- | The existentially-quantified type variables of the constructor
dataConExTyVars :: DataCon -> [TyVar]
dataConExTyVars = dcExTyVars
-- | Both the universal and existential type variables of the constructor
dataConAllTyVars :: DataCon -> [TyVar]
dataConAllTyVars (MkData { dcUnivTyVars = univ_tvs, dcExTyVars = ex_tvs })
= univ_tvs ++ ex_tvs
-- | Equalities derived from the result type of the data constructor, as written
-- by the programmer in any GADT declaration
dataConEqSpec :: DataCon -> [(TyVar,Type)]
dataConEqSpec = dcEqSpec
-- | The *full* constraints on the constructor type
dataConTheta :: DataCon -> ThetaType
dataConTheta (MkData { dcEqSpec = eq_spec, dcOtherTheta = theta })
= eqSpecPreds eq_spec ++ theta
-- | Get the Id of the 'DataCon' worker: a function that is the "actual"
-- constructor and has no top level binding in the program. The type may
-- be different from the obvious one written in the source program. Panics
-- if there is no such 'Id' for this 'DataCon'
dataConWorkId :: DataCon -> Id
dataConWorkId dc = dcWorkId dc
-- | Get the Id of the 'DataCon' wrapper: a function that wraps the "actual"
-- constructor so it has the type visible in the source program: c.f. 'dataConWorkId'.
-- Returns Nothing if there is no wrapper, which occurs for an algebraic data constructor
-- and also for a newtype (whose constructor is inlined compulsorily)
dataConWrapId_maybe :: DataCon -> Maybe Id
dataConWrapId_maybe dc = case dcRep dc of
NoDataConRep -> Nothing
DCR { dcr_wrap_id = wrap_id } -> Just wrap_id
-- | Returns an Id which looks like the Haskell-source constructor by using
-- the wrapper if it exists (see 'dataConWrapId_maybe') and failing over to
-- the worker (see 'dataConWorkId')
dataConWrapId :: DataCon -> Id
dataConWrapId dc = case dcRep dc of
NoDataConRep-> dcWorkId dc -- worker=wrapper
DCR { dcr_wrap_id = wrap_id } -> wrap_id
-- | Find all the 'Id's implicitly brought into scope by the data constructor. Currently,
-- the union of the 'dataConWorkId' and the 'dataConWrapId'
dataConImplicitIds :: DataCon -> [Id]
dataConImplicitIds (MkData { dcWorkId = work, dcRep = rep})
= case rep of
NoDataConRep -> [work]
DCR { dcr_wrap_id = wrap } -> [wrap,work]
-- | The labels for the fields of this particular 'DataCon'
dataConFieldLabels :: DataCon -> [FieldLabel]
dataConFieldLabels = dcFields
-- | Extract the type for any given labelled field of the 'DataCon'
dataConFieldType :: DataCon -> FieldLabel -> Type
dataConFieldType con label
= case lookup label (dcFields con `zip` dcOrigArgTys con) of
Just ty -> ty
Nothing -> pprPanic "dataConFieldType" (ppr con <+> ppr label)
-- | The strictness markings written by the programmer.
-- The list is in one-to-one correspondence with the arity of the 'DataCon'
dataConSrcBangs :: DataCon -> [HsSrcBang]
dataConSrcBangs = dcSrcBangs
-- | Source-level arity of the data constructor
dataConSourceArity :: DataCon -> Arity
dataConSourceArity (MkData { dcSourceArity = arity }) = arity
-- | Gives the number of actual fields in the /representation/ of the
-- data constructor. This may be more than appear in the source code;
-- the extra ones are the existentially quantified dictionaries
dataConRepArity :: DataCon -> Arity
dataConRepArity (MkData { dcRepArity = arity }) = arity
-- | The number of fields in the /representation/ of the constructor
-- AFTER taking into account the unpacking of any unboxed tuple fields
dataConRepRepArity :: DataCon -> RepArity
dataConRepRepArity dc = typeRepArity (dataConRepArity dc) (dataConRepType dc)
-- | Return whether there are any argument types for this 'DataCon's original source type
isNullarySrcDataCon :: DataCon -> Bool
isNullarySrcDataCon dc = null (dcOrigArgTys dc)
-- | Return whether there are any argument types for this 'DataCon's runtime representation type
isNullaryRepDataCon :: DataCon -> Bool
isNullaryRepDataCon dc = dataConRepArity dc == 0
dataConRepStrictness :: DataCon -> [StrictnessMark]
-- ^ Give the demands on the arguments of a
-- Core constructor application (Con dc args)
dataConRepStrictness dc = case dcRep dc of
NoDataConRep -> [NotMarkedStrict | _ <- dataConRepArgTys dc]
DCR { dcr_stricts = strs } -> strs
dataConImplBangs :: DataCon -> [HsImplBang]
-- The implementation decisions about the strictness/unpack of each
-- source program argument to the data constructor
dataConImplBangs dc
= case dcRep dc of
NoDataConRep -> replicate (dcSourceArity dc) HsNoBang
DCR { dcr_bangs = bangs } -> bangs
dataConBoxer :: DataCon -> Maybe DataConBoxer
dataConBoxer (MkData { dcRep = DCR { dcr_boxer = boxer } }) = Just boxer
dataConBoxer _ = Nothing
-- | The \"signature\" of the 'DataCon' returns, in order:
--
-- 1) The result of 'dataConAllTyVars',
--
-- 2) All the 'ThetaType's relating to the 'DataCon' (coercion, dictionary, implicit
-- parameter - whatever)
--
-- 3) The type arguments to the constructor
--
-- 4) The /original/ result type of the 'DataCon'
dataConSig :: DataCon -> ([TyVar], ThetaType, [Type], Type)
dataConSig (MkData {dcUnivTyVars = univ_tvs, dcExTyVars = ex_tvs,
dcEqSpec = eq_spec, dcOtherTheta = theta,
dcOrigArgTys = arg_tys, dcOrigResTy = res_ty})
= (univ_tvs ++ ex_tvs, eqSpecPreds eq_spec ++ theta, arg_tys, res_ty)
-- | The \"full signature\" of the 'DataCon' returns, in order:
--
-- 1) The result of 'dataConUnivTyVars'
--
-- 2) The result of 'dataConExTyVars'
--
-- 3) The result of 'dataConEqSpec'
--
-- 4) The 'dcOtherTheta' of the 'DataCon', i.e. its constraints other than the GADT equalities
--
-- 5) The original argument types to the 'DataCon' (i.e. before
-- any change of the representation of the type)
--
-- 6) The original result type of the 'DataCon'
dataConFullSig :: DataCon
-> ([TyVar], [TyVar], [(TyVar,Type)], ThetaType, [Type], Type)
dataConFullSig (MkData {dcUnivTyVars = univ_tvs, dcExTyVars = ex_tvs,
dcEqSpec = eq_spec, dcOtherTheta = theta,
dcOrigArgTys = arg_tys, dcOrigResTy = res_ty})
= (univ_tvs, ex_tvs, eq_spec, theta, arg_tys, res_ty)
dataConOrigResTy :: DataCon -> Type
dataConOrigResTy dc = dcOrigResTy dc
-- | The \"stupid theta\" of the 'DataCon', such as @data Eq a@ in:
--
-- > data Eq a => T a = ...
dataConStupidTheta :: DataCon -> ThetaType
dataConStupidTheta dc = dcStupidTheta dc
dataConUserType :: DataCon -> Type
-- ^ The user-declared type of the data constructor
-- in the nice-to-read form:
--
-- > T :: forall a b. a -> b -> T [a]
--
-- rather than:
--
-- > T :: forall a c. forall b. (c~[a]) => a -> b -> T c
--
-- NB: If the constructor is part of a data instance, the result type
-- mentions the family tycon, not the internal one.
dataConUserType (MkData { dcUnivTyVars = univ_tvs,
dcExTyVars = ex_tvs, dcEqSpec = eq_spec,
dcOtherTheta = theta, dcOrigArgTys = arg_tys,
dcOrigResTy = res_ty })
= mkForAllTys ((univ_tvs `minusList` map fst eq_spec) ++ ex_tvs) $
mkFunTys theta $
mkFunTys arg_tys $
res_ty
-- | Finds the instantiated types of the arguments required to construct a 'DataCon' representation
-- NB: these INCLUDE any dictionary args
-- but EXCLUDE the data-declaration context, which is discarded
-- It's all post-flattening etc; this is a representation type
dataConInstArgTys :: DataCon -- ^ A datacon with no existentials or equality constraints
-- However, it can have a dcTheta (notably it can be a
-- class dictionary, with superclasses)
-> [Type] -- ^ Instantiated at these types
-> [Type]
dataConInstArgTys dc@(MkData {dcUnivTyVars = univ_tvs, dcEqSpec = eq_spec,
dcExTyVars = ex_tvs}) inst_tys
= ASSERT2( length univ_tvs == length inst_tys
, ptext (sLit "dataConInstArgTys") <+> ppr dc $$ ppr univ_tvs $$ ppr inst_tys)
ASSERT2( null ex_tvs && null eq_spec, ppr dc )
map (substTyWith univ_tvs inst_tys) (dataConRepArgTys dc)
-- | Returns just the instantiated /value/ argument types of a 'DataCon',
-- (excluding dictionary args)
dataConInstOrigArgTys
:: DataCon -- Works for any DataCon
-> [Type] -- Includes existential tyvar args, but NOT
-- equality constraints or dicts
-> [Type]
-- For vanilla datacons, it's all quite straightforward
-- But for the call in MatchCon, we really do want just the value args
dataConInstOrigArgTys dc@(MkData {dcOrigArgTys = arg_tys,
dcUnivTyVars = univ_tvs,
dcExTyVars = ex_tvs}) inst_tys
= ASSERT2( length tyvars == length inst_tys
, ptext (sLit "dataConInstOrigArgTys") <+> ppr dc $$ ppr tyvars $$ ppr inst_tys )
map (substTyWith tyvars inst_tys) arg_tys
where
tyvars = univ_tvs ++ ex_tvs
-- | Returns the argument types of the wrapper, excluding all dictionary arguments
-- and without substituting for any type variables
dataConOrigArgTys :: DataCon -> [Type]
dataConOrigArgTys dc = dcOrigArgTys dc
-- | Returns the arg types of the worker, including *all* evidence, after any
-- flattening has been done and without substituting for any type variables
dataConRepArgTys :: DataCon -> [Type]
dataConRepArgTys (MkData { dcRep = rep
, dcEqSpec = eq_spec
, dcOtherTheta = theta
, dcOrigArgTys = orig_arg_tys })
= case rep of
NoDataConRep -> ASSERT( null eq_spec ) theta ++ orig_arg_tys
DCR { dcr_arg_tys = arg_tys } -> arg_tys
-- | The string @package:module.name@ identifying a constructor, which is attached
-- to its info table and used by the GHCi debugger and the heap profiler
dataConIdentity :: DataCon -> [Word8]
-- We want this string to be UTF-8, so we get the bytes directly from the FastStrings.
dataConIdentity dc = bytesFS (packageKeyFS (modulePackageKey mod)) ++
fromIntegral (ord ':') : bytesFS (moduleNameFS (moduleName mod)) ++
fromIntegral (ord '.') : bytesFS (occNameFS (nameOccName name))
where name = dataConName dc
mod = ASSERT( isExternalName name ) nameModule name
isTupleDataCon :: DataCon -> Bool
isTupleDataCon (MkData {dcRepTyCon = tc}) = isTupleTyCon tc
isUnboxedTupleCon :: DataCon -> Bool
isUnboxedTupleCon (MkData {dcRepTyCon = tc}) = isUnboxedTupleTyCon tc
-- | Vanilla 'DataCon's are those that are nice boring Haskell 98 constructors
isVanillaDataCon :: DataCon -> Bool
isVanillaDataCon dc = dcVanilla dc
classDataCon :: Class -> DataCon
classDataCon clas = case tyConDataCons (classTyCon clas) of
(dict_constr:no_more) -> ASSERT( null no_more ) dict_constr
[] -> panic "classDataCon"
dataConCannotMatch :: [Type] -> DataCon -> Bool
-- Returns True iff the data con *definitely cannot* match a
-- scrutinee of type (T tys)
-- where T is the dcRepTyCon for the data con
-- NB: look at *all* equality constraints, not only those
-- in dataConEqSpec; see Trac #5168
dataConCannotMatch tys con
| null theta = False -- Common
| all isTyVarTy tys = False -- Also common
| otherwise
= typesCantMatch [(Type.substTy subst ty1, Type.substTy subst ty2)
| (ty1, ty2) <- concatMap predEqs theta ]
where
dc_tvs = dataConUnivTyVars con
theta = dataConTheta con
subst = ASSERT2( length dc_tvs == length tys, ppr con $$ ppr dc_tvs $$ ppr tys )
zipTopTvSubst dc_tvs tys
-- TODO: could gather equalities from superclasses too
predEqs pred = case classifyPredType pred of
EqPred NomEq ty1 ty2 -> [(ty1, ty2)]
TuplePred ts -> concatMap predEqs ts
_ -> []
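-- Illustrative example (hypothetical constructor, not from GHC's sources):
-- given    data S a where { MkSInt :: S Int }
-- the constructor MkSInt carries the equality (a ~ Int), so
--     dataConCannotMatch [Bool] MkSInt   returns True
--     dataConCannotMatch [Int]  MkSInt   returns False
-- and instantiating at a bare type variable always returns False.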
{-
************************************************************************
* *
Building an algebraic data type
* *
************************************************************************
buildAlgTyCon is here because it is called from TysWiredIn, which in turn
depends on DataCon, but not on BuildTyCl.
-}
buildAlgTyCon :: Name
-> [TyVar] -- ^ Kind variables and type variables
-> [Role]
-> Maybe CType
-> ThetaType -- ^ Stupid theta
-> AlgTyConRhs
-> RecFlag
-> Bool -- ^ True <=> this TyCon is promotable
-> Bool -- ^ True <=> was declared in GADT syntax
-> TyConParent
-> TyCon
buildAlgTyCon tc_name ktvs roles cType stupid_theta rhs
is_rec is_promotable gadt_syn parent
= tc
where
kind = mkPiKinds ktvs liftedTypeKind
-- tc and mb_promoted_tc are mutually recursive
tc = mkAlgTyCon tc_name kind ktvs roles cType stupid_theta
rhs parent is_rec gadt_syn
mb_promoted_tc
mb_promoted_tc
| is_promotable = Just (mkPromotedTyCon tc (promoteKind kind))
| otherwise = Nothing
{-
************************************************************************
* *
Promoting of data types to the kind level
* *
************************************************************************
These two 'promoted..' functions are here because
* They belong together
* 'promoteDataCon' depends on DataCon stuff
-}
promoteDataCon :: DataCon -> TyCon
promoteDataCon (MkData { dcPromoted = Just tc }) = tc
promoteDataCon dc = pprPanic "promoteDataCon" (ppr dc)
promoteDataCon_maybe :: DataCon -> Maybe TyCon
promoteDataCon_maybe (MkData { dcPromoted = mb_tc }) = mb_tc
{-
Note [Promoting a Type to a Kind]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have a data constructor D
D :: forall (a:*). Maybe a -> T a
We promote this to be a type constructor 'D:
'D :: forall (k:BOX). 'Maybe k -> 'T k
The transformation from type to kind is done by promoteType
* Convert forall (a:*) to forall (k:BOX), and substitute
* Ensure all foralls are at the top (no higher rank stuff)
* Ensure that all type constructors mentioned (Maybe and T
in the example) are promotable; that is, they have kind
* -> ... -> * -> *
-}
-- | Promotes a type to a kind.
-- Assumes the argument satisfies 'isPromotableType'
promoteType :: Type -> Kind
promoteType ty
= mkForAllTys kvs (go rho)
where
(tvs, rho) = splitForAllTys ty
kvs = [ mkKindVar (tyVarName tv) superKind | tv <- tvs ]
env = zipVarEnv tvs kvs
go (TyConApp tc tys) | Just prom_tc <- promotableTyCon_maybe tc
= mkTyConApp prom_tc (map go tys)
go (FunTy arg res) = mkArrowKind (go arg) (go res)
go (TyVarTy tv) | Just kv <- lookupVarEnv env tv
= TyVarTy kv
go _ = panic "promoteType" -- Argument did not satisfy isPromotableType
promoteKind :: Kind -> SuperKind
-- Promote the kind of a type constructor
-- from (* -> * -> *) to (BOX -> BOX -> BOX)
promoteKind (TyConApp tc [])
| tc `hasKey` liftedTypeKindTyConKey = superKind
promoteKind (FunTy arg res) = FunTy (promoteKind arg) (promoteKind res)
promoteKind k = pprPanic "promoteKind" (ppr k)
{-
************************************************************************
* *
\subsection{Splitting products}
* *
************************************************************************
-}
-- | Extract the type constructor, type arguments, data constructor and its
-- /representation/ argument types from a type if it is a product type.
--
-- Precisely, we return @Just@ for any type that is all of:
--
-- * Concrete (i.e. constructors visible)
--
-- * Single-constructor
--
-- * Not existentially quantified
--
-- Whether the type is a @data@ type or a @newtype@
splitDataProductType_maybe
:: Type -- ^ A product type, perhaps
-> Maybe (TyCon, -- The type constructor
[Type], -- Type args of the tycon
DataCon, -- The data constructor
[Type]) -- Its /representation/ arg types
-- Rejecting existentials is conservative. Maybe some things
-- could be made to work with them, but I'm not going to sweat
-- it through till someone finds it's important.
splitDataProductType_maybe ty
| Just (tycon, ty_args) <- splitTyConApp_maybe ty
, Just con <- isDataProductTyCon_maybe tycon
= Just (tycon, ty_args, con, dataConInstArgTys con ty_args)
| otherwise
= Nothing
| gcampax/ghc | compiler/basicTypes/DataCon.hs | bsd-3-clause | 47,245 | 0 | 18 | 13,922 | 5,061 | 2,885 | 2,176 | 412 | 4 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.MESA.YCbCrTexture
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.MESA.YCbCrTexture (
-- * Extension Support
glGetMESAYCbCrTexture,
gl_MESA_ycbcr_texture,
-- * Enums
pattern GL_UNSIGNED_SHORT_8_8_MESA,
pattern GL_UNSIGNED_SHORT_8_8_REV_MESA,
pattern GL_YCBCR_MESA
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
| haskell-opengl/OpenGLRaw | src/Graphics/GL/MESA/YCbCrTexture.hs | bsd-3-clause | 724 | 0 | 5 | 99 | 57 | 42 | 15 | 9 | 0 |
module Main where
import Cauterize.ErlangRef.Options
import Cauterize.ErlangRef.Generate
import System.Directory
import System.FilePath.Posix
import Data.Text (unpack)
import qualified Cauterize.Specification as S
main :: IO ()
main = runWithOptions caut2erlang
caut2erlang :: ErlangOpts -> IO ()
caut2erlang ErlangOpts { specFile = sf, outputDirectory = od } = createGuard od $ do
spec <- loadSpec sf
let baseName = unpack $ S.specName spec
generateDynamicFiles od baseName spec
where
loadSpec :: FilePath -> IO S.Specification
loadSpec p = do
s <- S.parseSpecificationFromFile p
case s of
Left e -> error $ show e
Right s' -> return s'
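-- Ensure the output path is usable before running the generator action:
-- fail if it names an existing file, reuse it if it is already a directory,
-- and create the directory otherwise.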
createGuard :: FilePath -> IO a -> IO a
createGuard out go = do
fe <- doesFileExist out
de <- doesDirectoryExist out
if fe
then error $ "Error: " ++ out ++ " is a file."
else if de
then go
else createDirectory out >> go
generateDynamicFiles :: FilePath -> String -> S.Specification -> IO ()
generateDynamicFiles path baseName spec = do
writeFile (path `combine` (baseName ++ ".erl")) (erlFileFromSpec spec)
| cauterize-tools/caut-erl-ref | app/Main.hs | bsd-3-clause | 1,132 | 0 | 13 | 250 | 369 | 186 | 183 | 32 | 3 |
import System.Environment (getArgs)
stripChars :: String -> String -> String
stripChars = filter . flip notElem
distance :: [Double] -> String
distance [x1, y1, x2, y2] = show . floor $ sqrt (x*x + y*y)
where x = x1 - x2
y = y1 - y2
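-- Illustrative example (assumed input format): a line such as
--     (25.0, 4.0), (1.5, -2.0)
-- is stripped of the characters "()," by stripChars, split into words,
-- read as Doubles, and reduced by 'distance' to the truncated Euclidean
-- distance, here "24".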
main :: IO ()
main = do
[inpFile] <- getArgs
input <- readFile inpFile
putStr . unlines . map (distance . map read . words) . lines $ stripChars "()," input
| nikai3d/ce-challenges | easy/calculate_distance.hs | bsd-3-clause | 479 | 0 | 14 | 166 | 190 | 98 | 92 | 12 | 1 |
module Phil.AST.Expr where
import Data.List.NonEmpty (NonEmpty)
import Phil.AST.Binding
import Phil.Core.AST.Identifier
import Phil.Core.AST.Literal
import Phil.Core.AST.Pattern
import Phil.Core.AST.Types
import Phil.Typecheck.Unification
type Placeholder = (Ctor, NonEmpty Type)
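-- | Apply a type substitution to the argument types of a dictionary
-- placeholder; any other expression is returned unchanged.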
subPlaceholders subs (DictPlaceholder (className, tyArgs))
= DictPlaceholder (className, substitute subs <$> tyArgs)
subPlaceholders _ expr = expr
data Expr
= Var (Either Ident Ctor)
| Lit Literal
| Prod Ctor [Expr]
| App Expr Expr
| Abs Ident Expr
| Let (Binding Expr) Expr
| Rec (Binding Expr) Expr
| Case Expr (NonEmpty (Pattern, Expr))
| DictPlaceholder Placeholder
| RecPlaceholder Ident
| DictVar Ident
| DictInst Ctor (NonEmpty Ctor)
| DictSel Ident Expr
| DictSuper Ctor Expr
| Error String
deriving (Eq, Show)
| LightAndLight/hindley-milner | src/Phil/AST/Expr.hs | bsd-3-clause | 838 | 0 | 9 | 146 | 270 | 158 | 112 | 29 | 1 |
{-# LANGUAGE ViewPatterns, TemplateHaskell, TypeFamilies, UndecidableInstances, Rank2Types, FlexibleContexts #-}
-- tournament between neural networks - each working with the same DBN.
module Main where
import Prelude hiding (putStr, putStrLn)
import GenericGameExperiments
import GenericGame
import AgentGeneric
import BreakthroughGame
import ConstraintsGA -- NN1 comes from here
import Matlab
import MinimalNN
import MinimalGA
import NeuralNets
import ThreadLocal
import Utils
import THUtils
import Control.Applicative
import Control.Concurrent
import Control.Concurrent.Async
import Control.Monad
import Data.Maybe
import Data.List.Split (splitEvery)
import Data.List (transpose)
import Data.Default
import Data.IORef
import Data.Timeout
import System.Random.MWC
import Text.Printf
import qualified Data.Map.Strict as Map
import Data.Map.Strict (Map)
type DBN = TNetwork
-- | This is our working element: first the DBN, then a single layer of interpreters, finally summed by a (virtual) neuron at the end with weights tied to every neuron.
-- the @g@ type param fixes game type, @ag@ type param fixes agent type.
data EvolNet ag g = EvolNet DBN [(NN1, Double)] deriving (Show,Eq,Ord,Read)
type MyEntType = EvolNet (AgentSimple TNetwork) Breakthrough
useCachedDBN = False
-- searchTimeout = 10 # Second
dbnGameCount = 100000
dbnGameProb = 0.1
dbnMatlabOpts = Just (def {dbnSizes = [25], numEpochs = 5, implementation = Matlab})
-- constraintSource = CS_Gameplay playerUseCoinstraints gameplayConstraints'0
-- playerUseCoinstraints = 100
-- evalCount = 12
mctsCount = 10 :: Int
-- constraintSetSize = 20
ecPopulationSizeP = 20
ecKillCountP = ecPopulationSizeP `div` 2
randomGamesCount :: (Num a) => a
randomGamesCount = 50
randomGamesProb :: Float
randomGamesProb = 0.01
initialNeuronCount = 50
nn1'mutation'range :: (Num a) => (a,a)
nn1'mutation'range = (0,1)
mctsLevel = 75
main :: IO ()
main = runThrLocMainIO $ do
printTL "exp10::start new run"
printTL "source code for this experiment: "
putStrLnTL showExperimentSource
printTL "DBN read/train"
fn <- getDBNCachedOrNew useCachedDBN dbnGameCount dbnGameProb dbnMatlabOpts
printTL ("DBN FN=",fn)
(dbn,dbnLayerSizes) <- parseNetFromFile <$> readFile fn
-- (dbn,dbnLayerSizes) <- return ((mkTNetwork [] []), [192])
let dbnLastLayerSize = last dbnLayerSizes
threads <- getNumCapabilities
gen <- mkGenIO
-- score reporting
let reportScores (esBest -> (score, ent)) = do
printTL ("exp10::evolutionNewStep::best.score",score)
printTL ("exp10::evolutionNewStep::best.ent")
printTL (ent :: MyEntType)
-- (AgentGameTree dbn _) <- mkAgentEvolNet ent
(AgentSimple dbn) <- mkAgentEvolNet ent
printTL ("exp10::evolutionNewStep::best.dbn::start")
printTL dbn
printTL ("exp10::evolutionNewStep::best.dbn::end")
benchmarkEntity ent
-- evolution params
let entParams = (dbn,initialNeuronCount)
workParams = (gen,dbnLastLayerSize)
scoreParams = ([],[]) -- scoreParams = ((readIORef agentsRef),(readIORef gamesRef))
callbackNewStep esParams = do
printTL ("exp10::callbackNewStep")
-- populateGamesRef
-- populateAgentsRef (map snd $ (esPopulation esParams ++ esArchive esParams))
reportScores esParams
return True
evOpts = def { ecPopulationSize = ecPopulationSizeP
, ecArchiveSize = 0
, ecKillCount = ecKillCountP
, ecCallbackNewStep = callbackNewStep
}
printTL "exp10::evolve"
results <- evolve evOpts entParams workParams scoreParams
mapM_ (\ (score,ent) -> do
printTL ("exp10::score",score)
printTL ("exp10::ent",(ent :: MyEntType))
) results
printTL "exp10::finished"
benchmarkEntity :: MyEntType -> ThrLocIO ()
benchmarkEntity ent@(EvolNet dbn lst) = do
-- threads <- getNumCapabilities
-- setNumCapabilities 1
printTL "exp10::benchmarkEntity"
printTL ent
myAgent <- mkAgentEvolNet (ent :: MyEntType)
agRnd <- mkAgent ()
agMCTS <- mkAgent mctsLevel
wcRND <- reportWinCount 100 myAgent (agRnd :: AgentRandom) P1
printTL ("exp10::win count random", wcRND)
wcMCTS <- reportWinCount 10 myAgent (agMCTS :: AgentMCTS) P1
printTL ("exp10::win count MCTS", wcMCTS)
-- setNumCapabilities threads
showExperimentSource :: String
showExperimentSource = $(quoteThisFile)
mkAgentEvolNet :: (EvolNet ag g) -> IO (AgentSimple TNetwork) -- (AgentGameTree TNetwork) --
mkAgentEvolNet (EvolNet dbn lst) = runThrLocMainIO $ do
mkAgent finalNetwork
-- mkAgent (finalNetwork, 2)
where
-- int. layer
neurons = map (getNeuron . fst) lst
(weights, biases) = unzip neurons
interpretingLayer = mkTNetwork [concat $ concat weights] [concat $ concat biases]
-- gath. neuron
gn'weigths = map snd lst
gatherNeuron = uncurry mkTNetwork . getNeuron . mkSingleNeuron $ gn'weigths
-- final network
finalNetwork = (appendNetwork dbn (appendNetwork interpretingLayer gatherNeuron))
calcEntitySize :: (EvolNet ag g) -> Int
calcEntitySize (EvolNet _ lst) = length lst
newtype Double' = Double' { fromD' :: Double } deriving (Ord,Eq)
instance Show Double' where
show d = printf "%2.5f" (fromD' d)
instance (Agent2 ag, ag ~ (AgentSimple TNetwork), g ~ Breakthrough) => MinimalGA (EvolNet ag g) where
type Score (EvolNet ag g) = (Double',Int)
-- | other networks to compare against, set of game states
type ScoreDataset (EvolNet ag g) = ([ag],[g])
-- | dbn, new neuron count
type EntityParams (EvolNet ag g) = (DBN, Int)
-- | DBN last layer size
type WorkParams (EvolNet ag g) = (GenIO,Int)
newEntity (gen,dbnSize) (dbn,initialLayerSize) = do
nns <- replicateM initialLayerSize (newEntity (gen,dbnSize,nn1'mutation'range) dbnSize)
weights <- replicateM initialLayerSize (uniformR (-1,1) gen)
return (EvolNet dbn (zip nns weights))
crossover (gen,_) (EvolNet dbn neurons1) (EvolNet _ neurons2) = do
let neurons = neurons1 ++ neurons2
choices <- replicateM (length neurons) (uniform gen)
let chosenNeurons = catMaybes (zipWith boolToMaybe choices neurons)
return (EvolNet dbn chosenNeurons)
mutation (gen,dbnSize) mutForce (EvolNet dbn neurons) = do
let params = (gen,dbnSize,nn1'mutation'range)
neurons' <- mapM (mutation params mutForce) (map fst neurons)
let weights = map snd neurons
single = mkSingleNeuron weights
single' <- mutation params mutForce single
let weights' = concat . concat . fst . getNeuron $ single'
neurons'new = zip neurons' weights'
return (EvolNet dbn neurons'new)
scoreEntity (agents, games) ent = do
error "exp10::scoreEntity is undefined for single entity"
-- caps <- getNumCapabilities
-- --
-- entAgents <- replicateM caps (mkAgentEvolNet ent)
-- let cb = GameDriverCallback (\ _ -> return ()) (\ _ _ -> return True)
-- work = zip entAgents (splitWorkCaps caps [ \ thisAg -> driverG2 g (thisAg :: AgentSimple TNetwork) otherAg cb | otherAg <- agents, g <- games ])
-- states <- concat <$> mapConcurrently (\ (agent, tasks) -> mapM (\ t -> t agent) tasks) work
-- -- game score
-- let wins = length $ filter (==(Just P1)) $ map winner states
-- totalGames = max (length states) 1 -- hack, but we dont want divide by zero.
-- score = Double' (1 - ((fromIntegral wins) / (fromIntegral totalGames)))
-- -- size score
-- let size = calcEntitySize ent
-- final = (score, size)
-- printTL ("exp10::final entity score",final)
-- return final
scorePopulation _dataset entities = do
-- prepare games
gamesRef <- newIORef []
sampleRandomGamesCount randomGamesCount randomGamesProb (\g -> modifyIORef gamesRef ((g::Breakthrough):))
games <- readIORef gamesRef
-- scores
wins <- newMVar (Map.fromList (zip entities (cycle [0]))) -- win counts
let addWin entity = modifyMVar_ wins ((return . addWinMap entity)$!)
addWinMap entity mapping = Map.adjust (+1) entity mapping
totalGames = 2 * (sum [ 1 | e1 <- entities, e1 /= (head entities), g <- games ])
calcFinalScore mapping entity =
case Map.lookup entity mapping of
Nothing -> error "exp10::missing entity in map"
Just wins -> do let score = Double' (1 - ((fromIntegral wins) / (fromIntegral totalGames)))
size = calcEntitySize entity
final = (score,size)
printTL ("exp10::final entity score",final)
return final
-- run game pairs
let runGame ent1 ent2 g = do
ag1 <- mkAgentEvolNet ent1
ag2 <- mkAgentEvolNet ent2
let cb = GameDriverCallback (\ _ -> return ()) (\ _ _ -> return True)
finished <- driverG2 g ag1 ag2 cb
case winner finished of
Nothing -> error "exp10::scorePopulation::finished game, no winner"
Just P1 -> addWin ent1
Just P2 -> addWin ent2
runWorkThreadsProgress_ [ runGame e1 e2 g | e1 <- entities, e2 <- entities, e1 /= e2, g <- games ]
-- calculate scores
scoreMap <- takeMVar wins
scores <- mapM (calcFinalScore scoreMap) entities
return (zip scores entities)
| Tener/deeplearning-thesis | src/gg-exp10.hs | bsd-3-clause | 9,402 | 4 | 25 | 2,141 | 2,277 | 1,195 | 1,082 | 167 | 1 |
------------------------------------------------------------------------------
module Extraction where
import Monad
import StrategyLib hiding (replaceFocus)
import VarsAnalyses
import Datatypes
--- Method extraction --------------------------------------------------------
--- Main strategy ---
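-- Informal sketch of the transformation (JOOS-like syntax): the focused
-- statement inside a class body is replaced by the call
--     this.newMethod(<free used variables>);
-- and a declaration
--     newMethod(<those variables as formal parameters>) { <focused statement> }
-- is appended to the class, provided the legality check succeeds (the focused
-- statement has no free defined variables and contains no return statement).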
extractMethod :: (Term t, MonadPlus m) => t -> m t
extractMethod joos = applyTP (once_tdTP (monoTP extrMethFromCls)) joos
extrMethFromCls :: MonadPlus m => ClassDeclaration -> m ClassDeclaration
extrMethFromCls (ClassDecl fin nm sup fs cs ds)
= do (pars,body) <- ifLegalGetParsAndBody ds
ds' <- replaceFocus pars (ds++[constructMethod pars body])
return (ClassDecl fin nm sup fs cs ds')
--- Check legality and extract parameters and body ---
ifLegalGetParsAndBody :: (Term t, MonadPlus m)
=> t -> m ([([Char],Type)],Statement)
ifLegalGetParsAndBody ds
= applyTU (once_peTU [] appendLocals ifLegalGetParsAndBody1) ds
where ifLegalGetParsAndBody1 env
= getFocus `passTU` \s ->
ifthenTU (voidTP (isLegal env))
( freeUseVars env `passTU` \pars ->
constTU (pars,s) )
appendLocals env = op2TU appendMap (tryTU declVars) (constTU env)
--- Replace focussed statement with method invocation ---
replaceFocus :: (Term t, MonadPlus m) => [(Identifier,Type)] -> t -> m t
replaceFocus pars ds = applyTP (once_tdTP (replaceFocus1 pars)) ds
where replaceFocus1 pars = getFocus `passTP` \_ ->
monoTP (const (return (constructMethodCall pars)))
--- Legality check ---
isLegal :: MonadPlus m => [([Char],Type)] -> TP m
isLegal env = freeDefVars env `passTP` \env' ->
if null env' then notTU (once_tdTU getReturn) else failTP
--- Retreive focused statement ---
getFocus :: MonadPlus m => TU Statement m
getFocus = monoTU (\s -> case s of (StatFocus s') -> return s'
_ -> mzero )
--- Test for a return statement ---
getReturn :: MonadPlus m => TU (Maybe Expression) m
getReturn = monoTU (\s -> case s of (ReturnStat x) -> return x
_ -> mzero )
--- Code generation ---
constructMethod :: [(Identifier,Type)] -> Statement -> MethodDeclaration
constructMethod pars body
= MethodDecl Nothing "newMethod"
(FormalParams fpars) (BlockStatements [] [body])
where fpars = map (\(v,t) -> FormalParam t v) pars
constructMethodCall :: [(Identifier,Type)] -> Statement
constructMethodCall pars
= MethodInvocationStat
(ExpressionInvocation This "newMethod" (Arguments args))
where args = map (\(v,t) -> Identifier v) pars
------------------------------------------------------------------------------
| forste/haReFork | StrategyLib-4.0-beta/examples/joos-padl02/Extraction.hs | bsd-3-clause | 3,069 | 0 | 15 | 937 | 822 | 435 | 387 | 45 | 2 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.INTEL.MapTexture
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/INTEL/map_texture.txt INTEL_map_texture> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.INTEL.MapTexture (
-- * Enums
gl_LAYOUT_DEFAULT_INTEL,
gl_LAYOUT_LINEAR_CPU_CACHED_INTEL,
gl_LAYOUT_LINEAR_INTEL,
gl_TEXTURE_MEMORY_LAYOUT_INTEL,
-- * Functions
glMapTexture2DINTEL,
glSyncTextureINTEL,
glUnmapTexture2DINTEL
) where
import Graphics.Rendering.OpenGL.Raw.Tokens
import Graphics.Rendering.OpenGL.Raw.Functions
| phaazon/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/INTEL/MapTexture.hs | bsd-3-clause | 883 | 0 | 4 | 103 | 64 | 50 | 14 | 10 | 0 |
-- | Common pitch representation.
module Music.Pitch.Common (
module Music.Pitch.Common.Semitones,
module Music.Pitch.Common.Quality,
module Music.Pitch.Common.Number,
module Music.Pitch.Common.Interval,
module Music.Pitch.Common.Pitch,
module Music.Pitch.Common.Spell,
module Music.Pitch.Common.Types,
module Music.Pitch.Common.Harmony,
module Music.Pitch.Common.Names,
) where
import Music.Pitch.Common.Pitch
import Music.Pitch.Common.Quality
import Music.Pitch.Common.Number
import Music.Pitch.Common.Interval
import Music.Pitch.Common.Semitones
import Music.Pitch.Common.Spell
import Music.Pitch.Common.Types
import Music.Pitch.Common.Harmony
import Music.Pitch.Common.Names
| music-suite/music-pitch | src/Music/Pitch/Common.hs | bsd-3-clause | 719 | 0 | 5 | 85 | 146 | 107 | 39 | 19 | 0 |
module Language.Structure.Binary where
import qualified Data.Tree as Rose
import Language.POS (POS)
import Language.Word (Word (Word))
import Text.Printf (printf)
-- | Binary constituency trees carry POS tags in their internal nodes and
-- words in their leaves.
data Tree
= Leaf Word
| Node POS Tree Tree
deriving (Eq)
instance Ord Tree where
compare x y = compare (leftMostIndex x) (leftMostIndex y)
instance Show Tree where
show (Leaf lbl) = show lbl
show (Node pos l r) = "(" ++ unwords (show pos : show l : show r : []) ++ ")"
-- |Get the left-most index from a constituency tree.
leftMostIndex :: Tree -> Int
leftMostIndex (Leaf (Word _ _ i)) = i
leftMostIndex (Node _ l r) = leftMostIndex l `min` leftMostIndex r
-- |Compute a node with the correct left/right.
node :: POS -> Tree -> Tree -> Tree
node pos x y =
if leftMostIndex x < leftMostIndex y then Node pos x y else Node pos y x
-- |Convert a given tree to an instance of `Data.Tree` and draw it.
asASCII :: Tree -> String
asASCII = Rose.drawTree . go
where
go :: Tree -> Rose.Tree String
go (Leaf word) = Rose.Node (show word) []
go (Node pos l r) = Rose.Node (show pos) (map go [l,r])
-- |Convert a given tree to a Markdown representation of it.
asMarkdown :: Tree -> String
asMarkdown (Leaf (Word txt _ _)) = show txt
asMarkdown (Node pos left right) = printf "[%s %s %s]" (show pos) (asMarkdown left) (asMarkdown right)
-- |Check if two trees are structurally equal.
(==^) :: Tree -> Tree -> Bool
(Leaf (Word _ _ i)) ==^ (Leaf (Word _ _ j)) = i == j
(Node _ l1 r1) ==^ (Node _ l2 r2) = l1 ==^ l2 && r1 ==^ r2
_ ==^ _ = False
| pepijnkokke/Dep2Con | src/Language/Structure/Binary.hs | bsd-3-clause | 1,718 | 0 | 13 | 435 | 627 | 324 | 303 | 32 | 2 |
{-# LANGUAGE FlexibleContexts, TypeFamilies, GeneralizedNewtypeDeriving,
MultiParamTypeClasses, CPP, UndecidableInstances #-}
module Test.WebDriver.Monad
( WD(..), runWD, runSession, withSession, finallyClose, closeOnException, dumpSessionHistory
) where
import Test.WebDriver.Class
import Test.WebDriver.Session
import Test.WebDriver.Config
import Test.WebDriver.Commands
import Test.WebDriver.Internal
import Control.Monad.Base (MonadBase, liftBase)
import Control.Monad.Reader
import Control.Monad.Trans.Control (MonadBaseControl(..), StM)
import Control.Monad.State.Strict (StateT, MonadState, evalStateT, get, put)
--import Control.Monad.IO.Class (MonadIO)
import Control.Exception.Lifted
import Control.Monad.Catch (MonadThrow, MonadCatch)
import Control.Applicative
{- |A monadic interface to the WebDriver server. This monad is simply a
state monad transformer over 'IO', threading session information between sequential webdriver commands
-}
newtype WD a = WD (StateT WDSession IO a)
deriving (Functor, Applicative, Monad, MonadIO, MonadThrow, MonadCatch)
instance MonadBase IO WD where
liftBase = WD . liftBase
instance MonadBaseControl IO WD where
#if MIN_VERSION_monad_control(1,0,0)
type StM WD a = StM (StateT WDSession IO) a
liftBaseWith f = WD $
liftBaseWith $ \runInBase ->
f (\(WD sT) -> runInBase $ sT)
restoreM = WD . restoreM
#else
data StM WD a = StWD {unStWD :: StM (StateT WDSession IO) a}
liftBaseWith f = WD $
liftBaseWith $ \runInBase ->
f (\(WD sT) -> liftM StWD . runInBase $ sT)
restoreM = WD . restoreM . unStWD
#endif
instance WDSessionState WD where
getSession = WD get
putSession = WD . put
instance WebDriver WD where
doCommand headers method path args =
mkRequest headers method path args
>>= sendHTTPRequest
>>= getJSONResult
>>= either throwIO return
-- |Executes a 'WD' computation within the 'IO' monad, using the given
-- 'WDSession'.
runWD :: WDSession -> WD a -> IO a
runWD sess (WD wd) = evalStateT wd sess
-- |Executes a 'WD' computation within the 'IO' monad, automatically creating a new session beforehand.
--
-- NOTE: session is not automatically closed when complete. If you want this behavior, use 'finallyClose'.
-- Example:
-- > runSessionThenClose action = runSession myConfig . finallyClose $ action
runSession :: WDConfig -> WD a -> IO a
runSession conf wd = do
sess <- mkSession conf
runWD sess $ createSession (wdRequestHeaders conf) (wdCapabilities conf) >> wd
-- |Locally sets a 'WDSession' for use within the given 'WD' action.
-- The state of the outer action is unaffected by this function.
-- This function is useful if you need to work with multiple sessions at once.
withSession :: WDSession -> WD a -> WD a
withSession s' (WD wd) = WD . lift $ evalStateT wd s'
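-- Example use of 'withSession' (illustrative sketch; @sessA@ and @sessB@ are
-- assumed to be sessions created elsewhere, and 'openPage' comes from
-- "Test.WebDriver.Commands"):
--
-- > runWD sessA $ do
-- >   openPage "http://example.com"
-- >   withSession sessB $ openPage "http://example.org"  -- runs against sessB
-- >   openPage "http://example.net"                      -- back on sessA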
-- |A finalizer ensuring that the session is always closed at the end of
-- the given 'WD' action, regardless of any exceptions.
finallyClose :: WebDriver wd => wd a -> wd a
finallyClose wd = closeOnException wd <* closeSession
-- |Exception handler that closes the session when an
-- asynchronous exception is thrown, but otherwise leaves the session open
-- if the action was successful.
closeOnException :: WebDriver wd => wd a -> wd a
closeOnException wd = wd `onException` closeSession
-- |Prints a history of API requests to stdout after computing the given action.
dumpSessionHistory :: (MonadIO wd, WebDriver wd) => wd a -> wd a
dumpSessionHistory wd = do
v <- wd
getSession >>= liftIO . print . wdSessHist
return v
| plow-technologies/hs-webdriver | src/Test/WebDriver/Monad.hs | bsd-3-clause | 3,574 | 0 | 12 | 636 | 694 | 378 | 316 | 52 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Control.Monad
import System.IO
import Text.XML.Pipe
import Network
import qualified Data.ByteString.Char8 as BSC
import Network.XmlPush.HttpPush
import TestPusher
main :: IO ()
main = do
soc <- listenOn $ PortNumber 80
forever $ do
(sh, _, _) <- accept soc
testPusher (undefined :: HttpPush Handle) (Two Nothing $ Just sh)
(HttpPushArgs getClientHandle Nothing
Nothing gtPth wntRspns)
getClientHandle :: XmlNode -> Maybe (IO Handle, String, Int, FilePath)
getClientHandle (XmlNode (_, "client") [] [] [XmlCharData hn]) = Just (
connectTo (BSC.unpack hn) $ PortNumber 8080,
"localhost",
8080,
"/" )
getClientHandle _ = Nothing
wntRspns :: XmlNode -> Bool
wntRspns (XmlNode (_, "monologue") _ [] []) = False
wntRspns _ = True
gtPth :: XmlNode -> FilePath
gtPth (XmlNode (_, "father") _ [] []) = "family"
gtPth _ = "others"
| YoshikuniJujo/xml-push | examples/httpPushT.hs | bsd-3-clause | 888 | 12 | 13 | 155 | 350 | 186 | 164 | 29 | 1 |
{-# LANGUAGE ExplicitForAll #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE ViewPatterns #-}
-- | Utilities to remove unused qualified imports
module Importify.Resolution.Qualified
( removeUnusedQualifiedImports
) where
import Universum
import Data.List (partition)
import Language.Haskell.Exts (ImportDecl (..), ModuleHead (..),
ModuleName (..), QName (..))
import Language.Haskell.Names (NameInfo (GlobalSymbol), Scoped)
import Language.Haskell.Names.SyntaxUtils (dropAnn)
import Extended.Data.Bool ((==>))
import Importify.Syntax (getImportModuleName, isInsideExport,
scopedNameInfo, scopedNameInfo)
-- | Remove unused @qualified as@ imports, i.e. imports in one of the following forms:
-- @
-- import qualified Data.List
-- import qualified Data.List as L
-- import Data.List as L
-- @
-- This function ignores imports with explicit import lists because it
-- runs after the stage where symbols from explicit lists have already been removed.
removeUnusedQualifiedImports :: [ImportDecl l]
-> Maybe (ModuleHead l)
-> [Scoped l]
-> [ModuleName ()] -- ^ Unused @import A as B@
-> [ImportDecl l]
removeUnusedQualifiedImports imports moduleHead annotations unusedImplicits =
let (possiblyUnused, others) = partition (possiblyUnusedImport unusedImplicits) imports
isImportNeeded name = isInsideExport moduleHead name
|| isInsideModule annotations name
byModuleName = maybe True isImportNeeded
. fmap dropAnn
. qualifiedName
neededQualified = filter byModuleName possiblyUnused
in neededQualified ++ others
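-- | An import is a candidate for removal when it has no explicit import list
-- and, in the @import A as B@ form (without @qualified@), only when module @A@
-- was already reported as an unused implicit import.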
possiblyUnusedImport :: [ModuleName ()] -> ImportDecl l -> Bool
possiblyUnusedImport unusedImplicits decl = isNothing (importSpecs decl)
&& isNotImplicitUnused
where
isNotImplicitUnused = (isJust (importAs decl) && not (importQualified decl))
==> getImportModuleName decl `elem` unusedImplicits
-- | For a given import, collect its qualified name.
-- Qualified names gathered using next scheme:
-- @
-- import A ⇒ Nothing
-- import qualified B ⇒ Just B
-- import qualified C as X ⇒ Just X
-- import D as Y ⇒ Just Y
-- @
-- Used later to determine whether an empty @qualified@ import is needed or not.
qualifiedName :: ImportDecl l -> Maybe (ModuleName l)
qualifiedName ImportDecl{ importAs = as@(Just _) } = as
qualifiedName ImportDecl{ importQualified = True, .. } = Just importModule
qualifiedName _ = Nothing
isInsideModule :: forall l. [Scoped l] -> ModuleName () -> Bool
isInsideModule annotations moduleName = any isNameUsed annotations
where
isNameUsed :: Scoped l -> Bool
isNameUsed (scopedNameInfo -> nameInfo) = case nameInfo of
GlobalSymbol _ (Qual _ usedName _) -> moduleName == usedName
_ -> False
| serokell/importify | src/Importify/Resolution/Qualified.hs | mit | 3,383 | 0 | 13 | 1,148 | 579 | 320 | 259 | -1 | -1 |
module Main where
import HaskellStarter.CommitPrinter
import System.Environment
main :: IO ()
main = do
args <- getArgs
printCommitsFor (args !! 0) (args !! 1)
| TomRegan/HaskellStarter | executables/Main.hs | mit | 167 | 0 | 9 | 31 | 58 | 31 | 27 | 7 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Graphics.HGL.Core
-- Copyright : (c) Alastair Reid, 1999-2003
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : non-portable (requires concurrency)
--
-- Core functions of a simple graphics library.
--
-----------------------------------------------------------------------------
module Graphics.HGL.Core
( module Graphics.HGL.Units
, module Graphics.HGL.Run
, module Graphics.HGL.Window
, module Graphics.HGL.Draw
, module Graphics.HGL.Key
) where
import Graphics.HGL.Units
import Graphics.HGL.Run
import Graphics.HGL.Window
import Graphics.HGL.Draw
import Graphics.HGL.Key
| FranklinChen/hugs98-plus-Sep2006 | packages/HGL/Graphics/HGL/Core.hs | bsd-3-clause | 794 | 4 | 5 | 105 | 91 | 66 | 25 | 11 | 0 |
{-# LANGUAGE RecordWildCards #-}
-- | This module manages storing the various GHC option flags in a modules
-- interface file as part of the recompilation checking infrastructure.
module FlagChecker (
fingerprintDynFlags
) where
import Binary
import BinIface ()
import DynFlags
import HscTypes
import Module
import Name
import Fingerprint
-- import Outputable
import qualified Data.IntSet as IntSet
import System.FilePath (normalise)
-- | Produce a fingerprint of a @DynFlags@ value. We only base
-- the fingerprint on important fields in @DynFlags@ so that
-- the recompilation checker can use this fingerprint.
fingerprintDynFlags :: DynFlags -> Module -> (BinHandle -> Name -> IO ())
-> IO Fingerprint
fingerprintDynFlags dflags@DynFlags{..} this_mod nameio =
let mainis = if mainModIs == this_mod then Just mainFunIs else Nothing
-- see #5878
-- pkgopts = (thisPackage dflags, sort $ packageFlags dflags)
safeHs = setSafeMode safeHaskell
-- oflags = sort $ filter filterOFlags $ flags dflags
-- *all* the extension flags and the language
lang = (fmap fromEnum language,
IntSet.toList $ extensionFlags)
-- -I, -D and -U flags affect CPP
cpp = (map normalise includePaths, opt_P dflags ++ picPOpts dflags)
-- normalise: eliminate spurious differences due to "./foo" vs "foo"
-- Note [path flags and recompilation]
paths = [ hcSuf ]
-- -fprof-auto etc.
prof = if gopt Opt_SccProfilingOn dflags then fromEnum profAuto else 0
-- -O, see https://ghc.haskell.org/trac/ghc/ticket/10923
opt = if hscTarget == HscInterpreted ||
hscTarget == HscNothing
then 0
else optLevel
in -- pprTrace "flags" (ppr (mainis, safeHs, lang, cpp, paths, prof, opt)) $
computeFingerprint nameio (mainis, safeHs, lang, cpp, paths, prof, opt)
{- Note [path flags and recompilation]
There are several flags that we deliberately omit from the
recompilation check; here we explain why.
-osuf, -odir, -hisuf, -hidir
If GHC decides that it does not need to recompile, then
it must have found an up-to-date .hi file and .o file.
There is no point recording these flags - the user must
have passed the correct ones. Indeed, the user may
have compiled the source file in one-shot mode using
-o to specify the .o file, and then loaded it in GHCi
using -odir.
-stubdir
We omit this one because it is automatically set by -outputdir, and
we don't want changes in -outputdir to automatically trigger
recompilation. This could be wrong, but only in very rare cases.
-i (importPaths)
For the same reason as -osuf etc. above: if GHC decides not to
recompile, then it must have already checked all the .hi files on
which the current module depends, so it must have found them
successfully. It is occasionally useful to be able to cd to a
different directory and use -i flags to enable GHC to find the .hi
files; we don't want this to force recompilation.
The only path-related flag left is -hcsuf.
-}
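-- A hypothetical, non-exported helper (not part of the original module)
-- sketching how 'fingerprintDynFlags' can be used by a caller to decide
-- whether a change in flags should force recompilation.  The name
-- serialisation callback is passed in as a parameter rather than assuming
-- any particular function from BinIface.
flagsForceRecomp :: DynFlags -> DynFlags -> Module
                 -> (BinHandle -> Name -> IO ()) -> IO Bool
flagsForceRecomp old_flags new_flags this_mod nameio = do
    old_fp <- fingerprintDynFlags old_flags this_mod nameio
    new_fp <- fingerprintDynFlags new_flags this_mod nameio
    -- Differing fingerprints mean some recompilation-relevant flag changed.
    return (old_fp /= new_fp)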
| snoyberg/ghc | compiler/iface/FlagChecker.hs | bsd-3-clause | 3,171 | 0 | 12 | 773 | 288 | 169 | 119 | 27 | 4 |
-- {-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
{-# LANGUAGE RankNTypes, MultiParamTypeClasses, FunctionalDependencies #-}
-- This one caught a bug in the implementation of functional
-- dependencies, where improvement must happen when
-- checking the call in 'test4'
module ShouldCompile where
newtype M s a = M a
class Modular s a | s -> a
wim :: forall a w. Integral a
=> a -> (forall s. Modular s a => M s w) -> w
wim i k = error "urk"
test4' :: (Modular s a, Integral a) => M s a
test4' = error "urk"
test4 = wim 4 test4'
-- Integral a0, (Modular s a0 => Modular s1 a1, Integral a1, M s1 a1 ~ M s w0)
-- Under the implication, [D] a1 ~ a0, [W] a1 ~ w0
-- Hence a1 := w0, [D] w0 ~ a0
| rahulmutt/ghcvm | tests/suite/typecheck/compile/tc237.hs | bsd-3-clause | 740 | 0 | 12 | 185 | 143 | 80 | 63 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.AutoScaling.DescribeAdjustmentTypes
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Lists the policy adjustment types for use with 'PutScalingPolicy'.
--
-- <http://docs.aws.amazon.com/AutoScaling/latest/APIReference/API_DescribeAdjustmentTypes.html>
module Network.AWS.AutoScaling.DescribeAdjustmentTypes
(
-- * Request
DescribeAdjustmentTypes
-- ** Request constructor
, describeAdjustmentTypes
-- * Response
, DescribeAdjustmentTypesResponse
-- ** Response constructor
, describeAdjustmentTypesResponse
-- ** Response lenses
, datrAdjustmentTypes
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.AutoScaling.Types
import qualified GHC.Exts
data DescribeAdjustmentTypes = DescribeAdjustmentTypes
deriving (Eq, Ord, Read, Show, Generic)
-- | 'DescribeAdjustmentTypes' constructor.
describeAdjustmentTypes :: DescribeAdjustmentTypes
describeAdjustmentTypes = DescribeAdjustmentTypes
newtype DescribeAdjustmentTypesResponse = DescribeAdjustmentTypesResponse
{ _datrAdjustmentTypes :: List "member" AdjustmentType
} deriving (Eq, Read, Show, Monoid, Semigroup)
instance GHC.Exts.IsList DescribeAdjustmentTypesResponse where
type Item DescribeAdjustmentTypesResponse = AdjustmentType
fromList = DescribeAdjustmentTypesResponse . GHC.Exts.fromList
toList = GHC.Exts.toList . _datrAdjustmentTypes
-- | 'DescribeAdjustmentTypesResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'datrAdjustmentTypes' @::@ ['AdjustmentType']
--
describeAdjustmentTypesResponse :: DescribeAdjustmentTypesResponse
describeAdjustmentTypesResponse = DescribeAdjustmentTypesResponse
{ _datrAdjustmentTypes = mempty
}
-- | The policy adjustment types.
datrAdjustmentTypes :: Lens' DescribeAdjustmentTypesResponse [AdjustmentType]
datrAdjustmentTypes =
lens _datrAdjustmentTypes (\s a -> s { _datrAdjustmentTypes = a })
. _List
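-- A usage sketch (kept as a comment; not part of the generated module).
-- Assuming the usual lens operators and an AWS sender such as 'send' are in
-- scope, the adjustment types can be read from a response via this lens:
--
-- > rs <- send describeAdjustmentTypes
-- > let types = rs ^. datrAdjustmentTypes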
instance ToPath DescribeAdjustmentTypes where
toPath = const "/"
instance ToQuery DescribeAdjustmentTypes where
toQuery = const mempty
instance ToHeaders DescribeAdjustmentTypes
instance AWSRequest DescribeAdjustmentTypes where
type Sv DescribeAdjustmentTypes = AutoScaling
type Rs DescribeAdjustmentTypes = DescribeAdjustmentTypesResponse
request = post "DescribeAdjustmentTypes"
response = xmlResponse
instance FromXML DescribeAdjustmentTypesResponse where
parseXML = withElement "DescribeAdjustmentTypesResult" $ \x -> DescribeAdjustmentTypesResponse
<$> x .@? "AdjustmentTypes" .!@ mempty
| kim/amazonka | amazonka-autoscaling/gen/Network/AWS/AutoScaling/DescribeAdjustmentTypes.hs | mpl-2.0 | 3,587 | 0 | 10 | 660 | 391 | 237 | 154 | 52 | 1 |
{-# LANGUAGE JavaScriptFFI #-}
{-
Copyright 2017 The CodeWorld Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module Blockly.Connection ( Connection(..) )
where
import GHCJS.Types
import GHCJS.Foreign
import GHCJS.Marshal
newtype Connection = Connection JSVal
instance IsJSVal Connection
instance ToJSVal Connection where
toJSVal (Connection v) = return v
instance FromJSVal Connection where
fromJSVal v = return $ Just $ Connection v
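-- A hypothetical helper, not part of the original module, showing how the
-- ToJSVal/FromJSVal instances above are meant to be used: a Connection is an
-- opaque JSVal, so marshalling it to JavaScript and back is the identity.
roundTripConnection :: Connection -> IO (Maybe Connection)
roundTripConnection c = toJSVal c >>= fromJSVal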
| three/codeworld | funblocks-client/src/Blockly/Connection.hs | apache-2.0 | 980 | 0 | 8 | 176 | 99 | 54 | 45 | 11 | 0 |