code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity
---|---|---|---|---|---|---|---|---|---|---|---|---
stringlengths 5 to 1.03M | stringlengths 5 to 90 | stringlengths 4 to 158 | stringclasses 15 values | int64 5 to 1.03M | int64 0 to 53.9k | int64 2 to 4.17k | int64 0 to 365k | int64 3 to 317k | int64 1 to 171k | int64 1 to 146k | int64 -1 to 37.3k | int64 -1 to 1.31k
{-# LANGUAGE UndecidableInstances #-}
module Language.ContextSemantics.Graph where
import Language.ContextSemantics.Common
import Language.ContextSemantics.Utilities
import Control.Arrow (second)
import Control.Monad
import qualified Data.IntMap as IM
import qualified Data.Foldable as F
import Data.List
import qualified Data.Traversable as T
--
-- Interactors: functors that we can build interaction graphs from
--
class (T.Traversable n, Eq (Selector n)) => Interactor n where
type Selector n :: *
selectors :: n a -> n (Selector n, a)
select :: Selector n -> n a -> a
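-- A hedged sketch (not part of the original module) of what an 'Interactor'
-- instance can look like, using a hypothetical two-port "wire" node. The
-- names Wire and WireSelector are illustrative only, and the derivations
-- assume the DeriveFunctor/DeriveFoldable/DeriveTraversable extensions.
--
-- > data Wire a = Wire a a
-- >     deriving (Functor, Foldable, Traversable)
-- > data WireSelector = In | Out
-- >     deriving (Eq, Show)
-- >
-- > instance Interactor Wire where
-- >     type Selector Wire = WireSelector
-- >     selectors (Wire i o) = Wire (In, i) (Out, o)
-- >     select In  (Wire i _) = i
-- >     select Out (Wire _ o) = o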
--
-- Interaction graphs
--
type NodeId = Int
data Port n = Port {
port_node :: NodeId,
port_selector :: Selector n
}
-- Requires UndecidableInstances
instance Show (Selector n) => Show (Port n) where
show port = show (port_node port) ++ "." ++ show (port_selector port)
-- Requires UndecidableInstances
instance Eq (Selector n) => Eq (Port n) where
p1 == p2 = port_node p1 == port_node p2 &&
port_selector p1 == port_selector p2
data Graph n = Graph {
gr_nodes :: IM.IntMap (n (Port n)),
gr_named_ports :: [(PortName, Port n)]
}
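-- Compute one value per node, where each node sees the (lazily tied) values
-- of the nodes its ports connect to, and report the values reachable from
-- the graph's named ports.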
foldNodewise :: Functor n => (n a -> a) -> Graph n -> [(PortName, a)]
foldNodewise f gr = map (second lookup_node) (gr_named_ports gr)
where lookup_node port = assertJust "foldNodewise" (IM.lookup (port_node port) node_vals)
node_vals = IM.map (f . fmap lookup_node) (gr_nodes gr)
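-- Compute one value per port: each node's ports are recomputed by the given
-- function from the values carried by the ports they are wired to, and the
-- values at the graph's named ports are reported.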
foldPortwise :: Interactor n => (n a -> n a) -> Graph n -> [(PortName, a)]
foldPortwise f gr = map (second lookup_port) (gr_named_ports gr)
where lookup_port port = port_selector port `select` assertJust "foldPortwise" (IM.lookup (port_node port) node_port_vals)
node_port_vals = IM.map (f . fmap lookup_port) (gr_nodes gr)
--
-- Converting to .dot files
--
toDot :: Interactor n
=> (n () -> [(String, String)]) -- ^ Assignment of attributes to node
-> (Selector n -> Selector n -> [(String, String)]) -- ^ Assignment of attributes to edges
-> Graph n
-> String
toDot node_attrs edge_attrs gr = "graph {\r\n" ++ intercalate ";\r\n" statements ++ "\r\n}\r\n"
where nodes = IM.assocs (gr_nodes gr)
edges = [(Port from_nid from_selector, to_port)
| (from_nid, from_n) <- nodes
, (from_selector, to_port) <- F.toList (selectors from_n)]
unique_edges = nubBy (\(p1, p2) (q1, q2) -> (p1 == q1 && p2 == q2) || (p1 == q2 && p2 == q1)) edges
statements = named_node_statements ++ named_edge_statements ++ node_statements ++ edge_statements
named_node_statements = ["named" ++ name ++ " [shape=point,label=" ++ name ++ "]"
| (name, _) <- gr_named_ports gr]
named_edge_statements = ["named" ++ name ++ " -- node" ++ show (port_node port) ++ " [arrowhead=normal]"
| (name, port) <- gr_named_ports gr]
node_statements = ["node" ++ show nid ++ format_list (("label", show nid) : node_attrs (fmap (const ()) n))
| (nid, n) <- nodes]
edge_statements = ["node" ++ show (port_node from_port) ++ " -- node" ++ show (port_node to_port) ++ " " ++ format_list (edge_attrs (port_selector from_port) (port_selector to_port))
| (from_port, to_port) <- unique_edges]
format_list avps = "[" ++ intercalate "," [attr ++ "=" ++ val | (attr, val) <- avps] ++ "]"
--
-- Graph builder monad, for convenience of construction
--
newtype LooseEnd = LooseEnd { unLooseEnd :: Int }
deriving (Show)
data Knot n = KnotToLooseEnd LooseEnd
| KnotToPort (Port n)
data GraphBuilderEnv n = GraphBuilderEnv {
gbe_next_unique :: Int,
gbe_loose_end_joins :: IM.IntMap LooseEnd,
gbe_loose_ends :: IM.IntMap (Maybe (Knot n)),
gbe_nodes :: IM.IntMap (n LooseEnd)
}
emptyGraphBuilderEnv :: GraphBuilderEnv n
emptyGraphBuilderEnv = GraphBuilderEnv {
gbe_next_unique = 0,
gbe_loose_end_joins = IM.empty,
gbe_loose_ends = IM.empty,
gbe_nodes = IM.empty
}
newtype GraphBuilderM n a = GraphBuilderM {
unGraphBuilderM :: GraphBuilderEnv n -> (GraphBuilderEnv n, a)
}
instance Functor (GraphBuilderM n) where
fmap f mx = mx >>= \x -> return (f x)
instance Monad (GraphBuilderM n) where
return x = GraphBuilderM $ \env -> (env, x)
mx >>= f = GraphBuilderM $ \env -> case unGraphBuilderM mx env of (env', y) -> unGraphBuilderM (f y) env'
newUnique :: GraphBuilderM n Int
newUnique = GraphBuilderM $ \env -> let i = gbe_next_unique env in (env { gbe_next_unique = i + 1 }, i)
insertNode :: Int -> n LooseEnd -> GraphBuilderM n ()
insertNode i node = GraphBuilderM $ \env -> (env { gbe_nodes = IM.insert i node (gbe_nodes env) }, ())
knotOnce :: a -> Maybe a -> Maybe a
knotOnce what Nothing = Just what
knotOnce _ (Just _) = error "Can't knot a loose end twice!"
knotLooseEndToPort :: LooseEnd -> Port n -> GraphBuilderM n ()
knotLooseEndToPort le p = GraphBuilderM $ \env -> (env { gbe_loose_ends = IM.adjust (knotOnce (KnotToPort p)) (unLooseEnd le) (gbe_loose_ends env) }, ())
knotLooseEnds :: LooseEnd -> LooseEnd -> GraphBuilderM n ()
knotLooseEnds le1 le2 = GraphBuilderM $ \env -> (env { gbe_loose_ends = IM.adjust (knotOnce (KnotToLooseEnd le1)) (unLooseEnd le2) (IM.adjust (knotOnce (KnotToLooseEnd le2)) (unLooseEnd le1) (gbe_loose_ends env)) }, ())
newWire :: GraphBuilderM a (LooseEnd, LooseEnd)
newWire = do
le1 <- liftM LooseEnd newUnique
le2 <- liftM LooseEnd newUnique
GraphBuilderM $ \env -> (env { gbe_loose_end_joins = IM.insert (unLooseEnd le2) le1 (IM.insert (unLooseEnd le1) le2 (gbe_loose_end_joins env))
, gbe_loose_ends = IM.insert (unLooseEnd le1) Nothing (IM.insert (unLooseEnd le2) Nothing (gbe_loose_ends env)) }, (le1, le2))
newNode :: Interactor n => n LooseEnd -> GraphBuilderM n ()
newNode n_loose_ends = do
nid <- newUnique
insertNode nid n_loose_ends
fmapM_ (\(selector, loose_end) -> knotLooseEndToPort loose_end (Port nid selector)) (selectors n_loose_ends)
join :: LooseEnd -> LooseEnd -> GraphBuilderM n ()
join = knotLooseEnds
runGraphBuilderM :: Interactor n => GraphBuilderM n [(PortName, LooseEnd)] -> Graph n
runGraphBuilderM mx = Graph {
gr_nodes = nodes,
gr_named_ports = map (second lookupLooseEndPort) named_les
}
where (final_env, named_les) = unGraphBuilderM mx emptyGraphBuilderEnv
nodes = IM.map (fmap lookupLooseEndPort) (gbe_nodes final_env)
lookupLooseEndPort le = case iMlookupCertainly (unLooseEnd $ iMlookupCertainly (unLooseEnd le) (gbe_loose_end_joins final_env)) (gbe_loose_ends final_env) of
Nothing -> error $ "An unknotted loose end remained!"
Just (KnotToLooseEnd le') -> lookupLooseEndPort le'
Just (KnotToPort p) -> p
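-- A hedged usage sketch (illustrative only; Wire is the hypothetical two-port
-- interactor sketched above): connect two nodes to each other with two wires
-- and build the resulting graph. Note that every loose end handed to newNode
-- must eventually have its partner knotted (to a port or another loose end),
-- otherwise forcing the graph reports an unknotted loose end.
--
-- > exampleGraph :: Graph Wire
-- > exampleGraph = runGraphBuilderM $ do
-- >     (a, a') <- newWire
-- >     (b, b') <- newWire
-- >     newNode (Wire a  b)
-- >     newNode (Wire a' b')
-- >     return []  -- no externally named ports in this sketch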
| batterseapower/context-semantics | Language/ContextSemantics/Graph.hs | bsd-3-clause | 7,002 | 0 | 17 | 1,648 | 2,363 | 1,244 | 1,119 | -1 | -1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE MultiWayIf #-}
module Text.PDDL.SExp (
SExp(..)
, parseSExp
, ParseError(..)
) where
import Control.Applicative ( Applicative(..) )
import Text.PDDL.Location
( Source, SrcLoc(..), Position(..), Range(..)
, Located(..), movePos, at )
import Text.PDDL.PP
( PP(..), Doc, text, (<+>), sep, (<>), doubleQuotes
, isEmpty )
import qualified Text.PDDL.PP as PP
import Control.Monad ( unless )
import Data.Char ( isSpace, isNumber, isLetter, isSymbol )
import qualified Data.Text as S
import qualified Data.Text.Lazy as L
import MonadLib ( StateT, get, set, ExceptionT, raise, Id, runM )
-- External Interface ----------------------------------------------------------
data SExp = SList [SExp]
-- ^ Lists
| SSym !S.Text
-- ^ Symbols
| SName !S.Text
-- ^ Names
| SVar !S.Text
-- ^ Variables (names with a '?' prefix)
| SNum !Double
-- ^ Numbers
| SString !S.Text
-- ^ Quoted strings
| SLoc !(Located SExp)
-- ^ S-expressions with source location information
deriving (Show)
instance PP SExp where
pp (SList es) = pp '(' <> sep (map pp es) <> pp ')'
pp (SSym t) = pp t
pp (SName n) = pp n
pp (SVar n) = pp '?' <> pp n
pp (SNum d) = pp d
pp (SString s) = doubleQuotes (pp s)
pp (SLoc loc) = pp loc
data ParseError = ParseError SrcLoc Doc
deriving (Show)
instance PP ParseError where
pp (ParseError loc msg) = pp loc <> PP.char ':' <+> body
where
body | isEmpty msg = text "Parse error"
| otherwise = msg
-- | Parse an s-expression from a lazy ByteString, with source information.
parseSExp :: Source -> L.Text -> Either ParseError SExp
parseSExp src inp =
case runM (unParser sexp) rw of
Right (a,_) -> Right a
Left err -> Left err
where
rw = RW { rwInput = inp, rwSource = src, rwPos = Position 0 1 1 }
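-- A hedged usage sketch (not part of the original module). It assumes that
-- 'Source' is a string-like label for the input (as its use in 'SrcLoc'
-- suggests); adjust to the real 'Text.PDDL.Location.Source' type if needed.
--
-- > example :: Either ParseError SExp
-- > example = parseSExp "<doctest>" (L.pack "(define (domain blocks))")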
-- Parser ----------------------------------------------------------------------
newtype Parser a = Parser { unParser :: StateT RW (ExceptionT ParseError Id) a
} deriving (Functor,Applicative,Monad)
data RW = RW { rwInput :: L.Text
, rwSource :: Source
, rwPos :: !Position
}
getPos :: Parser Position
getPos = Parser $
do RW { .. } <- get
return rwPos
getLoc :: Position -> Parser SrcLoc
getLoc start = Parser $
do RW { .. } <- get
return (SrcLoc (Range start rwPos) rwSource)
peek :: Parser Char
peek = Parser $
do RW { .. } <- get
case L.uncons rwInput of
Just (c,_) -> return c
Nothing -> raise $ ParseError (SrcLoc (Range rwPos rwPos) rwSource)
(text "Unexpected end of input")
char :: Parser Char
char = Parser $
do RW { .. } <- get
case L.uncons rwInput of
Just (c,rest) -> do set $! RW { rwInput = rest
, rwPos = movePos rwPos c
, ..
}
return c
Nothing -> raise $ ParseError (SrcLoc (Range rwPos rwPos) rwSource)
(text "Unexpected end of input")
-- | Consume until the next character begins something interesting. As this
-- will fail if it doesn't find anything interesting, it should only be used in
-- cases where the grammar expects something.
trim :: Parser ()
trim =
do c <- peek
if | isSpace c -> do _ <- char
trim
| c == ';' -> comment >> trim
| otherwise -> return ()
sexp :: Parser SExp
sexp =
do trim
start <- getPos
c <- char
e <- if | c == '(' -> SList `fmap` list
| c == '"' -> (SString . S.pack) `fmap` string
| c == ':' -> (SSym . S.pack) `fmap` name
| c == '?' -> (SVar . S.pack) `fmap` name
| isNumber c -> do rest <- number
return (SNum (read (c:rest)))
| isLetter c -> do str <- nameBody c
return (SName (S.pack str))
| otherwise -> do rest <- symbol
return (SSym (S.pack (c:rest)))
loc <- getLoc start
return (SLoc (e `at` loc))
list :: Parser [SExp]
list =
do trim
c <- peek
if | c == ')' -> return []
| otherwise -> do e <- sexp
es <- list
return (e:es)
string :: Parser String
string =
do c <- char
if c == '"'
then return []
else do cs <- string
return (c:cs)
comment :: Parser ()
comment =
do c <- char
if c == '\n'
then return ()
else comment
name :: Parser String
name =
do l <- char
unless (isLetter l) (fail "expected a letter")
nameBody l
nameBody :: Char -> Parser String
nameBody l =
do rest <- loop
return (l:rest)
where
loop = do c <- peek
if isLetter c || isNumber c || c `elem` "-_"
then do _ <- char
rest <- loop
return (c:rest)
else return ""
number :: Parser String
number =
do c <- peek
if | isNumber c -> do _ <- char
rest <- number
return (c:rest)
| c == '.' -> do _ <- char
rest <- decimal
return (c:rest)
| otherwise -> return ""
decimal :: Parser String
decimal =
do c <- peek
if isNumber c
then do _ <- char
rest <- decimal
return (c:rest)
else return ""
symbol :: Parser String
symbol =
do c <- peek
if isSymbol c
then do _ <- char
rest <- symbol
return (c:rest)
else return ""
| elliottt/pddl | src/Text/PDDL/SExp.hs | bsd-3-clause | 6,241 | 0 | 18 | 2,504 | 2,009 | 1,020 | 989 | 178 | 7 |
--------------------------------------------------------------------------------
-- |
-- Module : Sound.OpenAL.ALC.Capture
-- Copyright : (c) Sven Panne 2005
-- License : BSD-style (see the file libraries/OpenAL/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- This module corresponds to section 6.4.2. (Capture) of the OpenAL
-- Specification and Reference (version 1.1).
--
--------------------------------------------------------------------------------
module Sound.OpenAL.ALC.Capture (
NumSamples, captureOpenDevice, captureStart, captureNumSamples,
captureSamples, captureStop, captureCloseDevice,
captureDefaultDeviceSpecifier, captureDeviceSpecifier,
allCaptureDeviceSpecifiers
) where
import Foreign.Ptr ( Ptr, nullPtr, FunPtr )
import Graphics.Rendering.OpenGL.GL.StateVar (
GettableStateVar, makeGettableStateVar, get )
import Sound.OpenAL.AL.Buffer ( Format )
import Sound.OpenAL.AL.Format ( marshalFormat )
import Sound.OpenAL.ALC.ALCboolean ( unmarshalALCboolean )
import Sound.OpenAL.ALC.BasicTypes (
ALCchar, ALCuint, ALCenum, ALCsizei, ALCboolean )
import Sound.OpenAL.ALC.Context ( Frequency )
import Sound.OpenAL.ALC.Device ( Device )
import Sound.OpenAL.ALC.Extensions ( alcProcAddress )
import Sound.OpenAL.ALC.QueryUtils ( IntQuery(..), getInteger )
import Sound.OpenAL.ALC.QueryUtils (
StringQuery(..), getString, getStringRaw, alcIsExtensionPresent )
import Sound.OpenAL.ALC.String ( withALCString, peekALCStrings )
import Sound.OpenAL.Config ( ALCdevice, marshalDevice, unmarshalDevice )
--------------------------------------------------------------------------------
type NumSamples = ALCsizei
--------------------------------------------------------------------------------
type Invoker a = FunPtr a -> a
getCaptureFunc :: String -> IO (FunPtr a)
getCaptureFunc = get . alcProcAddress Nothing
--------------------------------------------------------------------------------
captureOpenDevice ::
Maybe String -> Frequency -> Format -> NumSamples -> IO (Maybe Device)
captureOpenDevice maybeDeviceSpec frequency format size = do
funPtr <- getCaptureFunc "alcCaptureOpenDevice"
let open deviceSpec =
invokeCaptureOpenDevice funPtr deviceSpec (round frequency)
(fromIntegral (marshalFormat format)) size
fmap unmarshalDevice $
(maybe (open nullPtr) -- use preferred device
(flip withALCString open)
maybeDeviceSpec)
foreign import CALLCONV unsafe "dynamic"
invokeCaptureOpenDevice :: Invoker (Ptr ALCchar -> ALCuint -> ALCenum -> ALCsizei -> IO ALCdevice)
--------------------------------------------------------------------------------
captureStart :: Device -> IO ()
captureStart = captureStartStop "alcCaptureStart"
captureStartStop :: String -> Device -> IO ()
captureStartStop funName device = do
funPtr <- getCaptureFunc funName
invokeCaptureStartStop funPtr (marshalDevice device)
foreign import CALLCONV unsafe "dynamic"
invokeCaptureStartStop :: Invoker (ALCdevice -> IO ())
--------------------------------------------------------------------------------
captureNumSamples :: Device -> GettableStateVar NumSamples
captureNumSamples device = makeGettableStateVar $
fmap fromIntegral (getInteger (Just device) CaptureSamples)
--------------------------------------------------------------------------------
captureSamples :: Device -> Ptr a -> NumSamples -> IO ()
captureSamples device buf n = do
funPtr <- getCaptureFunc "alcCaptureSamples"
invokeCaptureSamples funPtr (marshalDevice device) buf n
foreign import CALLCONV unsafe "dynamic"
invokeCaptureSamples :: Invoker (ALCdevice -> Ptr a -> NumSamples -> IO ())
--------------------------------------------------------------------------------
captureStop :: Device -> IO ()
captureStop = captureStartStop "alcCaptureStop"
--------------------------------------------------------------------------------
captureCloseDevice :: Device -> IO Bool
captureCloseDevice device = do
funPtr <- getCaptureFunc "alcCaptureCloseDevice"
fmap unmarshalALCboolean .
invokeCaptureCloseDevice funPtr . marshalDevice $ device
foreign import CALLCONV unsafe "dynamic"
invokeCaptureCloseDevice :: Invoker (ALCdevice -> IO ALCboolean)
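-- A hedged usage sketch (illustrative only, not part of the original module):
-- open the preferred capture device at 44.1 kHz, grab whatever samples are
-- ready, then shut down. It assumes 'Mono16' is a constructor of 'Format'
-- and that 'allocaBytes' is imported from Foreign.Marshal.Alloc.
--
-- > captureExample :: IO ()
-- > captureExample = do
-- >    mdev <- captureOpenDevice Nothing 44100 Mono16 4096
-- >    case mdev of
-- >       Nothing  -> putStrLn "no capture device available"
-- >       Just dev -> do
-- >          captureStart dev
-- >          n <- get (captureNumSamples dev)
-- >          allocaBytes (fromIntegral n * 2) $ \buf ->  -- 2 bytes per Mono16 sample
-- >             captureSamples dev buf n
-- >          captureStop dev
-- >          _ <- captureCloseDevice dev
-- >          return ()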
--------------------------------------------------------------------------------
-- | Contains the name of the default capture device.
captureDefaultDeviceSpecifier :: GettableStateVar String
captureDefaultDeviceSpecifier = makeGettableStateVar $
getString Nothing CaptureDefaultDeviceSpecifier
--------------------------------------------------------------------------------
-- | Contains the specifier string for the given capture device.
captureDeviceSpecifier :: Device -> GettableStateVar String
captureDeviceSpecifier device = makeGettableStateVar $
getString (Just device) CaptureDeviceSpecifier
--------------------------------------------------------------------------------
-- | Contains a list of specifiers for all available capture devices.
allCaptureDeviceSpecifiers :: GettableStateVar [String]
allCaptureDeviceSpecifiers = makeGettableStateVar $ do
enumExtPresent <- get (alcIsExtensionPresent Nothing "ALC_ENUMERATION_EXT")
if enumExtPresent
then peekALCStrings =<< getStringRaw Nothing CaptureDeviceSpecifier
else fmap (\s -> [s]) $ get captureDefaultDeviceSpecifier
| FranklinChen/hugs98-plus-Sep2006 | packages/OpenAL/Sound/OpenAL/ALC/Capture.hs | bsd-3-clause | 5,473 | 27 | 15 | 726 | 997 | 542 | 455 | -1 | -1 |
{-# LANGUAGE NoImplicitPrelude, UnicodeSyntax #-}
module Main (main) where
import Help.Imports
import Help.Logging
import Help.Settings
import Help.UI.WebSearch
import Help.UI.AdminConsole
import Data.Conduit (runResourceT)
import Control.Concurrent hiding (forkIO)
import Control.Concurrent.STM
import Control.Concurrent.Thread.Group
import Control.Lens ((^.))
main ∷ IO ()
main = do
settings ← loadSettings
if not $ null $ settings^.logFile
then runResourceT $ loadFile settings
else do
g ← new -- Creates a new thread group
(t1, _) ← forkIO g $ webSearch settings
(t2, _) ← forkIO g $ (adminConsole settings `catch` (\e -> print (e :: IOException)))
(t3, _) ← forkIO g $ runResourceT (logInterface settings `catch` (\e -> print (e :: IOException)))
-- Cleans up all threads if any thread dies
-- Each thread will need to clean up internally
waitAny g `finally` mapM_ killThread [t1, t2, t3]
-- |Unlike @wait@ (which waits for all threads to exit), @waitAny@ waits for any thread to exit, then returns
waitAny ∷ ThreadGroup → IO ()
waitAny tg = do
nr ← atomically $ nrOfRunning tg
atomically $ nrOfRunning tg >>= \n → when (n ≡ nr) retry
| argiopetech/help | Main.hs | bsd-3-clause | 1,270 | 0 | 18 | 285 | 363 | 199 | 164 | 27 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE ConstraintKinds #-}
-- |
-- Module : System.Rados.Monadic
-- Copyright : (c) 2010-2014 Anchor
-- License : BSD-3
-- Maintainer : Christian Marie <[email protected]>
-- Stability : experimental
-- Portability : non-portable
--
-- Monadic interface to librados, covers async read/writes, locks and atomic
-- writes (ensure you use the build flag).
--
-- This is the monadic API, you may use the underlying internals or FFI calls
-- via "System.Rados.Base" and "System.Rados.FFI".
--
-- A simple complete example:
--
-- @
-- {-\# LANGUAGE OverloadedStrings \#-}
-- module Main where
-- import System.Rados
-- import Control.Exception
-- import qualified Data.ByteString.Char8 as B
-- main :: IO ()
-- main = do
-- kitty \<- runConnect Nothing (parseConfig \"ceph.conf\") $
-- runPool \"magic_pool\" . runObject \"an oid\" $ do
-- writeFull \"hello kitty!\"
-- readFull
-- either throwIO B.putStrLn (kitty :: Either RadosError B.ByteString)
-- @
--
module System.Rados.Monadic
(
-- * Initialization
runConnect,
parseConfig,
parseArgv,
parseEnv,
runPool,
-- * Pool enumeration
objects,
unsafeObjects,
-- * Reading
-- ** A note on signatures
--
-- |In order to use these functions in any RadosReader monad, the type
-- signatures have been made generic.
--
-- This allows the same API to be used for synchronous and asynchronous
-- requests.
--
-- Thus, a1 and a2 below have different signatures:
--
-- @
-- runOurPool $ do
-- a1 \<- runObject \"object\" $ readFull
-- a2 \<- runAsync . runObject \"object\" $ readFull
-- a3 \<- look a2
-- a1 :: Either RadosError ByteString
-- a2 :: AsyncRead ByteString
-- a3 :: Either RadosError ByteString
-- @
-- ** Reading API
RadosReader(readChunk, readFull, stat),
-- * Writing
-- ** A note on signatures
--
-- |In order to use these functions in any RadosWriter monad, the type
-- signatures have been made generic.
--
-- This allows the same API to be used for synchronous and asynchronous
-- requests.
--
-- Thus, a1 and a2 below have different signatures:
--
-- @
-- runOurPool $ do
-- a1 \<- runObject \"object\" $ writeFull \"hai!\"
-- a2 \<- runAsync . runObject \"object\" $ writeFull \"hai!\"
-- a3 \<- waitSafe a2
-- a1 :: Maybe RadosError
-- a2 :: AsyncWrite
-- a3 :: Maybe RadosError
-- @
-- ** Writing API
RadosWriter(..),
-- * Asynchronous requests
async,
runAsync,
waitSafe,
waitComplete,
look,
runObject,
#if defined(ATOMIC_WRITES)
runAtomicWrite,
-- * Extra atomic operations
assertExists,
compareXAttribute,
B.eq, B.ne, B.gt, B.gte, B.lt, B.lte, B.nop,
setXAttribute,
#endif
-- * Locking
withExclusiveLock,
withSharedLock,
-- * Types
-- ** Data types
StatResult,
fileSize,
modifyTime,
AsyncRead,
AsyncWrite,
-- ** Monads
Connection,
Pool,
Object,
Async,
-- ** Exceptions
-- |
-- This library should never throw an error within runPool, runPool itself
-- may throw a 'RadosError' should it have a problem opening the given
-- pool.
E.RadosError(..),
-- ** Re-exports
liftIO
)
where
import Control.Exception (bracket, bracket_, throwIO)
import Control.Monad.State
import Control.Monad.Reader
import Control.Applicative
import Data.Word (Word64)
import System.Posix.Types(EpochTime)
import Foreign.ForeignPtr
import Foreign.Storable
import System.IO.Unsafe
import Data.Typeable
import Data.Maybe
import qualified Control.Concurrent.Async as A
import qualified Data.ByteString.Char8 as B
import qualified System.Rados.Error as E
import qualified System.Rados.Base as B
newtype Connection a = Connection (ReaderT B.Connection IO a)
deriving (Functor, Applicative, Monad, MonadIO, MonadReader B.Connection)
newtype Pool a = Pool (ReaderT B.IOContext IO a)
deriving (Functor, Applicative, Monad, MonadIO, MonadReader B.IOContext)
newtype Object parent a = Object (ReaderT B.ByteString parent a)
deriving (Functor, Applicative, Monad, MonadIO, MonadReader B.ByteString)
newtype Async a = Async (ReaderT B.IOContext IO a)
deriving (Functor, Applicative, Monad, MonadIO, MonadReader B.IOContext)
#if defined(ATOMIC_WRITES)
newtype AtomicWrite a = AtomicWrite (ReaderT B.WriteOperation IO a)
deriving (Functor, Applicative, Monad, MonadIO, MonadReader B.WriteOperation)
#endif
-- | A write request in flight, access a possible error with 'waitSafe'
data AsyncWrite = ActionFailure E.RadosError | ActionInFlight B.Completion
-- | A read request in flight, access the contents of the read with 'look'
data AsyncRead a = ReadFailure E.RadosError | ReadInFlight B.Completion a
-- | The result of a 'stat', access the contents with 'modifyTime' and
-- 'fileSize'
data StatResult = StatResult Word64 EpochTime
| StatInFlight (ForeignPtr Word64) (ForeignPtr EpochTime)
deriving (Typeable)
-- Make reading a StatResult transparent
fileSize :: StatResult -> Word64
fileSize (StatResult s _) = s
fileSize (StatInFlight s _) = unsafePerformIO $ withForeignPtr s peek
modifyTime :: StatResult -> EpochTime
modifyTime (StatResult _ m) = m
modifyTime (StatInFlight _ m) = unsafePerformIO $ withForeignPtr m peek
class (MonadReader B.IOContext m, MonadIO m) => PoolReader m where
unPoolReader :: m a -> ReaderT B.IOContext IO a
class Monad m => RadosWriter m e | m -> e where
-- | Write a chunk of data
--
-- The possible types of this function are:
--
-- @
-- writeChunk :: Word64 -> ByteString -> Object Pool (Maybe RadosError)
-- writeChunk :: Word64 -> ByteString -> Object A.AsyncWrite
-- @
writeChunk
:: Word64 -- ^ Offset to write at
-> B.ByteString -- ^ Bytes to write
-> m e
-- | Atomically replace an object
--
-- The possible types of this function are:
--
-- @
-- writeFull :: ByteString -> Object Pool (Maybe RadosError)
-- writeFull :: ByteString -> Object A.AsyncWrite
-- @
writeFull :: B.ByteString -> m e
-- | Append to the end of an object
--
-- The possible types of this function are:
--
-- @
-- append :: ByteString -> Object Pool (Maybe RadosError)
-- append :: ByteString -> Object A.AsyncWrite
-- @
append :: B.ByteString -> m e
-- | Delete an object
--
-- The possible types of this function are:
--
-- @
-- remove :: Object Pool (Maybe RadosError)
-- remove :: Object A.AsyncWrite
-- @
remove :: m e
#if defined(ATOMIC_WRITES)
class Monad m => AtomicWriter m e | m -> e where
-- | Must be run within an Object monad, this will run all writes
-- atomically. The writes will be queued up, and on execution of the monad
-- will be sent to ceph in one batch request.
--
-- @
-- e <- runOurPool . runObject \"hi\" . runAtomicWrite $ do
-- remove
-- writeChunk 42 "fourty-two"
-- isNothing e
-- @
--
-- Or for async:
--
-- @
-- e <- runOurPool . runAsync . runObject \"hi\" . runAtomicWrite $ do
-- remove
-- writeChunk 42 "fourty-two"
-- isNothing <$> waitSafe e
-- @
runAtomicWrite :: AtomicWrite a -> m e
#endif
class Monad m => RadosReader m wrapper | m -> wrapper where
-- | Read a chunk of data.
--
-- The possible types of this function are:
--
-- @
-- readChunk :: Word64 -> Word64 -> Object Pool (Either RadosError ByteString)
-- readChunk :: Word64 -> Word64 -> Object Async (AsyncRead ByteString)
-- @
readChunk
:: Word64 -- ^ Number of bytes to read
-> Word64 -- ^ Offset to read from
-> m (wrapper B.ByteString)
-- | Read all available data.
--
-- This is implemented with a stat followed by a read.
--
-- If you call this within the Object Async monad, the async request will
-- wait for the result of the stat. The read itself will still be
-- asynchronous.
--
-- The possible types of this function are:
--
-- @
-- readFull :: Object Pool (Either RadosError ByteString)
-- readFull :: Object Async (AsyncRead ByteString)
-- @
readFull :: m (wrapper B.ByteString)
readFull =
stat >>= unWrap >>= either wrapFail (\r -> readChunk (fileSize r) 0)
-- | Retrieve the file size and mtime of an object
--
-- The possible types of this function are:
--
-- @
-- stat :: Object Pool (Either RadosError StatResult)
-- stat :: Object Async (AsyncRead StatResult)
-- @
stat :: m (wrapper StatResult)
unWrap :: Typeable a => wrapper a -> m (Either E.RadosError a)
wrapFail :: E.RadosError -> m (wrapper a)
instance PoolReader Async where
unPoolReader (Async a) = a
instance PoolReader Pool where
unPoolReader (Pool a) = a
instance RadosWriter (Object Pool) (Maybe E.RadosError) where
writeChunk offset buffer = do
(object, pool) <- askObjectPool
liftIO $ B.syncWrite pool object offset buffer
writeFull buffer = do
(object, pool) <- askObjectPool
liftIO $ B.syncWriteFull pool object buffer
append buffer = do
(object, pool) <- askObjectPool
liftIO $ B.syncAppend pool object buffer
remove = do
(object, pool) <- askObjectPool
liftIO $ B.syncRemove pool object
instance RadosWriter (Object Async) AsyncWrite where
writeChunk offset buffer = do
(object, pool) <- askObjectPool
withActionCompletion $ \completion ->
liftIO $ B.asyncWrite pool completion object offset buffer
writeFull buffer = do
(object, pool) <- askObjectPool
withActionCompletion $ \completion ->
liftIO $ B.asyncWriteFull pool completion object buffer
append buffer = do
(object, pool) <- askObjectPool
withActionCompletion $ \completion ->
liftIO $ B.asyncAppend pool completion object buffer
remove = do
(object, pool) <- askObjectPool
withActionCompletion $ \completion ->
liftIO $ B.asyncRemove pool completion object
#if defined(ATOMIC_WRITES)
instance RadosWriter AtomicWrite () where
writeChunk offset buffer = do
op <- ask
liftIO $ B.writeOperationWrite op buffer offset
writeFull buffer = do
op <- ask
liftIO $ B.writeOperationWriteFull op buffer
append buffer = do
op <- ask
liftIO $ B.writeOperationAppend op buffer
remove = do
op <- ask
liftIO $ B.writeOperationRemove op
instance AtomicWriter (Object Pool) (Maybe E.RadosError) where
runAtomicWrite (AtomicWrite action) = do
(object, pool) <- askObjectPool
liftIO $ do
op <- B.newWriteOperation
runReaderT action op
B.writeOperate op pool object
instance AtomicWriter (Object Async) AsyncWrite where
runAtomicWrite (AtomicWrite action) = do
(object, pool) <- askObjectPool
withActionCompletion $ \completion ->
liftIO $ do
op <- B.newWriteOperation
runReaderT action op
B.asyncWriteOperate op pool completion object
#endif
instance RadosReader (Object Pool) (Either E.RadosError) where
readChunk len offset = do
(object, pool) <- askObjectPool
liftIO $ B.syncRead pool object len offset
stat = do
(object, pool) <- askObjectPool
liftIO $ do
s <- B.syncStat pool object
return $ case s of
Left e -> Left e
Right (size, time) -> Right $ StatResult size time
unWrap = return . id
wrapFail = return . Left
instance RadosReader (Object Async) AsyncRead where
readChunk len offset = do
(object, pool) <- askObjectPool
withReadCompletion $ \completion ->
liftIO $ B.asyncRead pool completion object len offset
stat = do
(object, pool) <- askObjectPool
withReadCompletion $ \completion ->
liftIO $ do
s <- B.asyncStat pool completion object
return $ case s of
Left e ->
Left e
Right (size_fp, mtime_fp) ->
Right $ StatInFlight size_fp mtime_fp
unWrap = look
wrapFail = return . ReadFailure
askObjectPool :: MonadReader B.IOContext m => Object m (B.ByteString, B.IOContext)
askObjectPool =
liftM2 (,) ask (Object . lift $ ask)
-- | Wrapper for the Control.Concurrent.Async library, you must be very careful
-- to wait for the completion of all created async actions within the pool
-- monad, or they will run with an invalid (cleaned up) context.
--
-- This will be rectified in future versions when reference counting is
-- implemented, for now it is very unpolished and will require you to import
-- qualified Control.Concurrent.Async.
async :: PoolReader m => m a -> m (A.Async a)
async action = do
pool <- ask
-- Stick the pool within async
liftIO . A.async $ runReaderT (unPoolReader action) pool
-- TODO: Implement reference counting here and within runPool
-- | Wait until a Rados write has hit stable storage on all replicas, you will
-- only know if a write has been successful when you inspect the AsyncWrite
-- with waitSafe.
--
-- Provides a Maybe RadosError.
--
-- @
-- runOurPool . runAsync . runObject \"a box\" $ do
-- async_request \<- writeFull \"schrodinger's hai?\\n\"
-- liftIO $ putStrLn \"Write is in flight!\"
-- maybe_error <- waitSafe async_request
-- case maybe_error of
-- Just e -> liftIO $ print e
-- Nothing -> return ()
-- @
waitSafe :: MonadIO m => AsyncWrite -> m (Maybe E.RadosError)
waitSafe = waitAsync B.waitForSafe
-- | Wait until a Rados write has hit memory on all replicas. This is less safe
-- than waitSafe, but still pretty safe. Safe.
waitComplete :: MonadIO m => AsyncWrite -> m (Maybe E.RadosError)
waitComplete = waitAsync B.waitForComplete
waitAsync :: MonadIO m
=> (B.Completion -> IO a) -> AsyncWrite -> m (Maybe E.RadosError)
waitAsync f async_request =
case async_request of
ActionFailure e ->
return $ Just e
ActionInFlight completion -> do
e <- liftIO $ do
f completion
B.getAsyncError completion
return $ either Just (const Nothing) e
-- | Take an 'AsyncRead' a and provide Either RadosError a
-- This function is used for retrieving the value of an async read.
--
-- @
-- runOurPool . runAsync . runObject \"a box\" $ do
-- async_read \<- readFull
-- liftIO $ putStrLn \"Request is in flight!\"
-- either_error_or_read \<- look async_read
-- either (liftIO . throwIO) BS.putStrLn either_error_or_read
-- @
look :: (MonadIO m, Typeable a)
=> AsyncRead a -> m (Either E.RadosError a)
look async_request =
case async_request of
ReadFailure e ->
return $ Left e
ReadInFlight completion a -> do
ret <- liftIO $ do
B.waitForSafe completion
B.getAsyncError completion
return $ case ret of
Left e -> Left e
Right n -> Right $
-- This is a hack to trim async bytestrings to the correct
-- size on receiving the actual number of bytes read from
-- getAsyncError. The casting is needed so that the user
-- can simply use "look" to look at any read value, it just
-- so happens that when the user looks at a ByteString
-- value we magically trim it to the correct size.
case (cast a :: Maybe B.ByteString) of
Just bs -> fromJust . cast $ B.take n bs
Nothing -> a
-- | Run an action with a completion.
withActionCompletion :: (B.Completion -> IO (Either E.RadosError a)) -> Object Async AsyncWrite
withActionCompletion f = do
completion <- liftIO B.newCompletion
result <- liftIO $ f completion
return $ case result of
Left e -> ActionFailure e
Right _ -> ActionInFlight completion
-- | Run an read with a completion
withReadCompletion :: (B.Completion -> IO (Either E.RadosError a)) -> Object Async (AsyncRead a)
withReadCompletion f = do
completion <- liftIO B.newCompletion
result <- liftIO $ f completion
return $ case result of
Left e -> ReadFailure e
Right a -> ReadInFlight completion a
-- |
-- Run an action within the 'Connection' monad, this may throw a RadosError to
-- IO if the connection or configuration fails.
--
-- @
-- runConnect Nothing (parseConfig \"ceph.conf\") $ runPool ...
-- @
runConnect
:: Maybe B.ByteString -- ^ Optional user name
-> (B.Connection -> IO (Maybe E.RadosError)) -- ^ Configuration function
-> Connection a
-> IO a
runConnect user configure (Connection action) =
bracket
(do h <- B.newConnection user
conf <- configure h
case conf of
Just e -> do
B.cleanupConnection h
throwIO e
Nothing -> do
B.connect h
return h)
B.cleanupConnection
(runReaderT action)
-- |
--
-- Run an action within the 'Pool' monad.
--
-- This may throw a RadosError to IO if the pool cannot be opened.
--
-- For the following examples, we shall use:
--
-- @
-- runOurPool :: Pool a -> IO a
-- runOurPool =
-- runConnect Nothing parseArgv . runPool \"magic_pool\"
-- @
runPool :: B.ByteString -> Pool a -> Connection a
runPool pool (Pool action) = do
connection <- ask
liftIO $ bracket
(B.newIOContext connection pool)
B.cleanupIOContext
(runReaderT action)
-- |
-- Run an action within the 'Object m' monad, where m is the caller's context.
--
-- @
-- (runOurPool . runObject \"an oid\" :: Object Pool a -> IO a
-- (runOurPool . runAsync . runObject \"an oid\") :: Object Async a -> IO a
-- @
runObject :: PoolReader m =>
B.ByteString -> Object m a -> m a
runObject object_id (Object action) =
runReaderT action object_id
-- |
-- Any read/writes within this monad will be run asynchronously.
--
-- Return values of reads and writes are wrapped within 'AsyncRead' or
-- 'AsyncWrite' respectively. You should extract the actual value from a read
-- via 'look' and 'waitSafe'.
--
-- The asynchronous nature of error handling means that if you fail to inspect
-- asynchronous writes with 'waitSafe', you will never know if they failed.
--
-- @
-- runOurPool . runAsync . runObject \"a box\" $ do
-- wr <- writeFull \"schrodinger's hai?\\n\"
-- writeChunk 14 \"cat\" -- Don't care about the cat.
-- print . isNothing \<$\> waitSafe wr
-- r \<- readFull \>>= look
-- either throwIO print r
-- @
runAsync :: PoolReader m => Async a -> m a
runAsync (Async action) = do
-- We merely re-wrap the pool.
pool <- ask
liftIO $ runReaderT action pool
-- | Read a config from a relative or absolute 'FilePath' into a 'Connection'.
--
-- Intended for use with 'runConnect'.
parseConfig :: FilePath -> B.Connection -> IO (Maybe E.RadosError)
parseConfig = flip B.confReadFile
-- | Read a config from the command line, note that no help flag will be
-- provided.
parseArgv :: B.Connection -> IO (Maybe E.RadosError)
parseArgv = B.confParseArgv
-- | Parse the contents of the environment variable CEPH_ARGS as if they were
-- ceph command line options.
parseEnv :: B.Connection -> IO (Maybe E.RadosError)
parseEnv = B.confParseEnv
-- | Perform an action with an exclusive lock.
withExclusiveLock
:: B.ByteString -- ^ Object ID
-> B.ByteString -- ^ Name of lock
-> B.ByteString -- ^ Description of lock (debugging)
-> Maybe Double -- ^ Optional duration of lock
-> Pool a -- ^ Action to perform with lock
-> Pool a
withExclusiveLock oid name desc duration action =
withLock oid name action $ \pool cookie ->
B.exclusiveLock pool oid name cookie desc duration []
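-- A hedged usage sketch (not from the original docs): hold an exclusive lock
-- on an object while replacing its contents. String literals stand in for
-- ByteStrings, so OverloadedStrings (or explicit packing) is assumed.
--
-- @
-- lockedRewrite :: Pool (Maybe E.RadosError)
-- lockedRewrite =
--     withExclusiveLock \"an oid\" \"my-lock\" \"rewriting\" (Just 30) $
--         runObject \"an oid\" (writeFull \"new contents\")
-- @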
-- | Perform an action with an shared lock.
withSharedLock
:: B.ByteString -- ^ Object ID
-> B.ByteString -- ^ Name of lock
-> B.ByteString -- ^ Description of lock (debugging)
-> B.ByteString -- ^ Tag for lock holder (debugging)
-> Maybe Double -- ^ Optional duration of lock
-> Pool a -- ^ Action to perform with lock
-> Pool a
withSharedLock oid name desc tag duration action =
withLock oid name action $ \pool cookie ->
B.sharedLock pool oid name cookie tag desc duration []
withLock
:: B.ByteString
-> B.ByteString
-> Pool a
-> (B.IOContext -> B.ByteString -> IO b)
-> Pool a
withLock oid name (Pool user_action) lock_action = do
pool <- ask
cookie <- liftIO B.newCookie
-- Re-wrap user's action in a sub-ReaderT that is identical, this way we
-- can just use bracket_ to ensure the lock is cleaned up even if they
-- generate an exception.
liftIO $ bracket_
(lock_action pool cookie)
(B.unlock pool oid name cookie >>= B.missingOK)
(runReaderT user_action pool)
-- | Return a strict list of pool items.
objects :: Pool [B.ByteString]
objects = do
os <- unsafeObjects
length os `seq` return os
-- | Return a lazy list of pool items. This list must be evaluated within the
-- pool monad, if you wish to access the list outside of the pool monad you
-- must fully evaluate it first (which is all objects does).
unsafeObjects :: Pool [B.ByteString]
unsafeObjects = do
pool <- ask
liftIO $ B.unsafeObjects pool
#if defined(ATOMIC_WRITES)
assertExists :: AtomicWrite ()
assertExists = do
op <- ask
liftIO $ B.writeOperationAssertExists op
compareXAttribute :: B.ByteString -> B.ComparisonFlag -> B.ByteString -> AtomicWrite ()
compareXAttribute key operator value = do
op <- ask
liftIO $ B.writeOperationCompareXAttribute op key operator value
setXAttribute :: B.ByteString -> B.ByteString -> AtomicWrite ()
setXAttribute key value = do
op <- ask
liftIO $ B.writeOperationSetXAttribute op key value
#endif
| anchor/rados-haskell | src/System/Rados/Monadic.hs | bsd-3-clause | 22,702 | 0 | 20 | 6,053 | 4,143 | 2,227 | 1,916 | -1 | -1 |
import "Lib/Prelude.hs"
unittest "not, null, iszero, even, odd" [
(not True, False),
(not False, True),
(null [], True),
(null [1], False),
(iszero 0, True),
(iszero 1, False),
(even 1, False),
(even 2, True),
(even 0, True),
(odd 0, False),
(odd 101, True),
]
unittest "abs, max, min, gcd, lcm" [
(abs -10, 10),
(abs (-10), 10),
(abs 10, 10),
(max 1 2, 2),
(min 1 2, 1),
(gcd 7 5, 1),
(gcd 24 12, 12),
(gcd 12 128, 4),
(gcd 24 (gcd 12 128), 4),
(lcm 7 5, 35),
(lcm (lcm 7 5) 9, 315),
]
unittest "fold-left, fold-right" [
(show (foldl (+) 0 [1,2,3,4,5,6,7,8,9,10]), "55"),
(show (foldl (\a x -> x : a) [] [1,2,3,4,5,6,7,8,9,10]), "[10,9,8,7,6,5,4,3,2,1]"),
(show (foldr (+) 0 [1,2,3,4,5,6,7,8,9,10]), "55"),
(show (foldr (:) [] [1,2,3,4,5,6,7,8,9,10]), "[1,2,3,4,5,6,7,8,9,10]"),
]
unittest "append(++), length, reverse" [
(append "abc" "def", "abcdef"),
(show (append [1,2,3] [4,5,6]), "[1,2,3,4,5,6]"),
(show (length []), "0"),
(show (length ['a','b','c','d','e']), "5"),
(show (reverse []), "[]"),
(show (reverse [1,2,3,4,5]), "[5,4,3,2,1]"),
]
unittest "!!, elem" [
(show ([0,1,2,3,4,5] !! 0), "0"),
(show ([0,1,2,3,4,5] !! 1), "1"),
(show ([0,1,2,3,4,5] !! 5), "5"),
(show ("abcdef" !! 0), "'a'"),
(show (elem 3 [0,1,2,3,4,5]), "True"),
(show (elem 9 [0,1,2,3,4,5]), "False"),
(show (elem 'a' "Hello"), "False"),
(show (elem 'H' "Hello"), "True"),
]
unittest "map, filter" [
(show (map (\x -> x * x) [1,2,3,4,5]), "[1,4,9,16,25]"),
(show (map ((*) 2) [1,2,3,4,5]), "[2,4,6,8,10]"),
(show (map (2 *) [1,2,3,4,5]), "[2,4,6,8,10]"),
(show (map (++"!") ["a","b","c"]), "[\"a!\",\"b!\",\"c!\"]"),
(show (map ("!"++) ["a","b","c"]), "[\"!a\",\"!b\",\"!c\"]"),
(show (filter even [1,2,3,4,5,6,7,8]), "[2,4,6,8]"),
(show (filter odd [1,2,3,4,5,6,7,8]), "[1,3,5,7]"),
]
unittest "(||), (&&), or, and, any, all" [
(True || False, True),
(True && False, False),
(or [True, False, True], True),
(and [True, False, True], False),
(any even [1,2,3], True),
(all even [1,2,3], False),
]
unittest "let" [
(let a = 10 in let b = 20 in
let a = 0 in let b = 1 in [a,b] , [0,1]),
(let a = 1 in
let b = a + 1 in [a,b] , [1,2]),
(let a = 1 in
let b = a + 1 in
let c = b * 2 in [a,b,c] , [1,2,4]),
]
unittest "fix" [
(let factorial = fix (\f m -> if iszero m then 1 else m * (f (m - 1)))
in factorial 10, 3628800)
]
unittest "letrec" [
(letrec factorial::Int->Int = \m -> if iszero m then 1 else m * (factorial (m - 1))
in factorial 10, 3628800)
]
{--}
| ocean0yohsuke/Simply-Typed-Lambda | Start/UnitTest/Prelude.hs | bsd-3-clause | 2,980 | 2 | 20 | 949 | 1,758 | 1,018 | 740 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
module Coinbase.Exchange.Rest
( coinbaseGet
, coinbasePost
, coinbaseDelete
, coinbaseDeleteDiscardBody
, voidBody
) where
import Control.Monad.Except
import Control.Monad.Reader
import Control.Monad.Trans.Resource
import Crypto.Hash
import Data.Aeson
import Data.Byteable
import qualified Data.ByteString as BS
import qualified Data.ByteString.Base64 as Base64
import qualified Data.ByteString.Char8 as CBS
import qualified Data.ByteString.Lazy as LBS
import Data.Conduit
import Data.Conduit.Attoparsec (sinkParser)
import qualified Data.Conduit.Binary as CB
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Time
import Data.Time.Clock.POSIX
import Network.HTTP.Conduit
import Network.HTTP.Types
import Text.Printf
import Coinbase.Exchange.Types
import Debug.Trace
type Signed = Bool
type IsForExchange = Bool
voidBody :: Maybe ()
voidBody = Nothing
coinbaseGet :: ( ToJSON a
, FromJSON b
, MonadResource m
, MonadReader ExchangeConf m
, MonadError ExchangeFailure m )
=> Signed -> Path -> Maybe a -> m b
coinbaseGet sgn p ma = coinbaseRequest "GET" sgn p ma >>= processResponse True
coinbasePost :: ( ToJSON a
, FromJSON b
, MonadResource m
, MonadReader ExchangeConf m
, MonadError ExchangeFailure m )
=> Signed -> Path -> Maybe a -> m b
coinbasePost sgn p ma = coinbaseRequest "POST" sgn p ma >>= processResponse True
coinbaseDelete :: ( ToJSON a
, FromJSON b
, MonadResource m
, MonadReader ExchangeConf m
, MonadError ExchangeFailure m )
=> Signed -> Path -> Maybe a -> m b
coinbaseDelete sgn p ma = coinbaseRequest "DELETE" sgn p ma >>= processResponse True
coinbaseDeleteDiscardBody :: ( ToJSON a
, MonadResource m
, MonadReader ExchangeConf m
, MonadError ExchangeFailure m )
=> Signed -> Path -> Maybe a -> m ()
coinbaseDeleteDiscardBody sgn p ma = coinbaseRequest "DELETE" sgn p ma >>= processEmpty
coinbaseRequest :: ( ToJSON a
, MonadResource m
, MonadReader ExchangeConf m
, MonadError ExchangeFailure m )
=> Method -> Signed -> Path -> Maybe a -> m (Response (ResumableSource m BS.ByteString))
coinbaseRequest meth sgn p ma = do
conf <- ask
req <- case apiType conf of
Sandbox -> parseUrlThrow $ sandboxRest ++ p
Live -> parseUrlThrow $ liveRest ++ p
let req' = req { method = meth
, requestHeaders = [ ("user-agent", "haskell")
, ("accept", "application/json")
]
}
flip http (manager conf) =<< signMessage True sgn meth p
=<< encodeBody ma req'
realCoinbaseRequest :: ( ToJSON a
, MonadResource m
, MonadReader ExchangeConf m
, MonadError ExchangeFailure m )
=> Method -> Signed -> Path -> Maybe a -> m (Response (ResumableSource m BS.ByteString))
realCoinbaseRequest meth sgn p ma = do
conf <- ask
req <- case apiType conf of
Sandbox -> parseUrlThrow $ sandboxRealCoinbaseRest ++ p
Live -> parseUrlThrow $ liveRealCoinbaseRest ++ p
let req' = req { method = meth
, requestHeaders = [ ("user-agent", "haskell")
, ("accept", "application/json")
, ("content-type", "application/json")
]
}
flip http (manager conf) =<< signMessage False sgn meth p
=<< encodeBody ma req'
encodeBody :: (ToJSON a, Monad m)
=> Maybe a -> Request -> m Request
encodeBody (Just a) req = return req
{ requestHeaders = requestHeaders req ++
[ ("content-type", "application/json") ]
, requestBody = RequestBodyBS $ LBS.toStrict $ encode a
}
encodeBody Nothing req = return req
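-- Compute the authentication headers. For the exchange API the signature is
-- a base64-encoded HMAC-SHA256 over timestamp ++ method ++ path ++ body,
-- keyed with the token's secret, and is sent together with the key,
-- timestamp and passphrase headers. For the non-exchange ("real Coinbase")
-- variant the secret is base64-encoded before keying, the digest is
-- hex-encoded, and a cb-version header replaces the passphrase. Unsigned
-- requests are passed through untouched.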
signMessage :: (MonadIO m, MonadReader ExchangeConf m, MonadError ExchangeFailure m)
=> IsForExchange -> Signed -> Method -> Path -> Request -> m Request
signMessage isForExchange True meth p req = do
conf <- ask
case authToken conf of
Just tok -> do time <- liftM (realToFrac . utcTimeToPOSIXSeconds) (liftIO getCurrentTime)
>>= \t -> return . CBS.pack $ printf "%.0f" (t::Double)
rBody <- pullBody $ requestBody req
let presign = CBS.concat [time, meth, CBS.pack p, rBody]
sign = if isForExchange
then Base64.encode $ toBytes (hmac (secret tok) presign :: HMAC SHA256)
else digestToHexByteString $ hmacGetDigest (hmac (Base64.encode $ secret tok) presign :: HMAC SHA256)
return req
{ requestBody = RequestBodyBS rBody
, requestHeaders = requestHeaders req ++
[ ("cb-access-key", key tok)
, ("cb-access-sign", sign )
, ("cb-access-timestamp", time)
] ++ if isForExchange
then [("cb-access-passphrase", passphrase tok)]
else [("cb-version", "2016-05-11")]
}
Nothing -> throwError $ AuthenticationRequiredFailure $ T.pack p
where pullBody (RequestBodyBS b) = return b
pullBody (RequestBodyLBS b) = return $ LBS.toStrict b
pullBody _ = throwError AuthenticationRequiresByteStrings
signMessage _ False _ _ req = return req
--
processResponse :: ( FromJSON b
, MonadResource m
, MonadReader ExchangeConf m
, MonadError ExchangeFailure m )
=> IsForExchange -> Response (ResumableSource m BS.ByteString) -> m b
processResponse isForExchange res =
case responseStatus res of
s | s == status200 || (s == created201 && not isForExchange) ->
do body <- responseBody res $$+- sinkParser (fmap (\x -> {-trace (show x)-} fromJSON x) json)
case body of
Success b -> return b
Error er -> throwError $ ParseFailure $ T.pack er
| otherwise -> do body <- responseBody res $$+- CB.sinkLbs
throwError $ ApiFailure $ T.decodeUtf8 $ LBS.toStrict body
processEmpty :: ( MonadResource m
, MonadReader ExchangeConf m
, MonadError ExchangeFailure m )
=> Response (ResumableSource m BS.ByteString) -> m ()
processEmpty res =
case responseStatus res of
s | s == status200 -> return ()
| otherwise -> do body <- responseBody res $$+- CB.sinkLbs
throwError $ ApiFailure $ T.decodeUtf8 $ LBS.toStrict body
| AndrewRademacher/coinbase-exchange | src/Coinbase/Exchange/Rest.hs | mit | 8,084 | 0 | 22 | 3,345 | 1,922 | 993 | 929 | 149 | 6 |
import Drawing
import Exercises
import Geometry
main = drawPicture myPicture
myPicture points =
message ("Rotations" ++ ", angle(AOA')=" ++ shownum (angle a o a')
++ ", angle(XOX')=" ++ shownum (angle x o x'))
& red ( drawSegment (o,a)
& drawSegment (a,x)
& drawSegment (x,o)
)
& blue ( drawSegment (o,a')
& drawSegment (a',x')
& drawSegment (o,x')
)
& faint ( drawArc (x,o,x')
& drawArc (a,o,a')
)
& drawPointsLabels [o,a,a',x,x'] ["O","A","A'","X","X'"]
where [a,o,b,x] = take 4 points
Just a' = find (not . beyond (b,o)) $ line_circle (o,b) (o,a)
bs = angle_bisector (a,o,a')
x' = mirror (o,a') $ mirror (o,bs) x
| alphalambda/hsmath | src/Learn/Geometry/ex14rotate.hs | gpl-2.0 | 785 | 0 | 16 | 269 | 361 | 196 | 165 | 20 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="sq-AL">
<title>Support for the Open API Specification | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | veggiespam/zap-extensions | addOns/openapi/src/main/javahelp/org/zaproxy/zap/extension/openapi/resources/help_sq_AL/helpset_sq_AL.hs | apache-2.0 | 1,000 | 80 | 66 | 164 | 423 | 214 | 209 | -1 | -1 |
{-
Copyright 2010-2012 Cognimeta Inc.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is
distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
or implied. See the License for the specific language governing permissions and limitations under the License.
-}
module Database.Perdure.Space (
Space(..),
module Cgm.Data.SortedPair,
module Data.Monoid,
Span
) where
import Prelude()
import Cgm.Prelude
import Data.Monoid
import Data.Word
import Cgm.Data.SortedPair
import Database.Perdure.StoreFile(Span)
-- Users must ensure they do not add overlapping spans, or remove spans unless all of their contents have already been added (perhaps as separate spans)
class Space a where
emptySpace :: a
removeSpan :: Span -> a -> a
addSpan :: Span -> a -> a
findSpan :: Word64 -> a -> [Span]
isFreeSpace :: Word64 -> a -> Bool
| Cognimeta/perdure | src/Database/Perdure/Space.hs | apache-2.0 | 1,179 | 0 | 9 | 235 | 141 | 85 | 56 | 17 | 0 |
module Propellor.Property.Ssh (
PubKeyText,
setSshdConfig,
permitRootLogin,
passwordAuthentication,
hasAuthorizedKeys,
authorizedKey,
restarted,
randomHostKeys,
hostKeys,
hostKey,
pubKey,
getPubKey,
keyImported,
keyImported',
knownHost,
authorizedKeys,
listenPort
) where
import Propellor
import qualified Propellor.Property.File as File
import qualified Propellor.Property.Service as Service
import Propellor.Property.User
import Utility.SafeCommand
import Utility.FileMode
import System.PosixCompat
import qualified Data.Map as M
type PubKeyText = String
sshBool :: Bool -> String
sshBool True = "yes"
sshBool False = "no"
sshdConfig :: FilePath
sshdConfig = "/etc/ssh/sshd_config"
setSshdConfig :: String -> Bool -> Property NoInfo
setSshdConfig setting allowed = combineProperties "sshd config"
[ sshdConfig `File.lacksLine` (sshline $ not allowed)
, sshdConfig `File.containsLine` (sshline allowed)
]
`onChange` restarted
`describe` unwords [ "ssh config:", setting, sshBool allowed ]
where
sshline v = setting ++ " " ++ sshBool v
permitRootLogin :: Bool -> Property NoInfo
permitRootLogin = setSshdConfig "PermitRootLogin"
passwordAuthentication :: Bool -> Property NoInfo
passwordAuthentication = setSshdConfig "PasswordAuthentication"
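-- A hedged usage sketch (illustrative only): combine the two settings above
-- into a single property, using 'combineProperties' as elsewhere in this
-- module.
--
-- > sshHardened :: Property NoInfo
-- > sshHardened = combineProperties "ssh hardened"
-- >     [ permitRootLogin False
-- >     , passwordAuthentication False
-- >     ]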
dotDir :: UserName -> IO FilePath
dotDir user = do
h <- homedir user
return $ h </> ".ssh"
dotFile :: FilePath -> UserName -> IO FilePath
dotFile f user = do
d <- dotDir user
return $ d </> f
hasAuthorizedKeys :: UserName -> IO Bool
hasAuthorizedKeys = go <=< dotFile "authorized_keys"
where
go f = not . null <$> catchDefaultIO "" (readFile f)
restarted :: Property NoInfo
restarted = Service.restarted "ssh"
-- | Blows away existing host keys and makes new ones.
-- Useful for systems installed from an image that might reuse host keys.
-- A flag file is used to only ever do this once.
randomHostKeys :: Property NoInfo
randomHostKeys = flagFile prop "/etc/ssh/.unique_host_keys"
`onChange` restarted
where
prop = property "ssh random host keys" $ do
void $ liftIO $ boolSystem "sh"
[ Param "-c"
, Param "rm -f /etc/ssh/ssh_host_*"
]
ensureProperty $ scriptProperty
[ "DPKG_MAINTSCRIPT_NAME=postinst DPKG_MAINTSCRIPT_PACKAGE=openssh-server /var/lib/dpkg/info/openssh-server.postinst configure" ]
-- | Installs the specified list of ssh host keys.
--
-- The corresponding private keys come from the privdata.
--
-- Any host keys that are not in the list are removed from the host.
hostKeys :: IsContext c => c -> [(SshKeyType, PubKeyText)] -> Property HasInfo
hostKeys ctx l = propertyList desc $ catMaybes $
map (\(t, pub) -> Just $ hostKey ctx t pub) l ++ [cleanup]
where
desc = "ssh host keys configured " ++ typelist (map fst l)
typelist tl = "(" ++ unwords (map fromKeyType tl) ++ ")"
alltypes = [minBound..maxBound]
staletypes = let have = map fst l in filter (`notElem` have) alltypes
removestale b = map (File.notPresent . flip keyFile b) staletypes
cleanup
| null staletypes || null l = Nothing
| otherwise = Just $ toProp $
property ("any other ssh host keys removed " ++ typelist staletypes) $
ensureProperty $
combineProperties desc (removestale True ++ removestale False)
`onChange` restarted
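-- A hedged usage sketch (illustrative only): pin a single ed25519 host key.
-- The 'Context' value and the key text are placeholders; the matching
-- private key must be available in the privdata for that context.
--
-- > myHostKeys :: Property HasInfo
-- > myHostKeys = hostKeys (Context "example.com")
-- >     [ (SshEd25519, "ssh-ed25519 AAAA... root@example.com") ]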
-- | Installs a single ssh host key of a particular type.
--
-- The public key is provided to this function;
-- the private key comes from the privdata;
hostKey :: IsContext c => c -> SshKeyType -> PubKeyText -> Property HasInfo
hostKey context keytype pub = combineProperties desc
[ pubKey keytype pub
, toProp $ property desc $ install writeFile True pub
, withPrivData (keysrc "" (SshPrivKey keytype "")) context $ \getkey ->
property desc $ getkey $ install writeFileProtected False
]
`onChange` restarted
where
desc = "ssh host key configured (" ++ fromKeyType keytype ++ ")"
install writer ispub key = do
let f = keyFile keytype ispub
s <- liftIO $ catchDefaultIO "" $ readFileStrict f
if s == key
then noChange
else makeChange $ writer f key
keysrc ext field = PrivDataSourceFileFromCommand field ("sshkey"++ext)
("ssh-keygen -t " ++ sshKeyTypeParam keytype ++ " -f sshkey")
keyFile :: SshKeyType -> Bool -> FilePath
keyFile keytype ispub = "/etc/ssh/ssh_host_" ++ fromKeyType keytype ++ "_key" ++ ext
where
ext = if ispub then ".pub" else ""
-- | Indicates the host key that is used by a Host, but does not actually
-- configure the host to use it. Normally this does not need to be used;
-- use 'hostKey' instead.
pubKey :: SshKeyType -> PubKeyText -> Property HasInfo
pubKey t k = pureInfoProperty ("ssh pubkey known") $
mempty { _sshPubKey = M.singleton t k }
getPubKey :: Propellor (M.Map SshKeyType String)
getPubKey = asks (_sshPubKey . hostInfo)
-- | Sets up a user with a ssh private key and public key pair from the
-- PrivData.
--
-- If the user already has a private/public key, it is left unchanged.
keyImported :: IsContext c => SshKeyType -> UserName -> c -> Property HasInfo
keyImported = keyImported' Nothing
-- | A file can be specified to write the key to somewhere other than
-- usual. Allows a user to have multiple keys for different roles.
keyImported' :: IsContext c => Maybe FilePath -> SshKeyType -> UserName -> c -> Property HasInfo
keyImported' dest keytype user context = combineProperties desc
[ installkey (SshPubKey keytype user) (install writeFile ".pub")
, installkey (SshPrivKey keytype user) (install writeFileProtected "")
]
where
desc = unwords $ catMaybes
[ Just user
, Just "has ssh key"
, dest
, Just $ "(" ++ fromKeyType keytype ++ ")"
]
installkey p a = withPrivData p context $ \getkey ->
property desc $ getkey a
install writer ext key = do
f <- liftIO $ keyfile ext
ifM (liftIO $ doesFileExist f)
( noChange
, ensureProperties
[ property desc $ makeChange $ do
createDirectoryIfMissing True (takeDirectory f)
writer f key
, File.ownerGroup f user user
, File.ownerGroup (takeDirectory f) user user
]
)
keyfile ext = case dest of
Nothing -> do
home <- homeDirectory <$> getUserEntryForName user
return $ home </> ".ssh" </> "id_" ++ fromKeyType keytype ++ ext
Just f -> return $ f ++ ext
fromKeyType :: SshKeyType -> String
fromKeyType SshRsa = "rsa"
fromKeyType SshDsa = "dsa"
fromKeyType SshEcdsa = "ecdsa"
fromKeyType SshEd25519 = "ed25519"
-- | Puts some host's ssh public key(s), as set using 'pubKey' or 'hostKey'
-- into the known_hosts file for a user.
knownHost :: [Host] -> HostName -> UserName -> Property NoInfo
knownHost hosts hn user = property desc $
go =<< fromHost hosts hn getPubKey
where
desc = user ++ " knows ssh key for " ++ hn
go (Just m) | not (M.null m) = do
f <- liftIO $ dotFile "known_hosts" user
ensureProperty $ combineProperties desc
[ File.dirExists (takeDirectory f)
, f `File.containsLines`
(map (\k -> hn ++ " " ++ k) (M.elems m))
, File.ownerGroup f user user
, File.ownerGroup (takeDirectory f) user user
]
go _ = do
warningMessage $ "no configured pubKey for " ++ hn
return FailedChange
-- | Makes a user have authorized_keys from the PrivData
--
-- This removes any other lines from the file.
authorizedKeys :: IsContext c => UserName -> c -> Property HasInfo
authorizedKeys user context = withPrivData (SshAuthorizedKeys user) context $ \get ->
property (user ++ " has authorized_keys") $ get $ \v -> do
f <- liftIO $ dotFile "authorized_keys" user
liftIO $ do
createDirectoryIfMissing True (takeDirectory f)
writeFileProtected f v
ensureProperties
[ File.ownerGroup f user user
, File.ownerGroup (takeDirectory f) user user
]
-- | Ensures that a user's authorized_keys contains a line.
-- Any other lines in the file are preserved as-is.
authorizedKey :: UserName -> String -> Property NoInfo
authorizedKey user l = property desc $ do
f <- liftIO $ dotFile "authorized_keys" user
ensureProperty $ combineProperties desc
[ f `File.containsLine` l
`requires` File.dirExists (takeDirectory f)
`onChange` File.mode f (combineModes [ownerWriteMode, ownerReadMode])
, File.ownerGroup f user user
, File.ownerGroup (takeDirectory f) user user
]
where
desc = user ++ " has autorized_keys line " ++ l
-- | Makes the ssh server listen on a given port, in addition to any other
-- ports it is configured to listen on.
--
-- Revert to prevent it listening on a particular port.
listenPort :: Int -> RevertableProperty
listenPort port = enable <!> disable
where
portline = "Port " ++ show port
enable = sshdConfig `File.containsLine` portline
`describe` ("ssh listening on " ++ portline)
`onChange` restarted
disable = sshdConfig `File.lacksLine` portline
`describe` ("ssh not listening on " ++ portline)
`onChange` restarted
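-- A minimal usage sketch (the property name below is hypothetical; it simply
-- applies 'listenPort' as defined above):
sshAltPort :: RevertableProperty
sshAltPort = listenPort 2222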
| avengerpenguin/propellor | src/Propellor/Property/Ssh.hs | bsd-2-clause | 8,752 | 124 | 19 | 1,638 | 2,491 | 1,279 | 1,212 | 181 | 2 |
module ParsecToken
{-# DEPRECATED "This module has moved to Text.ParserCombinators.Parsec.Token" #-}
(module Text.ParserCombinators.Parsec.Token) where
import Text.ParserCombinators.Parsec.Token
| alekar/hugs | fptools/hslibs/text/parsec/ParsecToken.hs | bsd-3-clause | 195 | 0 | 5 | 16 | 22 | 16 | 6 | 4 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE CPP #-}
module Codec.Picture.VectorByteConversion( blitVector
, toByteString
, imageFromUnsafePtr ) where
import Data.Word( Word8 )
import Data.Vector.Storable( Vector, unsafeToForeignPtr, unsafeFromForeignPtr0 )
import Foreign.Storable( Storable, sizeOf )
#if !MIN_VERSION_base(4,8,0)
import Foreign.ForeignPtr.Safe( ForeignPtr, castForeignPtr )
#else
import Foreign.ForeignPtr( ForeignPtr, castForeignPtr )
#endif
import qualified Data.ByteString as B
import qualified Data.ByteString.Internal as S
import Codec.Picture.Types
blitVector :: Vector Word8 -> Int -> Int -> B.ByteString
blitVector vec atIndex = S.PS ptr (offset + atIndex)
where (ptr, offset, _length) = unsafeToForeignPtr vec
toByteString :: forall a. (Storable a) => Vector a -> B.ByteString
toByteString vec = S.PS (castForeignPtr ptr) offset (len * size)
where (ptr, offset, len) = unsafeToForeignPtr vec
size = sizeOf (undefined :: a)
-- | Import an image from an unsafe pointer.
-- The pointer must point to at least width * height * componentCount bytes.
imageFromUnsafePtr :: forall px
. (Pixel px, (PixelBaseComponent px) ~ Word8)
=> Int -- ^ Width in pixels
-> Int -- ^ Height in pixels
-> ForeignPtr Word8 -- ^ Pointer to the raw data
-> Image px
imageFromUnsafePtr width height ptr =
Image width height $ unsafeFromForeignPtr0 ptr size
where compCount = componentCount (undefined :: px)
size = width * height * compCount
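-- A small usage sketch (hypothetical helper, not part of the original API):
-- reinterpret the raw component vector of an RGB8 image as a strict
-- ByteString without copying, via 'toByteString'.
imageBytes :: Image PixelRGB8 -> B.ByteString
imageBytes = toByteString . imageData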
| Chobbes/Juicy.Pixels | src/Codec/Picture/VectorByteConversion.hs | bsd-3-clause | 1,690 | 0 | 10 | 430 | 370 | 211 | 159 | 30 | 1 |
module Main (main) where
import Text.ParserCombinators.Parsec ( parse )
import Text.ParserCombinators.Parsec.Rfc2822
-- Read an Internet message from standard input, parse it,
-- and print the result.
main :: IO ()
main = do
input <- getContents
print $ parse message "<stdin>" (fixEol input)
return ()
-- Make sure all lines are terminated by CRLF.
fixEol :: String -> String
fixEol ('\r':'\n':xs) = '\r' : '\n' : fixEol xs
fixEol ('\n':xs) = '\r' : '\n' : fixEol xs
fixEol (x:xs) = x : fixEol xs
fixEol [] = []
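-- For example: fixEol "a\nb\r\n" == "a\r\nb\r\n"
-- (a bare LF becomes CRLF; an existing CRLF is preserved).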
| meteogrid/mime-mail-parser | example/message-test.hs | bsd-3-clause | 558 | 0 | 10 | 132 | 175 | 92 | 83 | 13 | 1 |
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
import Data.Functor.Identity
data HideArg f where
HideArg :: f a -> HideArg f
data family App :: tF -> tF
data instance App f x = App1 (f x)
class WrappedIn s a | s -> a where
unwrap :: s -> a
instance WrappedIn (App f a) (f a) where
unwrap (App1 fa) = fa
pattern Unwrapped :: WrappedIn s a => a -> s
pattern Unwrapped x <- (unwrap -> x)
{-# COMPLETE Unwrapped :: App #-}
boom :: HideArg (App Identity) -> Bool
boom (HideArg (Unwrapped (Identity _))) = True
main :: IO ()
main = print ":("
| sdiehl/ghc | testsuite/tests/pmcheck/should_compile/T17112.hs | bsd-3-clause | 761 | 0 | 11 | 147 | 233 | 125 | 108 | 24 | 1 |
module NN.Backend.Torch.Lua where
import Data.Word
import Language.Lua.Syntax
newtype LS = L String
-- |Handy typeclass for converting arguments
class ToLua a where
toLua :: a -> Exp
instance ToLua Word32 where
toLua = Number . show
instance ToLua LS where
toLua (L s') = String s'
instance ToLua Float where
toLua = Number . show
instance (ToLua a) => ToLua (Maybe a) where
toLua Nothing = Nil
toLua (Just a) = toLua a
-- Helpers for Lua code generation
assign :: Name -> Exp -> Stat
assign lval exp' = LocalAssign [lval] (Just [exp'])
require :: Name -> Stat
require module' = funCall "require" [toLua $ L module']
funCall :: Name -> [Exp] -> Stat
funCall name' args = FunCall (NormalFunCall (var name') (Args args))
methCall :: Name -> Name -> [Exp] -> Stat
methCall table field args = FunCall (MethodCall (var table) field (Args args))
return' :: Name -> Exp
return' name' = PrefixExp (var name')
var :: Name -> PrefixExp
var name' = PEVar (VarName name')
var' :: Name -> Exp
var' name' = PrefixExp (var name')
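-- A small usage sketch (hypothetical bindings, not part of the original
-- module): a @require 'nn'@ statement and a Lua numeric literal built with
-- the helpers above.
requireNN :: Stat
requireNN = require "nn"

sizeLit :: Exp
sizeLit = toLua (10 :: Word32)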
| sjfloat/dnngraph | NN/Backend/Torch/Lua.hs | bsd-3-clause | 1,073 | 0 | 9 | 237 | 410 | 213 | 197 | 29 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : RefacMerge
-- Copyright : (c) Christopher Brown 2006
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- This module contains a transformation for HaRe.
-- Merging definitions together.
module RefacMerge where
import System.IO.Unsafe
import PrettyPrint
import RefacTypeSyn
import RefacLocUtils
-- import GHC (Session)
import Data.Char
import GHC.Unicode
import AbstractIO
import Data.Maybe
import Data.List
import RefacUtils
import RefacRedunDec
import SlicingUtils
import System.Directory
import LocalSettings (mergeFilePath)
data FunEntity = Guard [HsPatP] [(SrcLoc, HsExpP, HsExpP)] [HsDeclP] PNT | Match [HsPatP] HsExpP [HsDeclP] PNT | Null
deriving (Eq, Show)
type Fun = [ FunEntity ]
-- we can "pair" an arbitrary number of matches together so
-- we can't use a tuple. We don't know the number of
-- functions to fuse!
type PairFun = [ Fun ]
refacMerge args
= do
let fileName = args!!0
name = args!!1
AbstractIO.putStrLn "refacMerge"
fileContent <- AbstractIO.readFile mergeFilePath
AbstractIO.removeFile mergeFilePath
AbstractIO.putStrLn "Cache flushed."
unless (isVarId name)
$ error "The new name is invalid!\n"
modName1 <- fileNameToModName fileName
let modName = modNameToStr modName1
modInfo@(inscps, exps, mod, tokList) <- parseSourceFile fileName
inscopeNames <- hsFDNamesFromInside mod
unless (not (name `elem` ((\(x,y) -> x ++ y) inscopeNames)))
$ error ("the use of the name: " ++ name ++ " is already in scope!")
let parse = force (parseFile fileContent mod)
let n = (parseName fileContent mod)
let extractedDecs = map fst (map extractDecs parse)
-- b is the boolean value for whether we
-- are fusing together functions within a where clause or not.
-- b == True = not in where clause
-- b == False = in where clause
let b = or (map snd (map extractDecs parse))
-- let extractedDecs' = pad extractedDecs
newDecl <- doFusing fileName extractedDecs b parse modName mod name
if b
then do
res3 <- applyRefac (addDecls1 newDecl) (Just (inscps, exps, mod, tokList)) fileName
writeRefactoredFiles True [res3]
(inscps2, exps2, mod2, tokList2) <- parseSourceFile fileName
let fusedDecl = declToName newDecl
let newRefactoredDecls1 = hsDecls mod2
let newRefactoredDecls2 = definingDecls (map (declToPName [fusedDecl]) newRefactoredDecls1) newRefactoredDecls1 False False
-- AbstractIO.putStrLn "parsed again"
sigs <- getSig fileName modName fusedDecl
res <- applyRefac (addTypes (map (declToPName [fusedDecl]) newRefactoredDecls1) [sigs]) (Just (inscps2, exps2, mod2, tokList2)) fileName
-- AbstractIO.putStrLn $ show res
writeRefactoredFiles True [res]
(inscps5, exps5, mod5, tokList5) <- parseSourceFile fileName
--(mod',((tokList'',modified),_))<-(doCommenting ( (map (declToPName [fusedDecl]) newRefactoredDecls1))) fileName mod5 tokList5
--writeRefactoredFiles True [((fileName, True), (tokList'', mod'))]
AbstractIO.putStrLn "Completed."
else do
-- where clause
-- recurse through each declaration selected.
-- replace the RHS with a call to the newly
-- created function
let selectedDecs = map extractDecs' parse
res3 <- applyRefac (addDecls2 n newDecl) (Just (inscps, exps, mod, tokList)) fileName
writeRefactoredFiles True [res3]
(inscps3, exps3, mod3, tokList3) <- parseSourceFile fileName
--
-- AbstractIO.putStrLn $ show newMod
res4 <- applyRefac (renameCallInSelected selectedDecs name) (Just (inscps3,exps3, mod3, tokList3)) fileName
writeRefactoredFiles True [res4]
-- writeRefactoredFiles False [((fileName, m), (newToks, newMod))]
AbstractIO.putStrLn "Completed."
doFusing fileName extractedDecs b parse modName mod name
= do
(decl, newDecl) <- doFusing' fileName b extractedDecs parse modName mod name
-- we need to recurse into the where clauses and perform
-- a fusion if necessary.
-- x <- fuseWheres decl modName mod ses1
return decl
fuseExprs :: HsExpP -> HsExpP
fuseExprs exp@(Exp (HsTuple [e1,e2]))
| (isFst e1 && isSnd e2) && (extractCall e1 == extractCall e2) = extractCall e1
| otherwise = exp
where
isFst :: HsExpP -> Bool
isFst (Exp (HsInfixApp e1 o e2))
= (render.ppi) e1 == "fst"
isFst x = False
isSnd :: HsExpP -> Bool
isSnd (Exp (HsInfixApp e1 o e2))
= (render.ppi) e1 == "snd"
isSnd x = False
extractCall :: HsExpP -> HsExpP
extractCall (Exp (HsInfixApp e1 s e2)) = e2
extractCall x = x
fuseExprs e = e
fuseWheres d@(Dec (HsFunBind loc0 ms)) modName mod ses1
= do matches <- lookInMatches ms
return (Dec (HsFunBind loc0 matches))
where
lookInMatches [] = return ([]::[HsMatchP])
lookInMatches (m@(HsMatch l n p exp@(HsBody exp'@(Exp (HsTuple [e,e1]))) ds):ms)
= do
let declNames = filter (/="") (map declToName ds)
(inscopeNames, inscopeNames2) <- hsFDNamesFromInside d
let name = ((expAppName e) ++ (expAppName e1))
let newName = mkNewName name (inscopeNames ++ inscopeNames2) 1
AbstractIO.putStrLn $ show (expAppName e)
if (expAppName e) `elem` declNames && (expAppName e1) `elem` declNames
then do
-- find the decl associated with e and e1
let decl1 = findDecl (expAppName e) ds
let decl2 = findDecl (expAppName e1) ds
let extractedDecs = map extractDecs [Right (decl1, False), Right (decl2, False)]
-- decl' <- doFusing extractedDecs False [Right decl1, Right decl2] modName ses1 mod newName
newExp <- renameFusionInstances exp' newName (map declToPName2 (map extractDecs' [Right (decl1, False), Right (decl2,False)]))
rest <- lookInMatches ms
-- createTypeSig (retieveTypeSig
-- return ((HsMatch l n p (HsBody newExp)
return ((HsMatch l n p (HsBody newExp) ds) : rest)
else do
AbstractIO.putStrLn (">" ++ (show e))
rest <- lookInMatches ms
AbstractIO.putStrLn $ show rest
return (m : rest)
lookInMatches (m:ms)
= do
rest <- lookInMatches ms
return (m : rest)
rmDecls :: [HsDeclP] -> (HsDeclP, HsDeclP) -> [HsDeclP]
rmDecls [] _ = []
rmDecls (d@(Dec (HsTypeSig _ i _ _)):ds) (d1,d2)
| declToPName2 d1 `elem` (map pNTtoPN i) = rmDecls ds (d1, d2)
| declToPName2 d2 `elem` (map pNTtoPN i) = rmDecls ds (d1, d2)
| otherwise = d : (rmDecls ds (d1, d2))
rmDecls (d:ds) (d1, d2)
| d == d1 = rmDecls ds (d1,d2)
| d == d2 = rmDecls ds (d1,d2)
| otherwise = d : (rmDecls ds (d1,d2))
findDecl :: String -> [HsDeclP] -> HsDeclP
findDecl _ [] = error "error in findDecl!"
findDecl name (d:ds)
| (declToName d) == name = d
| otherwise = findDecl name ds
expAppName :: HsExpP -> String
expAppName (Exp (HsApp e1 e2)) = expAppName e1
expAppName e@(Exp (HsId e1)) = pNtoName (expToPN e)
expAppName x = ""
doFusing' fileName b extractedDecs parse modName mod name
= do
let pairedDecs = pairMatches extractedDecs
if b
then do
converged <- isConvergeArguments fileName modName pairedDecs
newRhs' <- mapM ( tidyLetClauses mod converged) pairedDecs
let newDecl = createDecl newRhs' name
decl' <- renameFusionInstances newDecl name (map declToPName2 (map extractDecs' parse))
return (decl', newDecl)
else do
newRhs' <- mapM ( tidyLetClauses mod True) pairedDecs
let newDecl = createDecl newRhs' name
decl' <- renameFusionInstances newDecl name (map declToPName2 (map extractDecs' parse))
return (decl', newDecl)
-- renameFusionInstances :: (MonadState (([PosToken], Bool), t1) m) => HsDeclP -> String -> PName -> m HsDeclP
renameFusionInstances d _ [] = error "No functions have been selected for fusion!"
renameFusionInstances d n [a,b]
= do
dec <- renamePNNoState' a ("fst $ " ++ n) d
dec2 <- renamePNNoState' b ("snd $ " ++n) dec
return dec2
renameFusionInstances _ _ (x:xs) = error "Only two functions may be fused together!"
force :: Eq a => a -> a
force x = if x == x then x else x
{- tidy up arrangements of let clauses.
viz:
f x y = (let ls = x + 1 in ls, let rs = y - 1 in rs)
should be:
f x y = let ls = x + 1 ; rs = y - 1 in (ls, rs)
The idea is that it takes a list of expressions which are
fused together to form a tuple -- or a list of let declarations
and a tuple as a body.
-}
isDupDec :: [String] -> Bool
isDupDec [] = False
isDupDec (x:xs) = (x `elem` xs) || (isDupDec xs)
getDupDec :: [String] -> String
getDupDec [] = ""
getDupDec (x:xs)
| x `elem` xs = x
| otherwise = getDupDec xs
-- tidyLetClauses :: (Monad m, Term t) => t -> Bool -> Fun -> m FunEntity
tidyLetClauses _ _ [] = return Null
tidyLetClauses mod True (m@(Guard p (g@(s,e1,_):gs) ds pnt) : es)
= do
let allPats = extractPats' (m:es)
if all (==(head allPats)) allPats
then do
let guards = filterGuards (m:es)
let guards2 = map (map (\(x,y,z) -> y)) guards
let decs = map filterLet' guards
let exps = pairExps (map filterExp' guards)
let guardExp = filterGuardExp (g:gs)
-- let (decs, exps) = (filterLet (m:es), (filterExp (m:es)))
let ds' = filterDec (m:es)
let dsNames = filter (/="") (map declToName2 ds')
when (isDupDec dsNames) $ error ( "Please rename " ++ (getDupDec dsNames) ++ " as it conflicts during the merge." )
when (all (/=(head guards2)) (tail guards2))
$ error "The guards between the two functions do not match!"
-- AbstractIO.putStrLn $ show (decs, length decs)
if length (filter (/=[]) decs) > 0
then do
let returnGuard = createGuardLet guardExp decs exps
return (Guard p returnGuard ds' pnt)
else do
let returnGuard = createGuard guardExp exps
return (Guard p returnGuard ds' pnt)
-- return (Match p gs' ds' pnt)
else do
error "The patterns between the functions are not the same set! The patterns must be the same and have the same type."
where
pairExps :: [ [HsExpP] ] -> [ (HsExpP, HsExpP) ]
pairExps [] = []
pairExps [x::[HsExpP],y::[HsExpP]] = zip x y
pairExps x = error "Only two functions may be fused together!"
createGuardLet :: [HsExpP] -> [ [HsDeclP] ] -> [ (HsExpP, HsExpP) ] -> [ (SrcLoc, HsExpP, HsExpP) ]
createGuardLet [] [] [] = []
createGuardLet (e:ess) (d:ds) ((x,y):es)
= (s, e, (Exp (HsLet d (Exp (HsTuple (x:[y])))))) : (createGuardLet ess ds es)
createGuard :: [HsExpP] -> [ (HsExpP, HsExpP) ] -> [ (SrcLoc, HsExpP, HsExpP) ]
createGuard [] [] = []
createGuard (e:ess) ((x,y):es) = (s,e, (Exp (HsTuple (x:[y])))) : (createGuard ess es)
tidyLetClauses mod True (m@(Match p e ds pnt) : es)
= do
-- find Let Clauses
let allPats = extractPats' (m:es)
if all (==(head allPats)) allPats
then do
let (decs, exps) = (filterLet (m:es), filterExp (m:es))
let ds' = filterDec (m:es)
let dsNames = (filter (/="") (map declToName2 ds'))
when (isDupDec dsNames) $ error ( "Please rename " ++ (getDupDec dsNames) ++ " as it conflicts during the merge." )
if length decs > 0
then do
return (Match p (Exp (HsLet decs (Exp (HsTuple exps)))) ds' pnt)
else do
return (Match p (Exp (HsTuple exps)) ds' pnt)
else do
error "The patterns between the functions are not the same set! The patterns must be the same and have the same type."
tidyLetClauses _ False _ = error "The types between the functions to be fused to do match. Please change the types first so that the arguments can converge."
{- tidyLetClauses mod False (m@(Match p e ds pnt) : es)
= do
let (decs, exps) = (filterLet (m:es), filterExp (m:es))
if length decs > 0
then do
let letExp = (Exp (HsLet decs (Exp (HsTuple exps))))
(params, exps', ds') <- renameArgs 1 (extractPats (m:es)) letExp ds mod
return (Match params exps' ds' pnt)
else do
let letExp = (Exp (HsTuple exps))
(params, exps', ds') <- renameArgs 1 (extractPats (m:es)) letExp ds mod
return (Match params exps' ds' pnt) -}
extractPats :: Fun -> [ [HsPatP] ]
extractPats [] = []
extractPats ((Match p _ _ _):ms) = p : (extractPats ms)
extractPats ((Guard p _ _ _):ms) = p : (extractPats ms)
extractPats' :: Fun -> [ [String] ]
extractPats' [] = []
extractPats' ((Match p _ _ _):ms) = (map pNtoName (hsPNs p)) : (extractPats' ms)
extractPats' ((Guard p _ _ _):gs) = (map pNtoName (hsPNs p)) : (extractPats' gs)
filterGuardExp :: [(SrcLoc, HsExpP, HsExpP)] -> [HsExpP]
filterGuardExp [] = []
filterGuardExp ((_,e,_):gs) = e : (filterGuardExp gs)
filterGuards :: Fun -> [ [(SrcLoc, HsExpP, HsExpP)] ]
filterGuards [] = []
filterGuards ((Guard _ g _ _):gs) = (rmAllLocs g) : (filterGuards gs)
filterGuards (m:ms) = filterGuards ms
filterDec :: Fun -> [HsDeclP]
filterDec [] = []
filterDec ((Match p e ds pnt):es)= ds ++ (filterDec es)
filterDec ((Guard p g ds pnt):es) = ds ++ (filterDec es)
filterLet' :: [ (SrcLoc, HsExpP, HsExpP) ] -> [HsDeclP]
filterLet' [] = []
filterLet' ((_, _, e@(Exp(HsLet ds _))):gs) = ds ++ (filterLet' gs)
filterLet' (g:gs) = filterLet' gs
filterLet :: Fun -> [HsDeclP]
filterLet [] = []
filterLet ((Match p e@(Exp (HsLet ds _)) _ pnt) : es) = ds ++ (filterLet es)
filterLet ((Match p e ds pnt):es)= filterLet es
filterLet ((Guard p gs ds pnt):es) = (findLetInGuard gs) ++ (filterLet es)
where
findLetInGuard [] = []
findLetInGuard ((_, _, (Exp (HsLet ds _))):gs)
= ds ++ (findLetInGuard gs)
findLetInGuard ((_,_, e):gs) = findLetInGuard gs
filterExp' :: [ (SrcLoc, HsExpP, HsExpP) ] -> [HsExpP]
filterExp' [] = []
filterExp' ((_,_, (Exp (HsLet _ e))):es) = e : (filterExp' es)
filterExp' ((_,_, e):es) = e : (filterExp' es)
filterExp :: Fun -> [HsExpP]
filterExp [] = []
filterExp ((Match p (Exp (HsLet _ e)) ds pnt ): es) = e : (filterExp es)
filterExp ((Match p e ds pnt):es) = e : (filterExp es)
filterExp ((Guard p gs ds pnt):es) = (filterExpGuard gs) ++ (filterExp es)
where
filterExpGuard [] = []
filterExpGuard ((_, _, (Exp (HsLet _ e))):gs) = e : (filterExpGuard gs)
filterExpGuard ((_,_, e):gs) = e : (filterExpGuard gs)
renameArgs index params rhs wheres t
= do
-- AbstractIO.putStrLn $ show (map toRelativeLocs (concat params))
let dups = findDups (concat params)
(newPats, newExps, newDS) <- doRenaming index dups (concat params) rhs wheres t
return (newPats, newExps, newDS)
doRenaming :: (Monad m, MonadPlus m, Term t) => Int -> [PName] -> [HsPatP] -> HsExpP -> [HsDeclP] -> t -> m ([HsPatP],HsExpP, [HsDeclP])
doRenaming _ [] a b c _ = return (a,b, c)
doRenaming index (p:ps) pats exps ds t
= do
-- names <- mapM (addName t) (concat exps)
names <- addName t exps
newPat <- mapM (renamePNNoState p names index) pats
-- newExp <- mapM (mapListListM (renamePNNoState p (concat names) index)) exps
newExp <- renamePNNoState p names index exps
-- newDS <- mapM (mapListListM (renamePNNoState p names index)) ds
newDS <- renamePNNoState p names index ds
-- newExp <- mapM (renamePNNoState p name) exps
rest <- doRenaming (index+1) ps newPat newExp newDS t
return rest
addName mod e = do
names <- hsVisibleNames e mod
return names
-- mapListListM :: [HsExpP] -> m [ [HsExpP] ]
mapListListM f e
= mapM f e
renamePNNoState oldPN names index t
= applyTP (full_tdTP (adhocTP idTP rename)) t
where
rename (pnt@(PNT pn ty (N (Just (SrcLoc fileName c row col)))))
| (pn == oldPN) && (srcLoc oldPN == srcLoc pn)
= return (PNT (replaceNameInPN Nothing pn (mkNewName (pNtoName oldPN) names index )) ty (N (Just (SrcLoc fileName c row col))))
rename x = return x
renamePNNoState' oldPN name t
= applyTP (full_tdTP (adhocTP idTP rename)) t
where
rename (pnt@(PNT pn ty (N (Just (SrcLoc fileName c row col)))))
| (pn == oldPN) && (srcLoc oldPN == srcLoc pn)
= return (PNT (replaceNameInPN Nothing pn name) ty (N (Just (SrcLoc fileName c row col))))
rename x = return x
renameNameNoState' oldPN name t
= applyTP (once_tdTP (adhocTP failTP rename)) t
where
rename (n::String)
| n == oldPN
= return name
rename x = mzero
findDups :: [HsPatP] -> [PName]
findDups [] = []
findDups (x:xs) -- do any elements of x occur through out xs?
| pNtoName (patToPN x) `elem` (map (pNtoName.patToPN) xs) = (patToPN x) : findDups xs
| otherwise = findDups xs
doCommenting (x:xs) fileName mod tokList
= runStateT (applyTP ((once_tdTP (failTP `adhocTP` (rmInMod (x:xs) )
))) mod)
((tokList,unmodified),fileName)
where
--1. The definition to be removed is one of the module's top level declarations.
rmInMod [] mod = return mod
rmInMod (p:ps) (mod@(HsModule loc name exps imps ds):: HsModuleP)
= do ds'<-commentOutTypeSig p ds
res2 <- rmInMod ps (HsModule loc name exps imps ds')
return res2
addTypes _ [] (_,_,mod) = return mod
addTypes (x:xs) (y:ys) (a,b,mod) = do
mod' <- addTypeSigDecl mod (Just x) ([y], Nothing) True
-- commentOutTypeSig x (y:ys)
res <- addTypes xs ys (a,b,mod')
return mod'
replaceInE :: (Monad m, MonadPlus m) => PNT -> String -> HsExpP -> m HsExpP
replaceInE origName newName e
= applyTP (once_tdTP (adhocTP idTP rename')) e
where
rename' (e'@(Exp (HsId (HsVar x)))::HsExpP)
| pNTtoName x == pNTtoName origName = return (Exp (HsId (HsVar (nameToPNT newName) )))
| otherwise = return e'
rename' e@(Exp (HsApp e1 e2))
= do res1 <- rename' e1
res2 <- rename' e2
return (Exp (HsApp res1 res2))
rename' e = return e
cleanWhere :: (Monad m, MonadPlus m) => HsDeclP -> [PNT] -> String -> m HsDeclP
cleanWhere (Dec (HsPatBind loc p (HsBody e) ds)) names theName
= do res2 <- (cleanWhere2 ds names theName)
return (Dec (HsPatBind loc p (HsBody e) res2))
cleanWhere (Dec (HsFunBind loc (d:dss))) names theName
= do res2 <- (cleanMatches (d:dss) names theName)
return (Dec (HsFunBind loc res2))
where
cleanMatches :: (Monad m, MonadPlus m) => [HsMatchP] -> [PNT] -> String -> m [HsMatchP]
cleanMatches [] _ _ = return []
cleanMatches ((HsMatch l i1 ps (HsBody e) ds):ms) names theName
= do res1 <- (cleanWhere2 ds names theName)
res2 <- (cleanMatches ms names theName)
return ((HsMatch l i1 ps (HsBody e) res1 ): res2)
cleanWhere2 :: (Monad m, MonadPlus m) => [HsDeclP] -> [PNT] -> String -> m [HsDeclP]
cleanWhere2 [] _ _ = return []
cleanWhere2 x [] _ = return x
cleanWhere2 (de@(Dec (HsPatBind loc p (HsBody e) ds)):dss) (name:names) theName
| pNTtoName (getNameFromExp e) == (pNTtoName name) = do newPats' <- newPats
newBody <- replaceInE (getNameFromExp e) theName e
let filteredDecs = filterDecs dss names
return ((Dec (HsPatBind loc newPats' (HsBody newBody) ds)):filteredDecs)
| otherwise = do res <- (cleanWhere2 dss names theName)
return (de : res )
where
newPats = do res <- findRest dss names
return (Pat (HsPTuple loc0 (p: res)))
findRest _ [] = return []
findRest [] _ = return []
findRest (de@(Dec (HsPatBind loc p (HsBody e) ds)):dss) (name:names)
| pNTtoName (getNameFromExp e) == (pNTtoName name) = do res <- (findRest dss names)
return (p : res)
| otherwise = do res <- findRest dss names
return res
filterDecs [] _ = []
filterDecs (de@(Dec (HsPatBind loc p (HsBody e) ds)):dss) (name:names)
| (getNameFromExp e) `elem` (name:names) = filterDecs dss (name:names)
| otherwise = de : (filterDecs dss (name:names))
getNameFromExp (Exp (HsApp e1 e2))
= getNameFromExp e1
getNameFromExp e1
= expToPNT e1
addDecls1 x (_,_,mod) = do
mod' <- addDecl mod Nothing ([x], Nothing) True
return mod'
addDecls2 n x (_,_,mod) = do
let newDec = convertExpr x
mod' <- doAdding newDec n mod
return mod'
where
doAdding x n mod = applyTP (once_tdTP (failTP `adhocTP` worker)) mod
where
worker (y::HsDeclP)
| y == n = addDecl n Nothing ([x], Nothing) False
worker _ = mzero
convertExpr :: HsDeclP -> HsDeclP
convertExpr (Dec (HsPatBind loc0 pat (HsBody e) ds))
= (Dec (HsPatBind loc0 pat (HsBody (fuseExprs e)) ds))
convertExpr (Dec (HsFunBind loc0 ms))
= (Dec (HsFunBind loc0 (newMatches ms)))
where
newMatches [] = []
newMatches (m@(HsMatch loc0 x pats (HsBody e) ds):ms)
= (HsMatch loc0 x pats (HsBody (fuseExprs e)) ds) : (newMatches ms)
newMatches (m:ms) = m : (newMatches ms)
convertExpr d = d
renameCallInSelected [d1, d2] name (_,_,mod)
= applyTP (stop_tdTP (failTP `adhocTP` worker)) mod
where
worker d@(Dec (HsPatBind loc0 pat (HsBody e) ds))
| d == d1 = do
newDec <- update e (Exp (HsInfixApp (nameToExp "fst") (nameToIdent "$") (nameToExp name))) d
return newDec
| d == d2 = do
newDec <- update e (Exp (HsInfixApp (nameToExp "snd") (nameToIdent "$") (nameToExp name))) d
return newDec
worker d@(Dec (HsFunBind loc0 ms))
| d == d1 = do matches <- editMatches ms "fst"
return (Dec (HsFunBind loc0 matches))
| d == d2 = do matches <- editMatches ms "snd"
return (Dec (HsFunBind loc0 matches))
where
editMatches [] _ = return []
editMatches (m@(HsMatch loc0 _ pats (HsBody e) ds):ms') par
= do
let newPats = map (pNtoExp . patToPN) pats
newMatch <- update e ((Exp (HsInfixApp (nameToExp par) (nameToIdent "$") (createFunc' (nameToExp name ) newPats ) ))) m
rest <- editMatches ms' par
return (newMatch : rest)
worker _ = mzero
-- | y == n = addDecl n Nothing ([x], Nothing) False
createDecl :: Fun -> String -> HsDeclP
{-createDecl ((Match [] exps ds _):ms) name
= (Dec (HsPatBind loc0 (nameToPat name) (HsBody exps) (concat ds))) -}
createDecl (m:ms) name
= (Dec (HsFunBind loc0 (createMatches (m:ms) )))
where
createMatches [] = []
createMatches ((Guard p guards d _):gs)
=
(HsMatch loc0 (nameToPNT name) p (HsGuard guards) d) : (createMatches gs)
createMatches ((Match p e d _):ms)
=
(HsMatch loc0 (nameToPNT name) p (HsBody e) d) : (createMatches ms)
extractDecs :: Either String (HsDeclP, Bool) -> (Fun, Bool)
extractDecs dec
= do
case dec of
Left errMsg -> error errMsg
Right (decl, b) -> extractDecl (decl, b)
extractDecs' :: Either String (HsDeclP, Bool) -> HsDeclP
extractDecs' dec
= do
case dec of
Left errMsg -> error errMsg
Right (decl,_) -> decl
-- pad :: [ [a] ] -> [ [a] ]
count list = maximum ( map length list )
pad list = map (pad' (count list)) list
where
pad' count entry = entry ++ (replicate count (head entry))
{- convergeArguments attempts to converge the arguments of the pairs wherever possible.
The selected functions must have the same set of patterns; if not,
convergeArguments returns an error. Otherwise, the types of each function are taken;
if the types match, only the first set of arguments is used. Otherwise, do nothing.
It is sensible to let the user instantiate patterns so that a more general instance converges
with a more specific type. -}
{-
data FunEntity = Match [HsPatP] HsExpP [HsDeclP] PNT | Null
deriving (Eq, Show)
type Fun = [ FunEntity ]
-- we can "pair" an arbitrary number of matches together so
-- we can't use a tuple. We don't know the number of
-- function to fuse!
type PairFun = [ Fun ] -}
-- convergeArguments :: (Monad m) => String -> GHC.Session -> PairFun -> m Bool
isConvergeArguments _ _ [] = return False
isConvergeArguments fileName modName (ent:ents)
= do -- entType <- typeOf modName ses ent
entsType <-typeOf fileName modName ent
AbstractIO.putStrLn $ show entsType
-- AbstractIO.putStrLn $ show (map hsPNs (settleTypes entsType))
--let res = myAll (map pNtoName (map hsPNs (settleTypes entsType)))
let res = myAll entsType
return res
where
-- myAll :: Eq a => [a] -> Bool
myAll :: [ [String] ] -> Bool
myAll [] = False
myAll (l:ls) = all (==l) ls
settleType :: HsDeclP -> HsDeclP
settleType (Dec (HsTypeSig x _ y types))
= (Dec (HsTypeSig x [] y types))
typeOf :: (Monad m) => String -> String -> [FunEntity] -> m [ [String] ]
typeOf fileName modName [] = return []
typeOf fileName modName (m:ms)
= do
sig <- getSigOmitLast modName (nameFromMatch m) fileName
let sig' = hsPNs (settleType sig)
let sig'' = map pNtoName sig'
sigs <- typeOf fileName modName ms
return (sig'' : sigs)
nameFromMatch :: FunEntity -> String
nameFromMatch (Match _ _ _ pnt) = pNTtoName pnt
nameFromMatch Null = ""
pairMatches :: [ Fun ] -> PairFun
pairMatches [] = []
pairMatches [ e] = [ e ]
pairMatches (e:es)
= ((take 1 e) ++ (concat (map (take 1) es))) : (pairMatches (filter (/=[]) (((gtail "e" e):(map (drop 1) es)))))
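-- For example (schematically, with abstract match values): pairing the
-- equation lists of two functions transposes them, so
-- pairMatches [[m1,m2],[n1,n2]] == [[m1,n1],[m2,n2]].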
extractDecl :: (HsDeclP, Bool) -> (Fun, Bool)
-- extractDecl :: HsDeclP -> ( [[HsPatP]], [HsExpP], [HsDeclP] , PNT)
extractDecl (d@(Dec (HsPatBind loc p (HsBody e) ds)),b) = ([Match [] e ds (patToPNT p)],b)
extractDecl ((Dec (HsFunBind loc (d:dss ))),b) = (extractMatches (d:dss),b)
where
extractMatches [] = []
extractMatches ((HsMatch l i1 ps (HsBody e) ds):ms)
= (Match ps e ds i1) : extractMatches ms
extractMatches ((HsMatch l i1 ps (HsGuard g) ds):ms)
= (Guard ps g ds i1) : extractMatches ms
{-
= ((ps : ps2) , (e : e2) , (ds ++ ds2), i1)
where
(ps2, e2, ds2, _) = extractMatches ms -}
-- parseFile :: String -> HsModuleP -> [Either String (HsDeclP, Bool)]
parseName [] _ = error "error in parseName!"
parseName ('>':xs) mod = checkCursor' fileName (read row) (read col) mod
where
(fileName, rest) = parseFileName xs
(row, rest2) = parseRow rest
(col, rest3) = parseCol rest2
parseFile :: String -> HsModuleP -> [Either String (HsDeclP, Bool)]
parseFile [] _ = []
parseFile ('>':xs) mod = (checkCursor fileName (read row) (read col) mod) : (parseFile (tail rest3) mod)
where
(fileName, rest) = parseFileName xs
(row, rest2) = parseRow rest
(col, rest3) = parseCol rest2
parseFileName :: String -> (String, String)
parseFileName [] = ([], [])
parseFileName xs = ((takeWhile (/= '<') xs), (dropWhile (/= '<') xs))
parseRow :: String -> (String, String)
parseRow [] = ([], [])
parseRow xs = ((takeWhile (/= '%') (tail xs')), dropWhile (/= '%') (tail xs'))
where
xs' = dropWhile (/= '%') xs
parseCol :: String -> (String, String)
parseCol [] = ([], [])
parseCol xs = ((takeWhile (/= '&') (tail xs') ), dropWhile (/= '&') (tail xs') )
where
xs' = dropWhile (/= '&') xs
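-- For example (hypothetical marker text): for the marker ">Foo.hs<%3%&7&",
-- parseFile strips the leading '>', parseFileName then yields
-- ("Foo.hs", "<%3%&7&"), and parseRow/parseCol recover "3" and "7".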
--check whether the cursor points to the beginning of the datatype declaration
--taken from RefacADT.hs
checkCursor :: String -> Int -> Int -> HsModuleP -> Either String (HsDeclP, Bool)
checkCursor fileName row col mod
= case locToPName of
(Nothing, _) -> Left ("Invalid cursor position. Please place cursor at the beginning of the definition!")
(Just decl, b) -> Right (decl, b)
where
locToPName
= case res of
Nothing -> (find (definesPNT (locToPNT fileName (row, col) mod)) (hsDecls mod), True)
_ -> (res, False)
res = find (defines (locToPN fileName (row, col) mod)) (concat (map hsDecls (hsModDecls mod)))
definesPNT pnt d@(Dec (HsPatBind loc p e ds))
= findPNT pnt d
definesPNT pnt d@(Dec (HsFunBind loc ms))
= findPNT pnt d
definesPNT _ _ = False
checkCursor' :: String -> Int -> Int -> HsModuleP -> HsDeclP
checkCursor' fileName row col mod
= case locToPName of
Nothing -> error "Invalid cursor position. Please place cursor at the beginning of the definition!"
Just decl -> decl
where
locToPName = find (definesPNT (locToPNT fileName (row, col) mod)) (hsDecls mod)
definesPNT pnt d@(Dec (HsPatBind loc p e ds))
= findPNT pnt d
definesPNT pnt d@(Dec (HsFunBind loc ms))
= findPNT pnt d
definesPNT _ _ = False
{-|
Takes the position of the highlighted code and returns
the function name, the list of arguments, the expression that has been
highlighted by the user, and any where\/let clauses associated with the
function.
-}
{-findDefNameAndExp :: Term t => [PosToken] -- ^ The token stream for the
-- file to be
-- refactored.
-> (Int, Int) -- ^ The beginning position of the highlighting.
-> (Int, Int) -- ^ The end position of the highlighting.
-> t -- ^ The abstract syntax tree.
-> (SrcLoc, PNT, FunctionPats, HsExpP, WhereDecls) -- ^ A tuple of,
-- (the function name, the list of arguments,
-- the expression highlighted, any where\/let clauses
-- associated with the function).
-}
--find the definition name whose sub-expression has been selected, and the selected sub-expression.
findDefNameAndExp toks beginPos endPos t
= fromMaybe (defaultPNT, defaultExp) (applyTU (once_tdTU (failTU `adhocTU` inMatch
`adhocTU` inPat)) t) --CAN NOT USE 'once_tdTU' here.
where --The selected sub-expression is in the rhs of a match
inMatch (match@(HsMatch loc1 pnt pats rhs ds)::HsMatchP)
| locToExp2 beginPos endPos toks rhs /= defaultExp
= Just (pnt, locToExp2 beginPos endPos toks rhs)
inMatch _ = Nothing
--The selected sub-expression is in the rhs of a pattern-binding
inPat (pat@(Dec (HsPatBind loc1 ps rhs ds))::HsDeclP)
| locToExp2 beginPos endPos toks rhs /= defaultExp
= if isSimplePatBind pat
then Just (patToPNT ps, locToExp2 beginPos endPos toks rhs)
else error "A complex pattern binding can not be generalised!"
inPat _ = Nothing
| kmate/HaRe | old/refactorer/RefacMerge.hs | bsd-3-clause | 35,107 | 397 | 24 | 12,150 | 10,186 | 5,420 | 4,766 | -1 | -1 |
import Control.Exception (assert)
main = assert False (putStrLn "OK")
| ezyang/ghc | testsuite/tests/driver/T13914/main.hs | bsd-3-clause | 71 | 1 | 7 | 10 | 30 | 14 | 16 | 2 | 1 |
main = putStrLn $ "Hello " ++ friends ++ family
where friends = "my friends"
family = " and family"
| beni55/ghcjs | test/fay/where.hs | mit | 110 | 2 | 7 | 30 | 36 | 16 | 20 | 3 | 1 |
-- file Spec.hs
import Test.Hspec
import Test.QuickCheck
import Control.Exception (evaluate)
import Euler40
-- 0.123456789101112131415161718192021...
main :: IO ()
main = hspec $ do
describe "Euler40" $ do
describe "digit" $ do
it "return correct digit in simple case" $ do
digit 1 `shouldBe` 1
digit 2 `shouldBe` 2
digit 3 `shouldBe` 3
digit 9 `shouldBe` 9
it "return correct digit in more complex case" $ do
digit 10 `shouldBe` 1
digit 11 `shouldBe` 0
digit 12 `shouldBe` 1
digit 13 `shouldBe` 1
digit 14 `shouldBe` 1
digit 15 `shouldBe` 2
digit 16 `shouldBe` 1
digit 17 `shouldBe` 3
digit 18 `shouldBe` 1
digit 19 `shouldBe` 4
it "return correct digit in more more complex case" $ do
digit 20 `shouldBe` 1
digit 21 `shouldBe` 5
digit 22 `shouldBe` 1
digit 23 `shouldBe` 6
digit 24 `shouldBe` 1
digit 25 `shouldBe` 7
digit 26 `shouldBe` 1
digit 27 `shouldBe` 8
digit 28 `shouldBe` 1
digit 29 `shouldBe` 9
| kirhgoff/haskell-sandbox | euler40/specs.hs | mit | 1,126 | 1 | 24 | 376 | 389 | 190 | 199 | 35 | 1 |
module LexerGen(lexerGen,OutputFun(..)) where
import RegExp(Trans(..),Transducer)
import DFA(DFA(..),renumberEdges,tokenClasses,showDFA)
import Minimalize
import CompileRegExp(compile)
import DetMachineToHaskell(dfaToHaskell,OutputFun(..))
import PPrint(pprint)
import qualified Data.Map as OM(fromList)
import Spec.HaskellChars(HaskellChar)
import Data.List(sort)
{-|
The lexer generator takes the name of the module to generate, the
name of the lexer function to export from that module and the regular
expression that defines the lexical syntax. It returns the generated Haskell
module as a string.
'lexerGen' also consults the command line arguments. If
the word nocc is present, it does not use character classes to reduce
the size of the code.
-}
lexerGen :: (Ord o,Show o,OutputFun o) =>
String -> String -> Transducer HaskellChar o -> [String] -> String
lexerGen moduleName functionName program args =
outputDFA (dfa2old (compile program))
where
outDFA = "dfa" `elem` args -- output the DFA or generate Haskell?
useCC = "nocc" `notElem` args -- use character classes?
outputDFA = if useCC then outputWithCharClasses else outputDetm Nothing
outputWithCharClasses (n,dfa) =
outputDetm (Just ccs) (n,renumberEdges ccs dfa)
where
charclasses = sort $ tokenClasses dfa
ccs = [(c,n)|(n,(cs,_))<-zip [(1::Int)..] charclasses,c<-cs]
outputDetm optccs dfa0 =
if outDFA
then showDFA dfa
else "\n-- Automatically generated code for a DFA follows:\n" ++
"--Equal states: "++show eqs++"\n"++
"{-# OPTIONS_GHC -O #-}\n" ++
pprint haskellCode
where
(eqs,dfa) = minimalize dfa0
haskellCode =
dfaToHaskell optccs moduleName
["Data.Char"
,"Language.Haskell.Lexer.Utils"] functionName dfa
{-+
A function to convert from the new to the old DFA representation...
-}
dfa2old dfa = ((1::Int,final),DFA (OM.fromList states))
where
final = [s|(s,(True,_))<-dfa]
states = map state dfa
state (n,(_,edges)) = (n,(input,output))
where
input = [(i,n)|(I i,n)<-edges]
output = [(o,n)|(O o,n)<-edges]
| yav/haskell-lexer | generator/src/LexerGen.hs | mit | 2,222 | 10 | 14 | 503 | 537 | 324 | 213 | 39 | 3 |
-- Intermission: Exercises
-- Given the following datatype, answer the following questions:
data Mood = Blah | Woot deriving Show
-- 1. What is the type constructor, or name of this type?
-- Mood
-- 2. If the function requires a Mood value, what are the values you could possibly use there?
-- Blah, or Woot
-- 3. We are trying to write a function changeMood to change Chris’s mood
-- instantaneously. So far, we’ve written a type signature
-- changeMood :: Mood -> Woot.
-- What’s wrong with that?
-- Woot is a data constructor, not a type, so 'Mood -> Woot' is not even a valid
-- signature. It also expresses the wrong thing: the function must be able to
-- return Blah as well, so the signature should be changeMood :: Mood -> Mood.
-- 4. Now we want to write the function that changes his mood. Given an input
-- mood, it gives us the other one. Fix any mistakes and complete the function:
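-- The corrected type signature (a hypothetical addition; the original file
-- left it out):
changeMood :: Mood -> Mood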
changeMood Blah = Woot
changeMood _ = Blah
-- 5. Enter all of the above — datatype (including the “deriving Show” bit),
-- your corrected type signature, and the corrected function into a source file.
-- Load it and run it in GHCi to make sure you got it right.
| diminishedprime/.org | reading-list/haskell_programming_from_first_principles/04_02.hs | mit | 1,069 | 0 | 5 | 207 | 47 | 33 | 14 | 3 | 1 |
module Data.CSV.Table.Types (
-- * Representation
Table (..)
, Row (..)
, Col (..)
, RowInfo
, TField (..)
, Order (..)
-- * Accessors
, getCols
, getRows
, lookupCol
-- * Parsing
, fromFile
, fromString
-- * Saving
, toFile
) where
import Text.Printf
import Text.CSV
import System.FilePath
import Control.Applicative ((<$>))
import Data.Maybe
import Data.List (sort, elemIndex)
import qualified Data.Map.Strict as M
newtype Col = C Field deriving (Eq, Ord, Show)
newtype Row = R [Field] deriving (Eq, Ord, Show)
type RowInfo = [(Col, Field)]
-----------------------------------------------------------------------------------
-- | Types
-----------------------------------------------------------------------------------
data Table = T { dim :: Int, cols :: [Col], body :: [Row]}
{-@ measure width :: Row -> Int
width (R xs) = (len xs) @-}
{-@ type ColsN N = {v:[Col] | (len v) = N} @-}
{-@ type RowN N = {r:Row | (width r) = N} @-}
{-@ data Table = T (dim :: Nat) (cols :: (ColsN dim)) (body :: [(RowN dim)]) @-}
{-@ getCols :: t:Table -> ListN Field {(dim t)} @-}
getCols t = [c | C c <- cols t]
{-@ getRows :: t:Table -> ListN Field {(dim t)} @-}
getRows t = [r | R r <- body t]
lookupCol :: Col -> RowInfo -> Field
lookupCol c cxs = fromMaybe err $ lookup c cxs
where
err = printf "lookupCol: cannot find %s in %s" (show c) (show cxs)
--------------------------------------------------------------------------------
-- | Field Sorts
--------------------------------------------------------------------------------
data TField = FStr | FInt | FDbl
deriving (Eq, Ord, Show)
data Order = Asc | Dsc
deriving (Eq, Ord, Show)
----------------------------------------------------------------------
-- | Converting to CSV
----------------------------------------------------------------------
fromCSV :: CSV -> Table
fromCSV [] = error "fromCSV: Empty CSV with no rows!"
fromCSV (r:rs) = T n cs b
where
n = length r
cs = [C x | x <- r]
b = mapMaybe (makeRow n) $ zip [0..] rs
makeRow :: Int -> (Int, Record) -> Maybe Row
makeRow n (i, xs)
| length xs == n = Just $ R xs
| empty xs = Nothing
| otherwise = error $ printf "Row %d does not have %d columns:\n%s" i n (show xs)
empty :: Record -> Bool
empty = null . unwords
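-- For example (a tiny hypothetical table): fromCSV [["name","age"],["ann","7"]]
-- yields T 2 [C "name", C "age"] [R ["ann","7"]]; a blank record is dropped
-- and a record of the wrong width is an error.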
toCSV :: Table -> CSV
toCSV t = [c | C c <- cols t] : [xs | R xs <- body t]
--------------------------------------------------------------------------------
-- | Parsing
--------------------------------------------------------------------------------
toFile :: FilePath -> Table -> IO ()
toFile f = writeFile f . show
fromFile :: FilePath -> IO Table
fromFile f = fromString f <$> readFile f
fromString :: FilePath -> String -> Table
fromString fp s = fromCSV $ parseCSV' fp s
parseCSV' fp s = case parseCSV fp s of
Right c -> c
Left e -> error $ printf "parseCSV': %s" (show e)
--------------------------------------------------------------------------------
-- | Printing
--------------------------------------------------------------------------------
instance Show Table where
show = printCSV . toCSV
| ucsd-progsys/csv-table | src/Data/CSV/Table/Types.hs | mit | 3,451 | 0 | 11 | 906 | 840 | 462 | 378 | 59 | 2 |
module OptCracker
( OptionSet
, crack
, usage
) where
import Data.List (find)
import System.Console.GetOpt
( OptDescr(..)
, ArgDescr (..)
, ArgOrder (..)
, getOpt
, usageInfo )
type OptionSet = (Bool, Maybe Int)
data CommandOption = Help | Port !String
deriving (Eq, Show)
crack :: [String] -> Either String OptionSet
crack args =
case getOpt Permute options args of
([], [], []) -> Right (True, Nothing)
(opts, [], []) -> Right $ determineOptions opts
(_, nonOpts, []) -> Left $ unwords nonOpts
(_, _, errors) -> Left $ unwords errors
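-- For example (hypothetical argument lists):
-- crack ["-p", "4000"] == Right (False, Just 4000)
-- crack ["--help"] == Right (True, Nothing)
-- crack ["oops"] == Left "oops"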
usage :: String
usage = usageInfo "Usage: astral-mining-server [OPTIONS ...]\n" options
determineOptions :: [CommandOption] -> OptionSet
determineOptions opts =
let help = Help `elem` opts
port = maybePort opts
in (help, port)
maybePort :: [CommandOption] -> Maybe Int
maybePort opts = do
Port portString <- port `find` opts
maybeRead portString
where
port (Port _) = True
port _ = False
maybeRead :: Read a => String -> Maybe a
maybeRead x =
case reads x of
[(val, "")] -> Just val
_ -> Nothing
options :: [OptDescr CommandOption]
options =
[ Option ['h', '?'] ["help"] (NoArg Help) "This help screen"
, Option ['p'] ["port"] (ReqArg Port "NUM") "Server port number"
]
| AstralMining/astral-mining-server | src/OptCracker.hs | mit | 1,379 | 0 | 9 | 375 | 499 | 273 | 226 | 45 | 4 |
module Target
( Target
, load
, render
, expand
, theme
, ext
, debug
, write
, writeTemp
) where
import Control.Applicative
import Control.Monad
import Control.Dangerous hiding ( Warning, result )
import Control.Monad.Trans
import Data.Configger ( Config )
import Data.Focus hiding ( parse )
import Data.List
import Data.Maybe
import System.Directory
import System.FilePath
import System.FilePath.Utils hiding ( from )
import Text.Printf
import Text.Pandoc ( Pandoc )
-- Bookbuilder modules
import qualified Target.Config as Config
import qualified Target.Pandoc as Pandoc
import qualified Target.PDF as PDF
import Target.Template ( Template )
import qualified Target.Template as Template
data Target = Target
{ _name :: String
, _innerExt :: String
, _config :: Config
, _templates :: [Template] }
instance Show Target where
show t = printf "%s(%s→%s)" name from to where
(name, to) = splitExtension $ _name t
from = takeExtension $ _innerExt t
-- | Constants
defaultTheme :: String
defaultTheme = "default"
-- | Target loading
load :: Config -> FilePath -> DangerousT IO Target
load conf path = do
isDirectory <- liftIO $ doesDirectoryExist path
target <- if isDirectory
then loadDir conf path
else return $ simpleTarget conf path
let to = Pandoc.writerName $ _name target
when (null to) (warn $ UnrecognizedFormat $ _name target)
return target
simpleTarget :: Config -> FilePath -> Target
simpleTarget conf path = Target{ _name = takeFileName path
, _innerExt = takeExtension path
, _config = conf
, _templates = [] }
loadDir :: Config -> FilePath -> DangerousT IO Target
loadDir base dir = do
files <- filter (not . Config.isSpecial) <$> liftIO (ls dir)
templates <- catMaybes <$> mapM loadTemplate files
format <- pickFormat dir templates
let name = takeFileName dir
let to = Pandoc.writerName name
conf <- Config.merge base dir to
return Target{ _name = name
, _innerExt = format
, _config = conf
, _templates = templates }
loadTemplate :: FilePath -> DangerousT IO (Maybe Template)
loadTemplate path = do
result <- liftIO $ Template.load path
case result of
Left err -> warn (BadTemplate err) >> return Nothing
Right tmpl -> return $ Just tmpl
pickFormat :: String -> [Template] -> DangerousT IO String
pickFormat _ [] = return ""
pickFormat path tmpls = do
let name = takeFileName path
let allexts = map (takeExtension . Template.name) tmpls
let exts = sort $ nub $ filter (not . null) allexts
let tops = filter Template.isTop tmpls
when (length tops > 1) (warn $ MultipleTops name tops)
when (length exts > 1) (warn $ NonUniformExtensions name exts)
let fullname = if null tops then "" else Template.name $ head tops
let result = if null exts then fullname else head exts
when (null $ takeExtension result) (warn $ NoExtension path tmpls)
return $ takeExtension result
-- | Target manipulation
debug :: Target -> Bool
debug = Config.debug . _config
theme :: Target -> Maybe String
theme t = let name = dropExtension (_name t) in
if name == defaultTheme then Nothing else Just name
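-- For example (hypothetical profile names): a profile named "default.tex"
-- has no theme (Nothing), while "print.tex" has theme Just "print".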
ext :: Target -> String
ext = takeExtension . _name
-- | Section affectors
expand :: Target -> Focus -> [(String, String)] -> String
expand t foc vs = Template.render vars tmpl where
tmpl = Template.get (_templates t) foc
vars = vs ++ Config.vars (_config t)
render :: Target -> Pandoc -> String
render t doc = Pandoc.render to doc where
to = Pandoc.writerName $ _innerExt t
-- | Target output
write :: FilePath -> String -> Target -> IO ()
write dest text target = output text where
output | (to, from) == ("pdf", "latex") = PDF.outputLaTeX conf dest
| to == from = writeFile dest
| otherwise = Pandoc.write to conf dest . parse
conf = _config target
from = Pandoc.readerName $ _innerExt target
to = Pandoc.writerName $ _name target
parse = Pandoc.parse from
writeTemp :: FilePath -> String -> Target -> IO ()
writeTemp path text target = writeFile (path <.> _innerExt target) text
data Warning = BadTemplate Template.Error
| UnrecognizedFormat String
| MultipleTops String [Template]
| NonUniformExtensions String [String]
| NoExtension String [Template]
| IgnoringStyle String
| IgnoringResources String
instance Show Warning where
show (BadTemplate te) = show te
show (UnrecognizedFormat name) = "WARNING: " ++
printf "Unrecognized output format: %s\n" (takeExtension name) ++
printf "\tIn profile: %s\n" name ++
"\tOutput format .tex will be used."
show (MultipleTops name tops) = "WARNING: " ++
printf "Found multiple top-level templates in profile: %s\n" name ++
printf "\tThe following templates will always be ignored: %s\n"
(intercalate ", " $ map Template.name $ tail tops)
show (NonUniformExtensions name exts) = "WARNING: " ++
printf "Found multiple extensions in profile: %s\n" name ++
printf "\tUsing '%s', ignoring %s\n" (head exts)
(intercalate ", " $ tail exts)
show (NoExtension name tmpls) = "WARNING: " ++
printf "Unrecognized extension for profile: %s\n" name ++
printf "\tArising from the files: %s\n"
(intercalate ", " $ map Template.name tmpls)
show (IgnoringStyle name) = "WARNING: " ++
printf "Ignoring style file in %s\n" name
show (IgnoringResources name) = "WARNING: " ++
printf "Ignoring resource file in %s\n" name
| Soares/Bookbuilder | src/Target.hs | mit | 5,686 | 28 | 13 | 1,381 | 1,784 | 910 | 874 | 138 | 3 |
import Data.ByteString.Delta (diff, patch)
import Control.Applicative ((<$>))
import Control.Exception (evaluate)
import Control.Monad (replicateM, forM_)
import Data.Function (on)
import Data.List (sortBy)
import Data.String (IsString(fromString))
import Data.Word (Word8)
import Test.QuickCheck (Arbitrary(arbitrary), CoArbitrary(coarbitrary),
quickCheckWith, stdArgs, Args(maxSize),
choose)
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as C
newtype ByteString = Wrap {unwrap :: B.ByteString}
pack :: [Word8] -> ByteString
pack = Wrap . B.pack
unpack :: ByteString -> [Word8]
unpack = B.unpack . unwrap
instance Show ByteString where
show = show . unwrap
instance IsString ByteString where
fromString = Wrap . C.pack
instance Arbitrary ByteString where
arbitrary = pack <$> arbitrary
instance CoArbitrary ByteString where
coarbitrary = coarbitrary . unpack
prop_match :: ByteString -> ByteString -> Bool
prop_match (Wrap a) (Wrap b) = patch a (diff a b) == Right b
prop_equal :: ByteString -> Bool
prop_equal (Wrap s) =
let d = diff s s
in B.length d < 10 && patch s d == Right s
data Edit = Insert Int Word8
| Delete Int
type EditString = [Edit]
applyEditString :: EditString -> ByteString -> ByteString
applyEditString editString (Wrap string) =
let loop _ str [] = [str]
loop pos str (Insert i c : edits) =
let (a, b) = B.splitAt (i - pos) str
in a : B.singleton c : loop i b edits
loop pos str (Delete i : edits)
| pos <= i =
let (a, b) = B.splitAt (i - pos) str
in a : loop (i+1) (B.tail b) edits
| otherwise = loop pos str edits
editKey (Insert idx _) = (idx, 0 :: Int)
editKey (Delete idx) = (idx, 1 :: Int)
in Wrap $ B.concat $ loop 0 string (sortBy (compare `on` editKey) editString)
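-- For example (hypothetical edits): applying [Insert 1 120, Delete 2] to the
-- wrapped bytes of "abc" inserts byte 120 ('x') before original index 1 and
-- deletes original index 2, giving "axb".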
data Similar = Similar ByteString ByteString
deriving Show
instance Arbitrary Similar where
arbitrary = do
old <- arbitrary
let len = B.length $ unwrap old
-- Choose length of edit string, favoring small sizes.
c <- choose (0, len)
c' <- choose (0, c)
editString <- replicateM c' $ do
-- This is a little tricky. op = 0 means insert, while op = 1 means delete.
-- We can insert on indices 0..len, but we can only delete on 0..len-1.
-- If the string is empty, we can't delete.
op <- choose (0 :: Int, if len > 0 then 1 else 0)
pos <- choose (0, len - op)
case op of
0 -> Insert pos <$> arbitrary
_ -> return (Delete pos)
return $ Similar old (applyEditString editString old)
prop_match_similar :: Similar -> Bool
prop_match_similar (Similar (Wrap a) (Wrap b)) = patch a (diff a b) == Right b
try_to_leak :: IO ()
try_to_leak = forM_ [1..100 :: Int] $ \i ->
evaluate $ diff (B.empty) (B.replicate 1000000 (fromIntegral i))
main :: IO ()
main = do
quickCheckWith stdArgs {maxSize = 1000} prop_match
quickCheckWith stdArgs {maxSize = 1000} prop_match_similar
quickCheckWith stdArgs {maxSize = 1000} prop_equal
try_to_leak
| joeyadams/haskell-bytestring-delta | Tests.hs | mit | 3,309 | 9 | 16 | 943 | 1,123 | 598 | 525 | 75 | 4 |
module Main where
import Bio.Data.Bed
import Bio.Data.Bed.Utils
import Bio.Motif
import Bio.Seq.IO
import Conduit
import Data.Default (def)
import Data.Semigroup ((<>))
import Data.Version (showVersion)
import Options.Applicative
import Paths_bioinformatics_toolkit_apps (version)
import System.IO (stdout)
import Text.Printf
data Options = Options
{ genomeFile :: FilePath
, motifFile :: FilePath
, input :: FilePath
, p :: Double
} deriving (Show, Read)
parser :: Parser Options
parser = Options
<$> strArgument (metavar "GENOME")
<*> strArgument (metavar "MOTIF_MEME")
<*> strArgument (metavar "INPUT")
<*> option auto
( long "p-value"
<> short 'p'
<> value 1e-5
<> metavar "P-Value"
<> help "p-value cutoff. (default: 1e-5)" )
defaultMain :: Options -> IO ()
defaultMain opts = do
withGenome (genomeFile opts) $ \genome -> do
motifs <- map (mkCutoffMotif def (p opts)) <$> readMEME (motifFile opts)
runResourceT $ runConduit $
(streamBed (input opts) :: Source (ResourceT IO) BED3) .|
scanMotif genome motifs .| sinkHandleBed stdout
main :: IO ()
main = execParser opts >>= defaultMain
where
opts = info (helper <*> parser) ( fullDesc <>
header (printf "bioinformatics-toolkit-apps-v%s" (showVersion version)) )
| kaizhang/bioinformatics-toolkit | bioinformatics-toolkit-apps/app/MotifScan.hs | mit | 1,591 | 0 | 17 | 545 | 428 | 225 | 203 | 41 | 1 |
module TeX.Parser.Util
( unimplemented
, optionalSpace, optionalSpaces
, equals
, number, eightBitNumber, count
, integerVariable, IntegerVariable(IntegerParameter, CountDefToken, LiteralCount)
)
where
import Text.Parsec ((<|>), (<?>), choice)
import TeX.Category
import TeX.Count
import TeX.Parser.Expand
import TeX.Parser.Parser
import TeX.Parser.Prim
import TeX.Token
unimplemented :: TeXParser a
unimplemented = fail "not implemented"
optionalSpaces :: TeXParser ()
optionalSpaces =
(expand (categoryToken Space) >> optionalSpaces) <|>
(return ()) <?>
"optional spaces"
optionalSpace :: TeXParser ()
optionalSpace =
(expand (categoryToken Space) >> return ()) <|>
(return ()) <?>
"optional space"
equals :: TeXParser ()
equals =
(optionalSpaces >> expand (exactToken (CharToken '=' Other)) >> return ()) <|>
optionalSpaces <?>
"optional equals"
plusOrMinus :: TeXParser Integer
plusOrMinus =
(expand (exactToken (CharToken '+' Other)) >> return 1) <|>
(expand (exactToken (CharToken '-' Other)) >> return (-1)) <?>
"plus or minus"
optionalSigns :: TeXParser Integer
optionalSigns =
(((*) <$> plusOrMinus <*> optionalSigns) <* optionalSpaces) <|>
(optionalSpaces >> return 1) <?>
"optional sign"
internalInteger :: TeXParser Integer
internalInteger = unimplemented
integerConstant :: TeXParser Integer
integerConstant =
fst <$> recInteger <?> "integer constant"
where
digit = expand $ choice [exactToken (CharToken x Other) | x <- ['0'..'9']]
recInteger :: TeXParser (Integer, Integer)
recInteger =
(do
Just d <- charCode <$> digit
(rest, level) <- recInteger
return $ (d * 10 ^ level + rest, level + 1)) <|>
(return (0, 0))
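-- For example (hypothetical input stream): against the digits "123",
-- 'recInteger' returns (123, 3), i.e. the value together with the number of
-- digits consumed, so 'integerConstant' yields 123.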
hexadecimalConstant :: TeXParser Integer
hexadecimalConstant = unimplemented
octalConstant :: TeXParser Integer
octalConstant = unimplemented
characterToken :: TeXParser Integer
characterToken = unimplemented
normalInteger :: TeXParser Integer
normalInteger =
internalInteger <|>
(integerConstant <* optionalSpace) <|>
(expand (exactToken (CharToken '\'' Other)) >> octalConstant <* optionalSpace) <|>
(expand (exactToken (CharToken '"' Other)) >> hexadecimalConstant <* optionalSpace) <|>
(expand (exactToken (CharToken '`' Other)) >> characterToken <* optionalSpace) <?>
"normal integer"
coercedInteger :: TeXParser Integer
coercedInteger = unimplemented
unsignedNumber :: TeXParser Integer
unsignedNumber = normalInteger <|> coercedInteger <?> "unsigned number"
number :: TeXParser Integer
number = (*) <$> optionalSigns <*> unsignedNumber <?> "number"
eightBitNumber :: TeXParser Integer
eightBitNumber = do
value <- number
if value < 0 || value >= 256
then fail $ "number too large: " ++ (show value)
else return value
count :: TeXParser Count
count = do
value <- fromInteger <$> number
case value of
CountOverflow -> fail "number too large"
_ -> return value
data IntegerVariable =
IntegerParameter String |
CountDefToken String |
LiteralCount Integer
integerVariable :: TeXParser IntegerVariable
integerVariable =
integerParameter <|>
countDefToken <|>
literalCount <?>
"integer variable"
where
integerParameter = unimplemented
countDefToken = unimplemented
literalCount =
LiteralCount <$> (expand (exactToken (ControlSequence "count")) >> eightBitNumber)
| spicyj/tex-parser | src/TeX/Parser/Util.hs | mit | 3,361 | 0 | 16 | 582 | 988 | 526 | 462 | 101 | 2 |
module QuickSort where
-- http://en.literateprograms.org/Quicksort_(Haskell)
-- http://en.wikipedia.org/wiki/Quicksort
quickSort [] = []
quickSort [x] = [x]
quickSort (pivot:xs) = (quickSort smaller) ++ [pivot] ++ (quickSort larger)
where smaller = filter (<= pivot) xs
larger = filter (> pivot) xs
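-- For example: quickSort [3,1,2] == [1,2,3]; duplicates of the pivot end up
-- in the "smaller" half because the split is (<= pivot) / (> pivot).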
| millertime-homework/cs350report | haskell/QuickSort.hs | mit | 305 | 1 | 9 | 44 | 101 | 56 | 45 | 6 | 1 |
module TSPGraph (
presentUI
) where
import Graphics.Gloss
import TSPLib
import Data.Monoid
import Data.Functor
import Control.Arrow ((***))
imgWidth, imgHeight :: Int
imgWidth = 600
imgHeight = 600
main :: IO ()
main = do
(nodes, edges) <- parseInput
presentUI nodes edges
presentUI :: [Node] -> [Edge] -> IO ()
presentUI nodes edges =
let window = InWindow "TSP Visualize" (imgWidth, imgHeight) (10, 10)
(halfW, halfH) = (fromIntegral (-imgWidth)/2, fromIntegral (-imgHeight)/2)
translatedPic = scale 0.9 (-0.9) $ translate halfW halfH pic
(xrng, yrng) = (xRange nodes, yRange nodes)
pic = paintNodes nodes xrng yrng <>
paintEdges edges xrng yrng
in display window white translatedPic
scaleVal :: (Fractional a) => (a, a) -> a -> a -> a
scaleVal (xmin, xmax) scl x = scl * (x - xmin) / (xmax - xmin)
scaleVal' :: (Int, Int) -> Int -> Int -> Float
scaleVal' rng scl = scaleVal fltrng fltscl . fromIntegral
where fltrng = tupleFromIntegral rng
fltscl = fromIntegral scl
scaleTuple :: (Int, Int) -> (Int, Int) -> Int -> Int ->
(Int, Int) -> (Float, Float)
scaleTuple xrng yrng xmax ymax = scaleX *** scaleY
where scaleX = scaleVal' xrng xmax
scaleY = scaleVal' yrng ymax
paintNodes :: [Node] -> (Int,Int) -> (Int,Int) -> Picture
paintNodes [] _ _ = blank
paintNodes [_] _ _ = blank
paintNodes ns xr yr = pictures $ zipWith paintNode [(0::Int)..] ns
where scalePoint = scaleTuple xr yr imgWidth imgHeight
getcolor n = if n == 0 then red else black
paintNode n node = uncurry translate (scalePoint node) $
color (getcolor n) $
circle 5
paintEdges :: [Edge] -> (Int,Int) -> (Int,Int) -> Picture
paintEdges [] _ _ = blank
paintEdges [_] _ _ = blank
paintEdges ns xr yr = pictures $ map paintEdge ns
where scalePoint = scaleTuple xr yr imgWidth imgHeight
paintEdge (p1, p2) = color blue $ line [scalePoint p1, scalePoint p2]
parseInput :: IO ([Node], [Edge])
parseInput = do
(cfg:raw) <- lines <$> getContents
let [nn, _] = map read $ words cfg
rest = map (map read . words) raw
(ns, es) = splitAt nn rest
nodes = map (\[a,b] -> (a,b)) ns
edges = map (\[a,b,c,d] -> ((a,b), (c,d))) es
in return (nodes, edges)
| shouya/thinking-dumps | tsp/TSPGraph.hs | mit | 2,327 | 0 | 15 | 592 | 1,001 | 540 | 461 | 58 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Web.TodoMVC.Servant.API (
runServer
) where
import Network.Wai
import Network.Wai.Handler.Warp hiding (Connection)
import Servant
import Todo
import Web.TodoMVC.Servant.Shared
-- | server
--
server :: Store -> Server LnAPI
server store =
serveDirectory "./html"
:<|> serveDirectory "./dist"
:<|> serveDirectory "./bower_components"
:<|> runApp store listTodos
:<|> runApp store . addTodo
:<|> runApp store clearTodos
:<|> runApp_Maybe store . findTodoById
:<|> runApp_Maybe store . removeTodo
:<|> apply2 updateTodo store -- bleh
-- | app
--
app :: Store -> Application
app store = serve todoAPI $ server store
-- | runServer
--
-- runs the API servers on:
-- http://localhost:1080
--
runServer :: IO ()
runServer = do
store <- newBigState
run 1080 $ app store
| adarqui/todomvc-haskell-servant-purescript | haskell_src/Web/TodoMVC/Servant/API.hs | mit | 1,064 | 0 | 16 | 246 | 219 | 119 | 100 | -1 | -1 |
module GHCJS.DOM.IDBFactory (
) where
| manyoo/ghcjs-dom | ghcjs-dom-webkit/src/GHCJS/DOM/IDBFactory.hs | mit | 40 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
module Target (
Target, target, context, builder, inputs, outputs, trackArgument
) where
import Data.Char
import Data.List.Extra
import qualified Hadrian.Target as H
import Hadrian.Target hiding (Target)
import Builder
import Context
type Target = H.Target Context Builder
-- | Some arguments do not affect build results and therefore do not need to be
-- tracked by the build system. A notable example is "-jN" that controls Make's
-- parallelism. Given a 'Target' and an argument, this function should return
-- 'True' only if the argument needs to be tracked.
trackArgument :: Target -> String -> Bool
trackArgument target arg = case builder target of
(Make _) -> not $ threadArg arg
_ -> True
where
threadArg s = dropWhileEnd isDigit s `elem` ["-j", "MAKEFLAGS=-j", "THREADS="]
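-- Illustrative behaviour (hypothetical arguments): for a 'Make' builder,
-- "-j8", "MAKEFLAGS=-j4" and "THREADS=2" reduce to the listed prefixes once
-- trailing digits are dropped and are therefore not tracked, while e.g.
-- "--no-print-directory" is tracked; for any non-Make builder every argument
-- is tracked.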
| izgzhen/hadrian | src/Target.hs | mit | 816 | 0 | 9 | 157 | 165 | 97 | 68 | 14 | 2 |
{-# LANGUAGE CPP, NoImplicitPrelude, PackageImports #-}
module Foreign.Marshal.Unsafe.Compat (
module Base
) where
import "base-compat" Foreign.Marshal.Unsafe.Compat as Base
| haskell-compat/base-compat | base-compat-batteries/src/Foreign/Marshal/Unsafe/Compat.hs | mit | 177 | 0 | 4 | 21 | 25 | 19 | 6 | 4 | 0 |
data BalTree a = Empty
| TwoNode (BalTree a) a (BalTree a) a (BalTree a)
| OneNode (BalTree a) a (BalTree a)
| TwoLeaf a a
| OneLeaf a
-- deriving(Show)
-- declare BalTree a to be an instance of Show
instance (Show a) => Show (BalTree a) where
  -- will start with a '<' before the root
  -- and put a ':' at the beginning of each line
show t = "< " ++ replace '\n' "\n: " (treeshow "" t)
where
-- treeshow pref Tree
-- shows a tree and starts each line with pref
-- We don't display the Empty tree
treeshow pref Empty = ""
-- Leaves
treeshow pref (OneLeaf leftVal) =
(pshow pref leftVal)
treeshow pref (TwoLeaf leftVal rightVal) =
(pshow pref leftVal)
++ "\n" ++
(pshow pref rightVal)
-- -- Nodes
treeshow pref (OneNode leftTree middle rightTree) =
(pshow pref middle)
++ "\n" ++
(showSon pref "|--" "| " leftTree)
++ "\n" ++
(showSon pref "`--" " " rightTree)
      treeshow pref (TwoNode leftTree leftVal middleTree rightVal rightTree) =
                (pshow pref leftVal)
                ++ "\n" ++
                (showSon pref "|--" "| " leftTree)
                ++ "\n" ++
                (pshow pref rightVal)
                ++ "\n" ++
                (showSon pref "|--" "| " middleTree)
                ++ "\n" ++
                (showSon pref "`--" " " rightTree)
-- shows a tree using some prefixes to make it nice
showSon pref before next t =
pref ++ before ++ treeshow (pref ++ next) t
-- pshow replaces "\n" by "\n"++pref
      pshow :: Show b => String -> b -> String
pshow pref x = replace '\n' ("\n"++pref) (show x)
-- replaces one char by another string
replace c new string =
concatMap (change c new) string
where
change c new x
| x == c = new
| otherwise = x:[] -- "x"
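-- Example (illustrative): @print (OneNode Empty 10 Empty)@ produces
--
-- < 10
-- : |--
-- : `--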
main :: IO()
main = do
-- Creation
print $ (OneNode Empty 10 Empty)
-- Deletion
| Saevon/Recipes | haskell/23tree.hs | mit | 2,006 | 0 | 19 | 685 | 594 | 305 | 289 | 44 | 1 |
{-# LANGUAGE LambdaCase #-}
-- |
-- Module : Butter.Core.Torrent
-- Copyright : Pedro Tacla Yamada
-- License : MIT (see LICENSE)
--
-- Maintainer : Pedro Tacla Yamada <[email protected]>
-- Stability : unstable
-- Portability : unportable
--
-- The main `Torrent` data type
module Butter.Core.Torrent where
import Butter.Core.Block
import Butter.Core.MetaInfo
import Butter.Core.Peer
import Butter.Core.PeerWire
import Control.Applicative ((<$>), (<*>))
import Control.Concurrent (ThreadId)
import Control.Concurrent.STM -- (TVar, newTVarIO, readTVarIO)
import Control.Monad.IO.Class
import qualified Data.ByteString as ByteString (ByteString)
import qualified Data.ByteString.Char8 as Char8 (unpack)
import qualified Data.Map as Map (Map, empty)
import qualified Data.Vector as Vector -- ((!))
import Network.HTTP.Client (Manager, defaultManagerSettings, newManager)
import Network.Socket (PortNumber(..))
data TorrentStage = TStopped | TDownloading | TCompleted
deriving(Eq, Ord, Show)
data TorrentStatus = TorrentStatus { tsStage :: TorrentStage
, tsDownloaded :: Integer
, tsUploaded :: Integer
}
deriving(Eq, Ord, Show)
-- |
-- Options for tracker announce querying
data TrackerClientOptions =
TrackerClientOptions { cManager :: Manager
-- ^ An @HTTP.Client@ 'Manager'
, cPeerId :: PeerId
-- ^ The local peer's id
, cPort :: PortNumber
-- ^ The port the local peer is listening at
, cAnnounceUrl :: String
-- ^ The announce URL to hit
, cInfoHash :: ByteString.ByteString
-- ^ The "info_hash" to query for
, cInterval :: Maybe Int
-- ^ The interval between queries in seconds. Will use
                         -- the interval suggested by the tracker if set to
-- @Nothing@. Will be ignored if bigger than the
-- tracker's minimum
, cNumwant :: Integer
-- ^ The number of peers to ask for, on each query
, cLimit :: Int
-- ^ The limit of peers to ask for see
-- 'startTrackerClient' for more information
}
data TorrentDownload =
TorrentDownload { tdOptions :: TrackerClientOptions
, tdMetaInfo :: MetaInfo
, tdStatus :: TVar TorrentStatus
, tdPeers :: TVar (Map.Map PeerId Peer)
, tdPeersState :: TVar (Map.Map PeerId PeerState)
, tdPieceStatus :: Vector.Vector (TVar PieceStatus)
}
data PeerState = PeerState { psConnection :: TChan PeerMessage
}
data PieceStatus = PieceStatus { pPeersHave :: [PeerId]
, pBlocks :: Map.Map BlockId (PeerId, BlockStatus)
, pCompleted :: Bool
}
data BlockStatus = BSDownloading ThreadId
| BSErrored
| BSDone
newDownloadFromFile :: MonadIO m => FilePath -> m TorrentDownload
newDownloadFromFile fp = do
(mi, pid, st, m1, m2) <- liftIO $ (,,,,) <$> readMetaInfoFile fp
<*> newPeerId
<*> newTStatusTVar
<*> newTVarIO Map.empty
<*> newTVarIO Map.empty
opts <- clientOptions pid (PortNum 3000) mi
return $ TorrentDownload opts mi st m1 m2 Vector.empty
-- |
-- For most use cases, using this helper function should give you sensible
-- defaults for the 'ClientOptions'
clientOptions :: MonadIO m => PeerId -> PortNumber -> MetaInfo
-> m TrackerClientOptions
clientOptions pid port metaInfo = do
m <- liftIO $ newManager defaultManagerSettings
return $ TrackerClientOptions m pid port announce hash Nothing 30 100
where announce = Char8.unpack $ miAnnounce metaInfo
hash = fiHash $ miInfo metaInfo
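-- A usage sketch (peer id, port and @metaInfo@ are placeholders):
--
-- > pid  <- newPeerId
-- > opts <- clientOptions pid (PortNum 6881) metaInfo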
newTStatusTVar :: IO (TVar TorrentStatus)
newTStatusTVar = newTVarIO $ TorrentStatus TDownloading 0 0
| yamadapc/butter-core | src/Butter/Core/Torrent.hs | mit | 4,457 | 0 | 14 | 1,628 | 745 | 438 | 307 | 63 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE StandaloneDeriving #-}
module BBQ.Sitemap where
import Prelude hiding (id, (.))
import Control.Category (Category(id, (.)))
import Control.Monad (liftM)
import Data.Data (Data, Typeable)
import Data.Text (Text)
import qualified Data.Text as T
import Text.Hamlet
import Text.Boomerang.TH (makeBoomerangs)
import Web.Routes (PathInfo(..), MonadRoute, URL, askRouteFn)
import Web.Routes.Boomerang
import Data.Accounts
import Data.RecordPool
import Data.Sheets
deriving instance Read VCode
deriving instance PathInfo VCode
data Sitemap
= Home
| NewRegistration
| Registration VCode
| Authentication
| Dashboard
| IgniteFire
| Upload ProblemId
| Problem ProblemId
| ViewSheets ProblemId
| ViewASheet ProblemId SheetId
deriving (Eq, Ord, Read, Show, Data, Typeable)
$(makeBoomerangs ''Sitemap)
sitemap :: Router () (Sitemap :- ())
sitemap =
( rHome
<> lit "register" . registration
<> lit "auth" . rAuthentication
<> lit "dashboard" . rDashboard
<> lit "bbq" . bbqOps
<> lit "problems" . rProblem </> problemIdParams
<> rViewASheet . ("sheet" </> problemIdParams </> sheetIdParams)
)
where registration = rNewRegistration </> "new"
<> rRegistration </> vcodeParams
bbqOps = rIgniteFire </> "start"
<> rUpload </> "upload" </> problemIdParams
<> rViewSheets </> "sheets" </> problemIdParams
vcodeParams :: Router r (VCode :- r)
vcodeParams = xmaph (VCode . T.unpack) (Just . T.pack . unVCode) anyText
--problemIdParams :: Router () (ProblemId :- ())
problemIdParams :: Router r (ProblemId :- r)
problemIdParams = xmaph (ProblemId) pidToInt int
where pidToInt (ProblemId x) = if and [x >= 1, x <= 9]
then Just x
else Nothing
sheetIdParams :: Router r (SheetId :- r)
sheetIdParams = xmaph (SheetId) (Just . unSheetId) int
askRouteFn' :: MonadRoute m => m (URL m -> [(Text, Text)] -> Text)
askRouteFn' = liftM convert $ askRouteFn
where convert routeFn = (\url params -> routeFn url $ map (\(t1, t2) -> (t1, Just t2)) params)
siteLayout' :: String -> HtmlUrl Sitemap -> [String] -> [String] -> HtmlUrl Sitemap
siteLayout' title body stylesheets' scripts' = $(hamletFile "views/hamlets/layout.hamlet")
where stylesheets = "/static/css/general.css"
: stylesheets'
scripts = "/static/js/plugins.js"
: scripts'
siteLayout title body = siteLayout' title body [] []
| yan-ac/bbq.yan.ac | BBQ/Sitemap.hs | mit | 2,783 | 0 | 19 | 680 | 774 | 426 | 348 | -1 | -1 |
module Latex2MathML.Utils.Functions(parseArguments,processContentOfFile,saveToFiles,readContentAndProcess) where
import Latex2MathML.Generator.Main
import Control.Monad.Trans.Either
import Control.Monad.Error.Class
import Data.Algorithms.KMP
import Latex2MathML.Scanner.Main
import Latex2MathML.Parser.Main
-- ARGUMENTS PROCESSING
parseArguments :: [String] -> EitherT String IO [(String,String)]
parseArguments [] = throwError $ "Empty argument list. " ++ programUsage
parseArguments lst
| length lst `mod` 2 == 0 = return $ parseArguments' lst
| otherwise = throwError $ "Bad argument list. " ++ programUsage
parseArguments' :: [String] -> [(String,String)]
parseArguments' [] = []
parseArguments' (filename:rootname:xs) = (filename,rootname) : parseArguments' xs
programUsage :: String
programUsage = "Program usage: ./program firstTexFilePath firstOutputRootFilename [secondTexFilePath ...]"
-- FILES PROCESSING
readContentAndProcess :: (String,String) -> IO ()
readContentAndProcess (filename,rootoutputname) = do
content <- readFile filename
eitherT print (saveToFiles rootoutputname) (processContentOfFile content)
processContentOfFile :: String -> EitherT String IO [String]
processContentOfFile content =
case getMathElementsIndices content of
[] -> throwError "No math elements in passed file"
lst -> splitFileAt content lst
getMathElementsIndices :: String -> [(Int,Int)]
getMathElementsIndices content = zip (concat $ map (\x -> map (+ length x) (match (build x) content)) ["begin{math}","\\["]) (concat $ map (\x -> match (build x) content) ["\\end{math}","\\]"]) ++ (getIndicesFromShorthand content)
getIndicesFromShorthand :: String -> [(Int,Int)]
getIndicesFromShorthand content = getTuplesFromList $ match (build "$$") content
getTuplesFromList :: [Int] -> [(Int,Int)]
getTuplesFromList [] = []
getTuplesFromList (x1:x2:xs) = (x1+2,x2) : getTuplesFromList xs
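-- Illustrative: if "$$" is matched at offsets [5,20,40,60], the result is
-- [(7,20),(42,60)]; each start is shifted past the two-character "$$"
-- delimiter, so the spans cover only the maths between the markers.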
splitFileAt :: String -> [(Int,Int)] -> EitherT String IO [String]
splitFileAt content lst = generateMLFromStrings $ splitFileAt' content lst
splitFileAt' :: String -> [(Int,Int)] -> [String]
splitFileAt' _ [] = []
splitFileAt' content ((x1,x2):xs) = take (x2-x1) (drop (x1) content) : splitFileAt' content xs
generateMLFromStrings :: [String] -> EitherT String IO [String]
generateMLFromStrings = mapM (\x -> scan x >>= parse >>= generate)
saveToFiles :: String -> [String] -> IO ()
saveToFiles rootName lst = saveToFiles' lst rootName 1
saveToFiles' :: [String] -> String -> Int -> IO ()
saveToFiles' [] _ _ = return ()
saveToFiles' (x:xs) fileRoot number = do
saveToFile (fileRoot ++ show number ++ ".xhtml") x
saveToFiles' xs fileRoot (number+1)
saveToFile :: String -> String -> IO ()
saveToFile filename content = do
writeFile filename content
print $ "File " ++ filename ++ " generated!" | fortun13/latex2mathml | src/Latex2MathML/Utils/Functions.hs | gpl-2.0 | 2,839 | 0 | 16 | 415 | 980 | 520 | 460 | 51 | 2 |
#!/usr/bin/runhugs
-- Calculates basic LCD monitor parameters from diagonals.
--
-- Copyright (c) bkil, 2009
-- License: GNU GPL v2 (or later), see the
-- attached gpl-2.0.txt for details.
--
--Changelog:
-- 2009.01.20 v0.0 first release
-- 2009.01.22 v0.1 refactoring: cleanup, typo in comments
-- 2009.01.29 v0.2 refactoring: friendlier names, Html type
-- 2009.02.04 v0.3 refactoring: tidied comments, improved formatting,
-- fixed a few names (CamelCase), used html more
-- 2010.04.07 v0.3.1 DPI, EEE, widescreens added
-- 2013.08.29 v0.3.2 HD added
--HTML substring type
newtype HtmlChar = HtmlChar Char
type Html = [HtmlChar]
instance Show HtmlChar where
show (HtmlChar ch) = [ch]
showHtml :: Html -> String
showHtml = concat . map show
--does conversion with simple HTML escaping of main body text
toHtml :: String -> Html
toHtml = concat . map code where
code '&' = html "&"
code '<' = html "<"
code '>' = html ">"
code c = html [c]
--constructor for constants (no conversion!)
html :: String -> Html
html = map HtmlChar
--similar to unlines
unlinesHtml :: [Html] -> Html
unlinesHtml s = concat . zipWith (++) s $ repeat $ html ['\n']
--the type of the data structure used by the parameter routine
data UnionParameter =
ParamInt Int String | -- an Int with a measure
ParamFloat Float Int String | -- a Float with a measure
ParamRatio Int Int -- a ratio
instance Show UnionParameter where
show (ParamInt i s) = show i ++ s
show (ParamFloat x digits s) = showPrecision x digits ++ s
show (ParamRatio a b) = show a ++ ":" ++ show b
--an optional (ugly) routine for outputting in limited precision
showPrecision :: Float -> Int -> String
showPrecision x digits
| digits == 0 = show $ myRound x
| otherwise = int ++ "." ++ frac where
(int,frac) = if shifted_x < shift -- almost: x < 1
then replacehd0 $ int_to_2str (shifted_x + shift)
else int_to_2str shifted_x
shift = 10 ^ digits
shifted_x = myRound $ x * fromIntegral shift
replacehd0 ([],b) = ([],b) -- never happens
replacehd0 ((_:a),b) = (('0':a),b)
int_to_2str y = splitAt (length sy - digits) sy where
sy = show y
--a helper for showPrecision
myRound :: Float -> Int
myRound = floor . (0.5+)
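--a few worked examples of showPrecision (illustrative):
-- showPrecision 3.14159 2 == "3.14"
-- showPrecision 0.05 1 == "0.1" (the leading digit is re-zeroed by replacehd0)
-- showPrecision 94.3 0 == "94"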
--the routine that calculates all the parameters
-- input: diagonal in inches, horizontal pixels, vertical pixels
--
getParams :: (Float, Int, Int) -> [UnionParameter]
getParams (diag_inch, xi, yi) =
[ParamFloat diag_inch 1 "in", ParamRatio a b,
ParamInt xi "px", ParamInt yi "px",
ParamFloat dpi 0 "dpi", ParamFloat dotPitch 3 "mm",
ParamFloat megaPix 2 "Mpx",
ParamFloat area_cm 0 "cm^2",
ParamFloat w 1 "mm", ParamFloat h 1 "mm"
] where
a = xi `div` gcd xi yi ; b = yi `div` gcd xi yi
x = fromIntegral xi ; y = fromIntegral yi
diag_mm = diag_inch * inch_mm
inch_mm = 25.4
aspect = x/y
calcDiag = sqrt $ aspect*aspect + 1*1
scale = diag_mm/calcDiag
w = scale*aspect ; h = scale*1
area_cm = w*h / 1e2
megaPix = x*y / 1e6
dotPitch = w/x
dpi = x / (w/inch_mm)
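--a worked example (approximate values): getParams (24, 1920, 1200) yields
-- roughly 24.0in, 8:5, 1920px, 1200px, ~94dpi, ~0.269mm dot pitch, 2.30Mpx,
-- ~1670cm^2 and a visible area of about 516.9mm x 323.1mm.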
--presentation alternative #1: list format in plain text
listTable :: Show a=> [[a]] -> String
listTable = unlines . map (listSemi . map show) where
listSemi = foldl1 (\a b -> a ++ "; " ++ b)
--presentation alternative #2: HTML (uses toHtml, html, unlinesHtml)
-- (you could use concat instead of unlines to suppress line breaks)
htmlTable :: Show a=> [[a]] -> Html
htmlTable table = unlinesHtml lines where
lines = [html "<table>"] ++ rows table ++ [html "</table>"]
rows = map (\r-> html " <tr>" ++ (concat . cols)r ++ html "</tr>")
cols = map (\c-> html " <td>" ++ (toHtml . show)c ++ html "</td>")
--outputs a few parameters of common computer displays
main = putStr result where
result = ( showHtml $ htmlTable results ) ++ listTable results
results = map getParams modes
modes = [
(7, 800, 480),
(13, 640, 480), (13, 800, 600),
(15, 1024, 768),
(15.6,1360, 768),
(17, 1280,1024),
(18.5,1360, 768),
(19, 1440, 900), (19,1280,1024),
(19.5,1600, 900),
(20, 1600,1200),
(22, 1680,1050), (21.5,1920,1080), (22,1600,1200),
(23.6,1920,1080),
(24, 1920,1200)
]
| google-code/bkil-open | volatile/calc/lcd_geom.hs | gpl-2.0 | 4,460 | 0 | 14 | 1,168 | 1,344 | 748 | 596 | 83 | 4 |
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable #-}
module Rewriting.Roller where
import Rewriting.TRS
import Autolib.Util.Zufall
import Autolib.Set
import Autolib.ToDoc
import Autolib.Reader
import Data.Typeable
roll_trs :: ( Symbol v, Symbol c )
=> Config v c
-> IO ( TRS v c )
roll_trs conf = do
rs <- sequence $ replicate ( num_rules conf ) $ roll_rule conf
let vs = unionManySets $ do
r <- rs
[ vars $ lhs r, vars $ rhs r ]
return $ TRS
{ variablen = setToList vs
, regeln = rs
}
roll_rule :: ( Symbol v, Symbol c )
=> Config v c
-> IO ( Rule ( Term v c ))
roll_rule conf = do
num_lvars <- randomRIO ( 0, num_vars conf )
lvars <- sequence $ replicate num_lvars $ eins $ variables conf
num_rvars <- randomRIO ( 0, num_vars conf )
rvars <- if 0 == num_lvars then return []
else sequence $ replicate num_rvars $ eins lvars
let sig = mksig $ signature conf
ls <- randomRIO ( num_lvars + 1, max_size conf )
lt <- roll_ground_term sig ls
l <- put_vars lvars lt
rs <- randomRIO ( num_rvars + 1, max_size conf )
rt <- roll_ground_term sig rs
r <- put_vars rvars rt
return $ Rule { lhs = l, rhs = r, strict = True }
roll_ground_term :: Symbol c
=> [ c ]
-> Int -- ^ size
-> IO ( Term v c )
roll_ground_term sig s | s <= 1 = do
f <- eins $ filter ( (== 0) . arity ) sig
return $ Node f []
roll_ground_term sig s = do
f <- eins $ filter ( ( > 0) . arity ) sig
xs <- sequence $ do
k <- [ 1 .. arity f ]
return $ do
s <- randomRIO ( 1, pred s `div` arity f )
roll_ground_term sig s
return $ Node f xs
-- | put variables at random positions
put_vars vs t = do
let leaves = leafpos t
ps <- selektion ( length vs `min` length leaves ) $ leaves
return $ pokes t $ zip ps $ map Var vs
-------------------------------------------------------------------------
data ( Symbol v, Symbol c ) => Config v c =
Config { signature :: [ ( c, Int) ] -- ^ with arity
, variables :: [ v ]
, num_rules :: Int
, max_size :: Int -- ^ of term in rules
, num_vars :: Int -- ^ of variables in single rule
}
deriving ( Eq, Ord, Typeable )
example :: Config Identifier Identifier
example = Config
{ signature = read "[(a,0),(b,0),(f,1),(g,2)]"
, variables = read "[X,Y,Z]"
, num_rules = 3
, max_size = 8
, num_vars = 2
}
mksig :: Symbol c => [(c,Int)] -> [c]
mksig cas = do ( c,a) <- cas ; return $ set_arity a c
$(derives [makeReader, makeToDoc] [''Config])
-- local variables:
-- mode: haskell
-- end;
| Erdwolf/autotool-bonn | src/Rewriting/Roller.hs | gpl-2.0 | 2,780 | 0 | 18 | 894 | 993 | 506 | 487 | 72 | 2 |
{-# LANGUAGE CPP, TypeFamilies, DeriveDataTypeable #-}
module PGIP.GraphQL.Result.SignatureMorphism where
import PGIP.GraphQL.Result.IdReference
import PGIP.GraphQL.Result.LogicMapping
import PGIP.GraphQL.Result.Mapping
import PGIP.GraphQL.Result.SymbolMapping
import Data.Data
data SignatureMorphism =
SignatureMorphism { id :: Int
, logicMapping :: LogicMapping
, mappings :: [Mapping]
, source :: IdReference
, symbolMappings :: [SymbolMapping]
, target :: IdReference
} deriving (Show, Typeable, Data)
| spechub/Hets | PGIP/GraphQL/Result/SignatureMorphism.hs | gpl-2.0 | 632 | 0 | 9 | 178 | 108 | 71 | 37 | 15 | 0 |
import Prelude
import System.Environment ( getArgs )
import Data.List
import Data.Maybe
import Functions(makemove)
-- The main method that will be used for testing / command line access
main = do
        args <- getArgs
        pacFile <- readFile (head args)
        pac <- mapTuple pacFile
        yourMain pac
partFourOutput :: (Int, Int, Int, [[Char]]) -> IO ()
partFourOutput pac = do
print pac
-- Converts a string to a tuple representing the pac-man map
mapTuple :: String -> IO (Int, Int, Int, [[Char]], [Char])
mapTuple = readIO
-- YOUR CODE SHOULD COME AFTER THIS POINT
-- yourMain
yourMain pac =
partFourOutput $ makemove pac
--makemove :: (Int, Int, Int, [[Char]], [Char]) -> (Int, Int, Int, [[Char]])
| cable729/plc2 | src/csce322a2p4.hs | gpl-3.0 | 718 | 2 | 10 | 136 | 196 | 106 | 90 | 19 | 1 |
{-# OPTIONS -Wall #-}
{-# LANGUAGE FlexibleInstances #-}
{-- Naive evaluation of SecPAL
-}
module Logic.SecPAL.Evaluable where
import Control.Applicative
import Control.Monad (when, forM)
import Data.Array.IO
import Data.List
import Data.Maybe
import Logic.General.Entities
import Logic.General.Constraints
import Logic.General.Vars (ground)
import Logic.SecPAL.AssertionSafety (flat)
import Logic.SecPAL.Context
import Logic.General.ConstraintEvaluation
import Logic.SecPAL.Language
import Logic.SecPAL.Pretty
import Logic.SecPAL.Proof hiding (constraint, delegation)
import Logic.SecPAL.Substitutions hiding (interferes, interferent)
import System.Console.ANSI
import System.Random
import System.IO
--import Debug.Trace
{- A result is a proof of an assertion -}
type Result = (Proof Assertion)
{- Something is evaluable in a context if it reduces to a series of proofs -}
class Evaluable x where
(||-) :: Context -> x -> IO [Proof x]
instance Evaluable C where
ctx ||- c
| not (ground c) = fail $ "ungrounded constraint '"++pShow c++"'"
| otherwise =
case c of
(Boolean True) -> return [PStated (ctx,c)]
(Boolean False) -> return []
(Equals a b) -> do
a' <- evaluate ctx a
b' <- evaluate ctx b
return $
if hasFailed [a',b']
then []
else [PStated (ctx, c) | a' == b']
(LessThan a b) -> do
a' <- evaluate ctx a
b' <- evaluate ctx b
return $
if hasFailed [a',b']
then []
else [PStated (ctx, c) | a' < b']
(GreaterThan a b) -> do
a' <- evaluate ctx a
b' <- evaluate ctx b
return $
if hasFailed [a',b']
then []
else [PStated (ctx, c) | a' > b']
(Not c') -> do
p <- null <$> ctx ||- c'
return [PStated (ctx, c) | p]
(Conj x y) -> do
pX <- not.null <$> ctx ||- x
pY <- not.null <$> ctx ||- y
return [PStated (ctx,c) | pX && pY]
instance Evaluable Assertion where
ctx ||- x = do
{- We're shuffling them so that we try and avoid infinite loops where a
- cond statement will loop but a cansay will terminate.
-
- Probably a neater way of doing this but randomization is cheap and
- usually good!
-}
strategies <- shuffle [statedStrategy, condStrategy, canSayStrategy, canActAsStrategy]
tryStrategies ctx x strategies
where
-- | Randomly shuffle a list
-- /O(N)/
shuffle :: [a] -> IO [a]
shuffle xs = do
ar <- makeArray len xs
forM [1..len] $ \i -> do
j <- randomRIO (i,len)
vi <- readArray ar i
vj <- readArray ar j
writeArray ar j vi
return vj
where
len = length xs
makeArray :: Int -> [a] -> IO (IOArray Int a)
makeArray l = newListArray (1,l)
{- Try and apply a SecPAL evaluation rule to an assertion in a context -}
type Strategy = Context -> Assertion -> IO [Result]
statedStrategy :: Strategy
statedStrategy = strategy tryStated "known fact"
condStrategy :: Strategy
condStrategy = strategy tryCond "derivable fact"
canSayStrategy :: Strategy
canSayStrategy = strategy tryCanSay "delegatable"
canActAsStrategy :: Strategy
canActAsStrategy = strategy tryCanActAs "renameable"
{- Apply a strategy, if it succeeds give the proof else log the rule failure -}
strategy :: Strategy
-> String
-> Context
-> Assertion
-> IO [Result]
strategy f msg ctx x = do
result <- f ctx x
let isSuccessful = not.null$result
when (debug ctx) $
putStrLn . useColor isSuccessful $
"@ '"++pShow x++"' "++usePrefix isSuccessful++" "++msg
return $ if isSuccessful
then result
else []
where
inColor c str = setSGRCode [SetColor Foreground Dull c] ++ str ++ setSGRCode[Reset]
useColor True = inColor Green
useColor False = inColor Red
usePrefix True = "is"
usePrefix False = "is not"
tryStrategies :: Context -> Assertion -> [Strategy] -> IO [Result]
tryStrategies _ _ [] = return []
tryStrategies ctx x (s:ss) = do
result <- s ctx x
if not . null $ result
then return result
else tryStrategies ctx x ss
isIn :: Assertion -> AC -> Bool
x `isIn` (AC xs) = x `elem` xs
-- The Glorious Cond Rule!
--
-- (A says f if f_1, .., f_n, c) ∈ AC
-- AC,D |= A says f_i, ∀i ∈ (1..n) |= c flat(f)
-- --------------------------------------------------
-- AC, D |= A says f
--
cond' :: Context -> Assertion -> Assertion -> IO [Result]
cond' ctx result query@Assertion{ says=Claim{ conditions=[] }} =
let c = constraint . says $ query
in do
(_, cs) <- proofWithConstraint ctx c []
return $
makeCond
(ctx, result)
[]
cs
(flat . fact . says $ query)
cond' ctx result query =
let w = who query
c = constraint . says $ query
whoSays = asserts w
fs = conditions (says query)
aSaysFs = map whoSays fs
AC theAC = getAC ctx
--ac' = AC $ query `delete` theAC -- removes cond infinite loop -- no it doesnt
ac' = AC theAC
ctx' = ctx{theta=[], ac=ac'}
in do
ifStatements <- mapM (ctx' ||-) aSaysFs
let pfs = proofSets ifStatements
(ps, cs) <- unzip <$> proofsWithConstraint ctx pfs c
--traceIO ">>> QUERY"
--traceIO $ pShow query
--traceIO $ ">>> IFS"
--traceIO $ pShow ifStatements
--traceIO $ ">>> PFS"
--traceIO $ pShow pfs
--traceIO $ ">>> PS"
--traceIO $ pShow ps
--traceIO $ ">>> CS"
--traceIO $ pShow cs
--traceIO $ "<<< END"
return $
makeCond
(ctx,result)
ps
cs
(flat . fact . says $ query)
{- Find sets of proofs where a renaming in one proof does not contradict the
- renaming in any of the others
-}
proofSets :: PShow b => [[Proof b]] -> [[Proof b]]
proofSets [] = []
proofSets [x] = map (:[]) x
proofSets (ps:qs) =
let proofs = [ p:q
| p <- ps
, q <- proofSets qs
, not (interferent [p] q)
]
in proofs
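-- Illustrative: proofSets [[p1,p2],[q1]] is [[p1,q1],[p2,q1]], except that any
-- combination whose substitutions are interferent is dropped.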
proofsWithConstraint :: Context -> [[Proof Assertion]] -> C -> IO [([Proof Assertion], Proof C)]
proofsWithConstraint ctx pfs c = do
p <- mapM (proofWithConstraint ctx c) pfs
return [ (p', head c') | (p', c') <- p, not . null $ c' ]
proofWithConstraint :: Context -> C -> [Proof Assertion] -> IO ([Proof Assertion], [Proof C])
proofWithConstraint ctx c p = do
let θ = concatMap (theta.fst.conclusion) p
let c' = c `subAll` θ
if ground c'
then do
cp <- ctx ||- c'
return (p, cp)
else do
hPutStrLn stderr$ "@@@@ unground " ++ pShow c'
return (p,[])
cond :: Context -> Assertion -> Assertion -> IO [Result]
cond ctx result query =
let query' = simplify query
delta = fromJust $ result ==? query'
renamedQuery = subAll query delta
renamedResult = subAll result delta
in do
--traceIO $ "!delta " ++ pShow delta
cond' ctx{theta=delta} renamedResult renamedQuery
where
simplify q =
let says' = (says q){ conditions=[], constraint=Boolean True }
q' = q{ says=says' }
in q'
-- The Mysterious Can-Say Rule!
--
-- AC, oo |= A says B can-say D fact AC, D |= B says fact
-- ---------------------------------------------------------
-- AC, oo |= A says fact
canSay' :: Context -> Assertion -> Assertion -> Assertion -> IO [Result]
canSay' Context{d=Zero} _ _ _ = return []
canSay' ctx query canSayStm origCanSay =
let w = who query
f = fact . says $ query
cs = fact . says $ canSayStm
f' = what . verb $ cs
de = delegation . verb $ cs
b = subject cs
ctx' = ctx{theta=[]}
AC theAC = getAC ctx
ac' = AC $ origCanSay `delete` theAC -- to avoid infinite loop
in if f == f'
then do
del <- ctx' ||- delegates w b f de
ass <- ctx'{d=de, ac=ac'} ||- (b `asserts` f)
return $
makeCanSay
(ctx,query)
del
ass
else return []
canSay :: Context -> Assertion -> Assertion -> IO [Result]
canSay ctx result canSayStm =
let canSayStm' = simplify canSayStm
pRenaming = result ==? canSayStm'
delta = fromMaybe (error "can say statement failed to simplify") pRenaming
renamedCSS = subAll canSayStm delta
renamedResult = subAll result delta
in do
--traceIO $ "$RESULT: " ++ pShow result
--traceIO $ "$delta: " ++ pShow delta
canSay' ctx{theta=delta} renamedResult renamedCSS canSayStm
where
simplify q =
let f = what . verb . fact . says $ q
claim' = (says q){ fact = f, conditions=[], constraint=Boolean True }
in q{ says=claim' }
-- Sublime Can-act-as Rule
--
-- AC, D |= A says B can-act-as C AC, D |= A says C verbphrase
-- --------------------------------------------------------------
-- AC, D |= A says B verbphrase
canActAs' :: Context -> Assertion -> Assertion -> Assertion -> IO [Result]
canActAs' ctx query canActAsStm renamedQuery = do
delta <- ctx ||- canActAsStm
result <- ctx ||- renamedQuery
return $
makeCanActAs
(ctx,query)
delta
result
canActAs :: Context -> Assertion -> Assertion -> IO [Result]
canActAs ctx query canActAsStm =
let delta = whom . verb . fact . says $ canActAsStm
c = says query
f = fact c
renamedQuery = query{ says=c{ fact=f{ subject=delta } } }
in
canActAs' ctx query canActAsStm renamedQuery
asserts :: E -> Fact -> Assertion
a `asserts` f = Assertion { who=a
, says = Claim { fact=f
, conditions=[]
, constraint=Boolean True
}
}
delegates :: E -> E -> Fact -> D -> Assertion
delegates from to w level =
Assertion { who=from
, says = Claim { fact = Fact { subject = to
, verb = CanSay level w
}
, conditions=[]
, constraint=Boolean True
}
}
tryStated :: Context -> Assertion -> IO [Result]
tryStated ctx a =
let as = filter (isJust . (==? a)) (acs $ getAC ctx)
in case as of
[] -> return []
ps -> return $ map toStated ps
where
toStated p = let ts = fromJust $ p ==? a
in PStated (ctx{theta=ts}, p)
tryCond :: Context -> Assertion -> IO [Result]
tryCond ctx a =
let as = filter (isSpecific a) (acs (getAC ctx))
in do
candidates <- mapM (cond ctx a) as
return $ concat candidates
tryCanSay :: Context -> Assertion -> IO [Result]
tryCanSay ctx a =
let as = filter (isDelegation a) (acs (getAC ctx))
in do
candidates <- mapM (canSay ctx a) as
return $ concat candidates
tryCanActAs :: Context -> Assertion -> IO [Result]
tryCanActAs ctx a =
let as = filter (isRenaming a) (acs $ getAC ctx)
in do
candidates <- mapM (canActAs ctx a) as
return $ concat candidates
getSubstitutions :: Assertion -> [Assertion] -> [[Substitution]]
getSubstitutions _ [] = []
getSubstitutions a (b:bs) =
let sp = getSpecific a b
de = getDelegation a b
re = getRenaming a b
deltas = catMaybes [sp, de, re]
in deltas ++ getSubstitutions a bs
isSpecific :: Assertion -> Assertion -> Bool
isSpecific a = isJust . getSpecific a
getSpecific :: Assertion -> Assertion -> Maybe [Substitution]
getSpecific a b = (++) <$> who a ==? who b
<*> (fact.says$a) ==? (fact.says$b)
isDelegation :: Assertion -> Assertion -> Bool
isDelegation a = isJust . getDelegation a
getDelegation :: Assertion -> Assertion -> Maybe [Substitution]
getDelegation
Assertion{ who=a
, says=Claim{fact=f}
}
Assertion{ who=a'
, says=Claim{ fact=Fact{ verb=CanSay{what=f'} } }
}
= (++) <$> (a ==? a') <*> (f ==? f')
getDelegation _ _ = Nothing
isRenaming :: Assertion -> Assertion -> Bool
isRenaming a = isJust . getRenaming a
getRenaming :: Assertion -> Assertion -> Maybe [Substitution]
getRenaming
Assertion{ who=a
, says=Claim{ fact=Fact{ subject=b } }
}
Assertion{ who=a'
, says=Claim{ fact=Fact{ subject=b'
, verb=CanActAs{ whom=_ }
}
}
}
= (++) <$> (a ==? a') <*> (b ==? b')
getRenaming _ _ = Nothing
| bogwonch/SecPAL | src/Logic/SecPAL/Evaluable.hs | gpl-3.0 | 12,908 | 0 | 17 | 4,167 | 4,086 | 2,123 | 1,963 | 301 | 4 |
{-# LANGUAGE RecordWildCards, DeriveDataTypeable, FlexibleInstances, ScopedTypeVariables #-}
{-|
Multi-column balance reports, used by the balance command.
-}
module Hledger.Reports.MultiBalanceReports (
MultiBalanceReport(..),
MultiBalanceReportRow,
multiBalanceReport
-- -- * Tests
-- tests_Hledger_Reports_MultiBalanceReport
)
where
import Data.List
import Data.Maybe
import Data.Ord
import Safe
-- import Test.HUnit
import Hledger.Data
import Hledger.Query
import Hledger.Utils
import Hledger.Reports.ReportOptions
import Hledger.Reports.BalanceReport
-- | A multi balance report is a balance report with one or more columns. It has:
--
-- 1. a list of each column's date span
--
-- 2. a list of rows, each containing a renderable account name and the amounts to show in each column
--
-- 3. a list of each column's final total
--
-- The meaning of the amounts depends on the type of multi balance
-- report, of which there are three: periodic, cumulative and historical
-- (see 'BalanceType' and "Hledger.Cli.Balance").
newtype MultiBalanceReport = MultiBalanceReport ([DateSpan]
,[MultiBalanceReportRow]
,MultiBalanceTotalsRow
)
-- | A row in a multi balance report has
--
-- * An account name, with rendering hints
--
-- * A list of amounts to be shown in each of the report's columns.
--
-- * The total of the row amounts.
--
-- * The average of the row amounts.
type MultiBalanceReportRow = (RenderableAccountName, [MixedAmount], MixedAmount, MixedAmount)
type MultiBalanceTotalsRow = ([MixedAmount], MixedAmount, MixedAmount)
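-- An illustrative row (not real output): with two report columns, a row for
-- "assets:bank" has the shape
--
-- > (("assets:bank","bank",2), [$10, $20], $30, $15)
--
-- i.e. the renderable name, one amount per column, then the row total and
-- the row average.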
instance Show MultiBalanceReport where
-- use ppShow to break long lists onto multiple lines
-- we add some bogus extra shows here to help ppShow parse the output
-- and wrap tuples and lists properly
show (MultiBalanceReport (spans, items, totals)) =
"MultiBalanceReport (ignore extra quotes):\n" ++ ppShow (show spans, map show items, totals)
-- type alias just to remind us which AccountNames might be depth-clipped, below.
type ClippedAccountName = AccountName
-- | Generate a multicolumn balance report for the matched accounts,
-- showing the change of balance, accumulated balance, or historical balance
-- in each of the specified periods.
multiBalanceReport :: ReportOpts -> Query -> Journal -> MultiBalanceReport
multiBalanceReport opts q j = MultiBalanceReport (displayspans, items, totalsrow)
where
symq = dbg1 "symq" $ filterQuery queryIsSym $ dbg1 "requested q" q
depthq = dbg1 "depthq" $ filterQuery queryIsDepth q
depth = queryDepth depthq
depthless = dbg1 "depthless" . filterQuery (not . queryIsDepth)
datelessq = dbg1 "datelessq" $ filterQuery (not . queryIsDateOrDate2) q
dateqcons = if date2_ opts then Date2 else Date
precedingq = dbg1 "precedingq" $ And [datelessq, dateqcons $ DateSpan Nothing (spanStart reportspan)]
requestedspan = dbg1 "requestedspan" $ queryDateSpan (date2_ opts) q -- span specified by -b/-e/-p options and query args
requestedspan' = dbg1 "requestedspan'" $ requestedspan `spanDefaultsFrom` journalDateSpan (date2_ opts) j -- if open-ended, close it using the journal's end dates
intervalspans = dbg1 "intervalspans" $ splitSpan (intervalFromOpts opts) requestedspan' -- interval spans enclosing it
reportspan = dbg1 "reportspan" $ DateSpan (maybe Nothing spanStart $ headMay intervalspans) -- the requested span enlarged to a whole number of intervals
(maybe Nothing spanEnd $ lastMay intervalspans)
newdatesq = dbg1 "newdateq" $ dateqcons reportspan
reportq = dbg1 "reportq" $ depthless $ And [datelessq, newdatesq] -- user's query enlarged to whole intervals and with no depth limit
ps :: [Posting] =
dbg1 "ps" $
journalPostings $
filterJournalAmounts symq $ -- remove amount parts excluded by cur:
filterJournalPostings reportq $ -- remove postings not matched by (adjusted) query
journalSelectingAmountFromOpts opts j
displayspans = dbg1 "displayspans" $ splitSpan (intervalFromOpts opts) displayspan
where
displayspan
| empty_ opts = dbg1 "displayspan (-E)" $ reportspan -- all the requested intervals
| otherwise = dbg1 "displayspan" $ requestedspan `spanIntersect` matchedspan -- exclude leading/trailing empty intervals
matchedspan = dbg1 "matchedspan" $ postingsDateSpan' (whichDateFromOpts opts) ps
psPerSpan :: [[Posting]] =
dbg1 "psPerSpan" $
[filter (isPostingInDateSpan' (whichDateFromOpts opts) s) ps | s <- displayspans]
postedAcctBalChangesPerSpan :: [[(ClippedAccountName, MixedAmount)]] =
dbg1 "postedAcctBalChangesPerSpan" $
map postingAcctBals psPerSpan
where
postingAcctBals :: [Posting] -> [(ClippedAccountName, MixedAmount)]
postingAcctBals ps = [(aname a, (if tree_ opts then aibalance else aebalance) a) | a <- as]
where
as = depthLimit $
(if tree_ opts then id else filter ((>0).anumpostings)) $
drop 1 $ accountsFromPostings ps
depthLimit
| tree_ opts = filter ((depthq `matchesAccount`).aname) -- exclude deeper balances
| otherwise = clipAccountsAndAggregate depth -- aggregate deeper balances at the depth limit
postedAccts :: [AccountName] = dbg1 "postedAccts" $ sort $ accountNamesFromPostings ps
-- starting balances and accounts from transactions before the report start date
startacctbals = dbg1 "startacctbals" $ map (\((a,_,_),b) -> (a,b)) startbalanceitems
where
(startbalanceitems,_) = dbg1 "starting balance report" $ balanceReport opts' precedingq j
where
opts' | tree_ opts = opts{no_elide_=True}
| otherwise = opts{accountlistmode_=ALFlat}
startingBalanceFor a = fromMaybe nullmixedamt $ lookup a startacctbals
startAccts = dbg1 "startAccts" $ map fst startacctbals
displayedAccts :: [ClippedAccountName] =
dbg1 "displayedAccts" $
(if tree_ opts then expandAccountNames else id) $
nub $ map (clipOrEllipsifyAccountName depth) $
if empty_ opts then nub $ sort $ startAccts ++ postedAccts else postedAccts
acctBalChangesPerSpan :: [[(ClippedAccountName, MixedAmount)]] =
dbg1 "acctBalChangesPerSpan" $
[sortBy (comparing fst) $ unionBy (\(a,_) (a',_) -> a == a') postedacctbals zeroes
| postedacctbals <- postedAcctBalChangesPerSpan]
where zeroes = [(a, nullmixedamt) | a <- displayedAccts]
acctBalChanges :: [(ClippedAccountName, [MixedAmount])] =
dbg1 "acctBalChanges" $
[(a, map snd abs) | abs@((a,_):_) <- transpose acctBalChangesPerSpan] -- never null, or used when null...
items :: [MultiBalanceReportRow] =
dbg1 "items" $
[((a, accountLeafName a, accountNameLevel a), displayedBals, rowtot, rowavg)
| (a,changes) <- acctBalChanges
, let displayedBals = case balancetype_ opts of
HistoricalBalance -> drop 1 $ scanl (+) (startingBalanceFor a) changes
CumulativeBalance -> drop 1 $ scanl (+) nullmixedamt changes
_ -> changes
, let rowtot = sum displayedBals
, let rowavg = averageMixedAmounts displayedBals
, empty_ opts || depth == 0 || any (not . isZeroMixedAmount) displayedBals
]
totals :: [MixedAmount] =
-- dbg1 "totals" $
map sum balsbycol
where
balsbycol = transpose [bs | ((a,_,_),bs,_,_) <- items, not (tree_ opts) || a `elem` highestlevelaccts]
highestlevelaccts =
dbg1 "highestlevelaccts" $
[a | a <- displayedAccts, not $ any (`elem` displayedAccts) $ init $ expandAccountName a]
totalsrow :: MultiBalanceTotalsRow =
dbg1 "totalsrow" $
(totals, sum totals, averageMixedAmounts totals)
dbg1 s = let p = "multiBalanceReport" in Hledger.Utils.dbg1 (p++" "++s) -- add prefix in this function's debug output
-- dbg1 = const id -- exclude this function from debug output
| kmels/hledger | hledger-lib/Hledger/Reports/MultiBalanceReports.hs | gpl-3.0 | 8,756 | 0 | 19 | 2,461 | 1,803 | 971 | 832 | 105 | 8 |
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE Rank2Types #-}
module Shakugan.Util where
import Control.Lens
import qualified Data.Map as M
import qualified Data.Vector as V
import FreeGame
import FreeGame.Class (keyStates)
import Shakugan.Types
-- | Gets next sprite in the animation, bumping the meta-data.
animate ∷ Double -- ^ How many times a second should the whole thing animate?
→ Getter Resources CharacterSprites
→ Lens' CharacterSprites Sprite
→ GameLoop Bitmap
animate t f g = do
cs ← use $ resources.f
case cs ^. g of
Sprite v s d → do
-- framerate / t = how many times faster/slower we need to go to play
-- the whole animation once every 60 rendering frames
-- framerate / t / length v = slice of time we have for each bitmap
fps ← fromIntegral <$> use targetFramerate
let timeForFrame = fps / t / fromIntegral (V.length v)
(resources.charSprites .=) $ cs & g .~
if d < floor timeForFrame
then Sprite v s (d + 1)
else if s + 1 >= V.length v
then Sprite v 0 0
else Sprite v (s + 1) 0
runBitmap (1 / timeForFrame) $ v V.! s
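-- A worked example (assuming a targetFramerate of 60): with t = 2 and an
-- 8-frame sprite, timeForFrame = 60 / 2 / 8 = 3.75, so each bitmap is shown
-- for four rendering frames (d counting 0..3) before the sprite index advances.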
-- | Updates player position based on how many rendering frames it
-- visible and how much we need to have moved by the end of this time.
-- Returns the frame that needs to be currently displayed.
--
-- Moving a little bit every rendered frame rather than moving a
-- big chunk at the start of a single sprite makes movement much
-- smoother.
runBitmap ∷ Double -- ^ What fraction of the position delta to apply this frame.
→ MovingBitmap -- ^ Bitmap to use
→ GameLoop Bitmap
runBitmap s (MovingBitmap b pd) =
field.player.position %= (^+^ pd ^* s) >> return b
pressedKeys ∷ GameLoop [Key]
pressedKeys = M.keys . M.filter id <$> keyStates
-- | Draws a series of 'Bitmap's from 'Sprite' either horizontally or
-- vertically including the offsets of the sprites.
drawSeries ∷ Bool -- ^ True = horizontal, False = vertical
→ Lens' GameFrame Sprite
→ GameLoop ()
drawSeries way l = do
Sprite v _ _ ← use l
let bms = zip [0 .. ] $ V.toList v
fv p = if way then V2 (100 + 80 * p) 100 else V2 100 (100 + 110 * p)
f (x, MovingBitmap b offs) = translate (offs ^+^ fv x) $ bitmap b
mapM_ f bms
| Fuuzetsu/shakugan-no-haskell | src/Shakugan/Util.hs | gpl-3.0 | 2,389 | 0 | 19 | 609 | 560 | 292 | 268 | -1 | -1 |
{-# LANGUAGE GADTs, ExistentialQuantification #-}
module Language.SMTLib2.Strategy where
data Tactic
= Skip
| AndThen [Tactic]
| OrElse [Tactic]
| ParOr [Tactic]
| ParThen Tactic Tactic
| TryFor Tactic Integer
| If (Probe Bool) Tactic Tactic
| FailIf (Probe Bool)
| forall p. Show p => UsingParams (BuiltInTactic p) [p]
data Probe a where
ProbeBoolConst :: Bool -> Probe Bool
ProbeIntConst :: Integer -> Probe Integer
ProbeAnd :: [Probe Bool] -> Probe Bool
ProbeOr :: [Probe Bool] -> Probe Bool
ProbeNot :: Probe Bool -> Probe Bool
ProbeEq :: Show a => Probe a -> Probe a -> Probe Bool
ProbeGt :: Probe Integer -> Probe Integer -> Probe Bool
ProbeGe :: Probe Integer -> Probe Integer -> Probe Bool
ProbeLt :: Probe Integer -> Probe Integer -> Probe Bool
ProbeLe :: Probe Integer -> Probe Integer -> Probe Bool
IsPB :: Probe Bool
ArithMaxDeg :: Probe Integer
ArithAvgDeg :: Probe Integer
ArithMaxBW :: Probe Integer
ArithAvgBW :: Probe Integer
IsQFLIA :: Probe Bool
IsQFLRA :: Probe Bool
IsQFLIRA :: Probe Bool
IsILP :: Probe Bool
IsQFNIA :: Probe Bool
IsQFNRA :: Probe Bool
IsNIA :: Probe Bool
IsNRA :: Probe Bool
IsUnbounded :: Probe Bool
Memory :: Probe Integer
Depth :: Probe Integer
Size :: Probe Integer
NumExprs :: Probe Integer
NumConsts :: Probe Integer
NumBoolConsts :: Probe Integer
NumArithConsts :: Probe Integer
NumBVConsts :: Probe Integer
ProduceProofs :: Probe Bool
ProduceModel :: Probe Bool
ProduceUnsatCores :: Probe Bool
HasPatterns :: Probe Bool
IsPropositional :: Probe Bool
IsQFBV :: Probe Bool
IsQFBVEQ :: Probe Bool
data AnyPar = ParBool Bool
| ParInt Integer
| ParDouble Double
deriving Show
data BuiltInTactic p where
QFLRATactic :: BuiltInTactic QFLRATacticP
CustomTactic :: String -> BuiltInTactic (String,AnyPar)
data QFLRATacticP
= ArithBranchCutRatio Integer
deriving Show
instance Show Tactic where
showsPrec _ Skip = showString "Skip"
showsPrec p (AndThen ts) = showParen (p>10) (showString "AndThen " .
showsPrec 0 ts)
showsPrec p (OrElse ts) = showParen (p>10) (showString "OrElse " .
showsPrec 0 ts)
showsPrec p (ParOr ts) = showParen (p>10) (showString "ParOr " .
showsPrec 0 ts)
showsPrec p (ParThen t1 t2) = showParen (p>10) (showString "ParThen " .
showsPrec 11 t1 .
showChar ' ' .
showsPrec 11 t2)
showsPrec p (TryFor t n) = showParen (p>10) (showString "TryFor " .
showsPrec 11 t .
showChar ' ' .
showsPrec 11 n)
showsPrec p (If c t1 t2) = showParen (p>10) (showString "If " .
showsPrec 11 c .
showChar ' ' .
showsPrec 11 t1 .
showChar ' ' .
showsPrec 11 t2)
showsPrec p (FailIf c) = showParen (p>10) (showString "FailIf " .
showsPrec 11 c)
showsPrec p (UsingParams t []) = showsPrec p t
showsPrec p (UsingParams t pars) = showParen (p>10) (showString "UsingParams " .
showsPrec 11 t .
showChar ' ' .
showsPrec 11 pars)
instance Show (BuiltInTactic p) where
showsPrec _ QFLRATactic = showString "QFLRATactic"
showsPrec _ (CustomTactic name) = showString name
instance Show a => Show (Probe a) where
showsPrec p (ProbeBoolConst c) = showParen (p>10) (showString "ProbeBoolConst " .
showsPrec 11 c)
showsPrec p (ProbeIntConst c) = showParen (p>10) (showString "ProbeIntConst " .
showsPrec 11 c)
showsPrec p (ProbeAnd ps) = showParen (p>10) (showString "ProbeAnd " .
showsPrec 11 ps)
showsPrec p (ProbeOr ps) = showParen (p>10) (showString "ProbeOr " .
showsPrec 11 ps)
showsPrec p (ProbeNot c) = showParen (p>10) (showString "ProbeNot " .
showsPrec 11 c)
showsPrec p (ProbeEq p1 p2) = showParen (p>10) (showString "ProbeEq " .
showsPrec 11 p1 .
showChar ' ' .
showsPrec 11 p2)
showsPrec p (ProbeGe p1 p2) = showParen (p>10) (showString "ProbeGe " .
showsPrec 11 p1 .
showChar ' ' .
showsPrec 11 p2)
showsPrec p (ProbeGt p1 p2) = showParen (p>10) (showString "ProbeGt " .
showsPrec 11 p1 .
showChar ' ' .
showsPrec 11 p2)
showsPrec p (ProbeLe p1 p2) = showParen (p>10) (showString "ProbeLe " .
showsPrec 11 p1 .
showChar ' ' .
showsPrec 11 p2)
showsPrec p (ProbeLt p1 p2) = showParen (p>10) (showString "ProbeLt " .
showsPrec 11 p1 .
showChar ' ' .
showsPrec 11 p2)
showsPrec _ IsPB = showString "IsPB"
showsPrec _ ArithMaxDeg = showString "ArithMaxDeg"
showsPrec _ ArithAvgDeg = showString "ArithAvgDeg"
showsPrec _ ArithMaxBW = showString "ArithMaxBW"
showsPrec _ ArithAvgBW = showString "ArithAvgBW"
showsPrec _ IsQFLIA = showString "IsQFLIA"
showsPrec _ IsQFLRA = showString "IsQFLRA"
showsPrec _ IsQFLIRA = showString "IsQFLIRA"
showsPrec _ IsILP = showString "IsILP"
showsPrec _ IsQFNIA = showString "IsQFNIA"
showsPrec _ IsQFNRA = showString "IsQFNRA"
showsPrec _ IsNIA = showString "IsNIA"
showsPrec _ IsNRA = showString "IsNRA"
showsPrec _ IsUnbounded = showString "IsUnbounded"
showsPrec _ Memory = showString "Memory"
showsPrec _ Depth = showString "Depth"
showsPrec _ Size = showString "Size"
showsPrec _ NumExprs = showString "NumExprs"
showsPrec _ NumConsts = showString "NumConsts"
showsPrec _ NumBoolConsts = showString "NumBoolConsts"
showsPrec _ NumArithConsts = showString "NumArithConsts"
showsPrec _ NumBVConsts = showString "NumBVConsts"
showsPrec _ ProduceProofs = showString "ProduceProofs"
showsPrec _ ProduceModel = showString "ProduceModel"
showsPrec _ ProduceUnsatCores = showString "ProduceUnsatCores"
showsPrec _ HasPatterns = showString "HasPatterns"
showsPrec _ IsPropositional = showString "IsPropositional"
showsPrec _ IsQFBV = showString "IsQFBV"
showsPrec _ IsQFBVEQ = showString "IsQFBVEQ"
| hguenther/smtlib2 | Language/SMTLib2/Strategy.hs | gpl-3.0 | 7,439 | 0 | 13 | 2,967 | 2,044 | 995 | 1,049 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Games.Applications.Verify
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Verifies the auth token provided with this request is for the
-- application with the specified ID, and returns the ID of the player it
-- was granted for.
--
-- /See:/ <https://developers.google.com/games/ Google Play Game Services Reference> for @games.applications.verify@.
module Network.Google.Resource.Games.Applications.Verify
(
-- * REST Resource
ApplicationsVerifyResource
-- * Creating a Request
, applicationsVerify
, ApplicationsVerify
-- * Request Lenses
, avXgafv
, avUploadProtocol
, avAccessToken
, avUploadType
, avApplicationId
, avCallback
) where
import Network.Google.Games.Types
import Network.Google.Prelude
-- | A resource alias for @games.applications.verify@ method which the
-- 'ApplicationsVerify' request conforms to.
type ApplicationsVerifyResource =
"games" :>
"v1" :>
"applications" :>
Capture "applicationId" Text :>
"verify" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ApplicationVerifyResponse
-- | Verifies the auth token provided with this request is for the
-- application with the specified ID, and returns the ID of the player it
-- was granted for.
--
-- /See:/ 'applicationsVerify' smart constructor.
data ApplicationsVerify =
ApplicationsVerify'
{ _avXgafv :: !(Maybe Xgafv)
, _avUploadProtocol :: !(Maybe Text)
, _avAccessToken :: !(Maybe Text)
, _avUploadType :: !(Maybe Text)
, _avApplicationId :: !Text
, _avCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ApplicationsVerify' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'avXgafv'
--
-- * 'avUploadProtocol'
--
-- * 'avAccessToken'
--
-- * 'avUploadType'
--
-- * 'avApplicationId'
--
-- * 'avCallback'
applicationsVerify
:: Text -- ^ 'avApplicationId'
-> ApplicationsVerify
applicationsVerify pAvApplicationId_ =
ApplicationsVerify'
{ _avXgafv = Nothing
, _avUploadProtocol = Nothing
, _avAccessToken = Nothing
, _avUploadType = Nothing
, _avApplicationId = pAvApplicationId_
, _avCallback = Nothing
}
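-- An illustrative construction (application id and token are placeholders;
-- assumes the lens operators re-exported by 'Network.Google.Prelude'):
--
-- > applicationsVerify "my-app-id" & avAccessToken ?~ "ya29.token"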
-- | V1 error format.
avXgafv :: Lens' ApplicationsVerify (Maybe Xgafv)
avXgafv = lens _avXgafv (\ s a -> s{_avXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
avUploadProtocol :: Lens' ApplicationsVerify (Maybe Text)
avUploadProtocol
= lens _avUploadProtocol
(\ s a -> s{_avUploadProtocol = a})
-- | OAuth access token.
avAccessToken :: Lens' ApplicationsVerify (Maybe Text)
avAccessToken
= lens _avAccessToken
(\ s a -> s{_avAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
avUploadType :: Lens' ApplicationsVerify (Maybe Text)
avUploadType
= lens _avUploadType (\ s a -> s{_avUploadType = a})
-- | The application ID from the Google Play developer console.
avApplicationId :: Lens' ApplicationsVerify Text
avApplicationId
= lens _avApplicationId
(\ s a -> s{_avApplicationId = a})
-- | JSONP
avCallback :: Lens' ApplicationsVerify (Maybe Text)
avCallback
= lens _avCallback (\ s a -> s{_avCallback = a})
instance GoogleRequest ApplicationsVerify where
type Rs ApplicationsVerify =
ApplicationVerifyResponse
type Scopes ApplicationsVerify =
'["https://www.googleapis.com/auth/games"]
requestClient ApplicationsVerify'{..}
= go _avApplicationId _avXgafv _avUploadProtocol
_avAccessToken
_avUploadType
_avCallback
(Just AltJSON)
gamesService
where go
= buildClient
(Proxy :: Proxy ApplicationsVerifyResource)
mempty
| brendanhay/gogol | gogol-games/gen/Network/Google/Resource/Games/Applications/Verify.hs | mpl-2.0 | 4,919 | 0 | 18 | 1,191 | 710 | 415 | 295 | 106 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Content.Collections.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes a collection from your Merchant Center account.
--
-- /See:/ <https://developers.google.com/shopping-content/v2/ Content API for Shopping Reference> for @content.collections.delete@.
module Network.Google.Resource.Content.Collections.Delete
(
-- * REST Resource
CollectionsDeleteResource
-- * Creating a Request
, collectionsDelete
, CollectionsDelete
-- * Request Lenses
, cdXgafv
, cdMerchantId
, cdUploadProtocol
, cdAccessToken
, cdCollectionId
, cdUploadType
, cdCallback
) where
import Network.Google.Prelude
import Network.Google.ShoppingContent.Types
-- | A resource alias for @content.collections.delete@ method which the
-- 'CollectionsDelete' request conforms to.
type CollectionsDeleteResource =
"content" :>
"v2.1" :>
Capture "merchantId" (Textual Int64) :>
"collections" :>
Capture "collectionId" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] ()
-- | Deletes a collection from your Merchant Center account.
--
-- /See:/ 'collectionsDelete' smart constructor.
data CollectionsDelete =
CollectionsDelete'
{ _cdXgafv :: !(Maybe Xgafv)
, _cdMerchantId :: !(Textual Int64)
, _cdUploadProtocol :: !(Maybe Text)
, _cdAccessToken :: !(Maybe Text)
, _cdCollectionId :: !Text
, _cdUploadType :: !(Maybe Text)
, _cdCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CollectionsDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cdXgafv'
--
-- * 'cdMerchantId'
--
-- * 'cdUploadProtocol'
--
-- * 'cdAccessToken'
--
-- * 'cdCollectionId'
--
-- * 'cdUploadType'
--
-- * 'cdCallback'
collectionsDelete
:: Int64 -- ^ 'cdMerchantId'
-> Text -- ^ 'cdCollectionId'
-> CollectionsDelete
collectionsDelete pCdMerchantId_ pCdCollectionId_ =
CollectionsDelete'
{ _cdXgafv = Nothing
, _cdMerchantId = _Coerce # pCdMerchantId_
, _cdUploadProtocol = Nothing
, _cdAccessToken = Nothing
, _cdCollectionId = pCdCollectionId_
, _cdUploadType = Nothing
, _cdCallback = Nothing
}
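-- An illustrative construction (ids are placeholders):
--
-- > collectionsDelete 1234567 "summer-sale"
--
-- targets collection "summer-sale" in Merchant Center account 1234567.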
-- | V1 error format.
cdXgafv :: Lens' CollectionsDelete (Maybe Xgafv)
cdXgafv = lens _cdXgafv (\ s a -> s{_cdXgafv = a})
-- | Required. The ID of the account that contains the collection. This
-- account cannot be a multi-client account.
cdMerchantId :: Lens' CollectionsDelete Int64
cdMerchantId
= lens _cdMerchantId (\ s a -> s{_cdMerchantId = a})
. _Coerce
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
cdUploadProtocol :: Lens' CollectionsDelete (Maybe Text)
cdUploadProtocol
= lens _cdUploadProtocol
(\ s a -> s{_cdUploadProtocol = a})
-- | OAuth access token.
cdAccessToken :: Lens' CollectionsDelete (Maybe Text)
cdAccessToken
= lens _cdAccessToken
(\ s a -> s{_cdAccessToken = a})
-- | Required. The collectionId of the collection. CollectionId is the same
-- as the REST ID of the collection.
cdCollectionId :: Lens' CollectionsDelete Text
cdCollectionId
= lens _cdCollectionId
(\ s a -> s{_cdCollectionId = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
cdUploadType :: Lens' CollectionsDelete (Maybe Text)
cdUploadType
= lens _cdUploadType (\ s a -> s{_cdUploadType = a})
-- | JSONP
cdCallback :: Lens' CollectionsDelete (Maybe Text)
cdCallback
= lens _cdCallback (\ s a -> s{_cdCallback = a})
instance GoogleRequest CollectionsDelete where
type Rs CollectionsDelete = ()
type Scopes CollectionsDelete =
'["https://www.googleapis.com/auth/content"]
requestClient CollectionsDelete'{..}
= go _cdMerchantId _cdCollectionId _cdXgafv
_cdUploadProtocol
_cdAccessToken
_cdUploadType
_cdCallback
(Just AltJSON)
shoppingContentService
where go
= buildClient
(Proxy :: Proxy CollectionsDeleteResource)
mempty
| brendanhay/gogol | gogol-shopping-content/gen/Network/Google/Resource/Content/Collections/Delete.hs | mpl-2.0 | 5,171 | 0 | 18 | 1,222 | 803 | 466 | 337 | 115 | 1 |
{-# LANGUAGE DeriveDataTypeable, GeneralizedNewtypeDeriving, ScopedTypeVariables, TypeSynonymInstances, FlexibleInstances, RecordWildCards #-}
module Store.Config
( Path(..)
, pathKey
, keyPath
, Value(..)
, ConfigMap
, Config
, configMap
, configPath
, load
, Configurable(..)
, (!)
) where
import Prelude hiding (lookup)
import Control.Applicative ((<|>))
import Control.Arrow (first)
import Control.Exception (Exception, throw)
import Control.Monad ((<=<))
import qualified Data.Aeson.Types as JSON
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BSC
import Data.Foldable (fold)
import qualified Data.HashMap.Strict as HM
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import Data.String (IsString(..))
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Data.Typeable (Typeable, TypeRep, typeRep)
import qualified Data.Vector as V
import qualified Text.Parsec as P
import qualified Text.Parsec.ByteString.Lazy as P
import qualified Text.Parsec.Token as PT
import JSON ()
import Ops
type Key = BS.ByteString
newtype Path = Path { pathList :: [Key] } deriving (Monoid)
pathKey :: Path -> Key
pathKey (Path p) = BS.intercalate (BSC.singleton '.') p
keyPath :: Key -> Path
keyPath = Path . BSC.split '.'
pathSnoc :: Path -> Key -> Path
pathSnoc (Path l) k = Path (l ++ [k])
instance Show Path where
showsPrec p = showsPrec p . pathKey
instance IsString Path where
fromString = keyPath . fromString
data ConfigError
= ParseError P.ParseError
| ConflictError
{ errorPath :: Path
, errorValue1, errorValue2 :: Value
}
| ValueError
{ errorPath :: Path
, errorValue :: Value
, errorNeeded :: TypeRep
}
deriving (Typeable, Show)
instance Exception ConfigError
data Value
= Empty
| Boolean !Bool
| Integer !Integer
| String !BS.ByteString
| List [Value]
| Sub !ConfigMap
deriving (Typeable, Eq, Show)
type ConfigMap = HM.HashMap BS.ByteString Value
data Config = Config
{ configPath :: !Path
, configMap :: !ConfigMap
} deriving (Typeable) -- , Show)
topConfig :: ConfigMap -> Config
topConfig = Config (Path [])
unionValue :: Path -> Value -> Value -> Value
unionValue _ Empty v = v
unionValue _ v Empty = v
unionValue p (Sub m1) (Sub m2) = Sub $ unionConfig p m1 m2
unionValue p v1 v2
| v1 == v2 = v1
| otherwise = throw $ ConflictError{ errorPath = p, errorValue1 = v1, errorValue2 = v2 }
unionConfig :: Path -> ConfigMap -> ConfigMap -> ConfigMap
unionConfig p = HM.foldrWithKey $ \k -> HM.insertWith (flip $ unionValue (pathSnoc p k)) k
-- |Merge two configs, throwing 'ConflictError' on conflicts
instance Monoid Config where
mempty = topConfig HM.empty
Config (Path p1) m1 `mappend` Config (Path p2) m2 = Config p m where
(p', (p1', p2')) = cpfx p1 p2
p = Path p'
m = unionConfig p (nest m1 p1') (nest m2 p2')
cpfx (a:al) (b:bl) | a == b = first (a :) $ cpfx al bl
cpfx al bl = ([], (al, bl))
nest = foldr (\k -> HM.singleton k . Sub)
lookup :: Path -> ConfigMap -> Value
lookup (Path []) m = Sub m
lookup (Path [k]) m | Just v <- HM.lookup k m = v
lookup (Path (k:l)) m | Just (Sub km) <- HM.lookup k m = lookup (Path l) km
lookup _ _ = Empty
parser :: P.Parser ConfigMap
parser = whiteSpace *> block mempty HM.empty <* P.eof where
block p m = (block p =<< pair p m) <|> return m
pair p m = do
ks <- identifier P.<?> "key"
let k = BSC.pack ks
kp = pathSnoc p k
km <- case HM.lookupDefault Empty k m of
Empty -> return Nothing
Sub km -> return $ Just km
_ -> fail $ "Duplicate key value: " ++ show kp
kv <- lexeme dot *> (Sub <$> pair kp (fold km)) <|> rhs kp km
return $ HM.insert k kv m
rhs p Nothing = sub p HM.empty <|>
lexeme (P.char '=') *> val
rhs p (Just m) = sub p m
sub p m = Sub <$> braces (block p m)
val = P.choice
[ Boolean True <$ reserved "true"
, Boolean False <$ reserved "false"
, Integer <$> integer
, String . BSC.pack <$> stringLiteral
, List <$> brackets (commaSep val)
] P.<?> "value"
PT.TokenParser{..} = PT.makeTokenParser PT.LanguageDef
{ PT.commentStart = ""
, PT.commentEnd = ""
, PT.commentLine = "#"
, PT.nestedComments = False
, PT.identStart = P.letter
, PT.identLetter = P.alphaNum <|> P.oneOf "-_"
, PT.opStart = P.unexpected "operator"
, PT.opLetter = P.unexpected "operator"
, PT.reservedNames = []
, PT.reservedOpNames = ["="]
, PT.caseSensitive = True
}
load :: FilePath -> IO Config
load f = either (throw . ParseError) (return . topConfig) =<< P.parseFromFile parser f
class Typeable a => Configurable a where
get :: Path -> Config -> a
get p (Config cp m) = fromMaybe (throw ValueError{ errorPath = cp <> p, errorValue = v, errorNeeded = typeRep r}) r where
v = lookup p m
r = config v
config :: Value -> Maybe a
instance Configurable Value where
get p (Config _ m) = lookup p m
config = Just
instance Configurable ConfigMap where
config (Sub m) = Just m
config Empty = Just HM.empty
config _ = Nothing
instance Configurable Config where
get p c = Config (configPath c <> p) $ get p c
config v = topConfig <$> config v
instance Configurable a => Configurable (Maybe a) where
config Empty = Just Nothing
config v = Just <$> config v
instance Configurable Bool where
config (Boolean b) = Just b
config _ = Nothing
instance Configurable Integer where
config (Integer i) = Just i
config _ = Nothing
instance Configurable BS.ByteString where
config (String s) = Just s
config _ = Nothing
instance {-# OVERLAPPABLE #-} Configurable a => Configurable [a] where
config (List l) = mapM config l
config _ = Nothing
instance Configurable T.Text where
config = rightJust . TE.decodeUtf8' <=< config
instance {-# OVERLAPPING #-} Configurable String where
config v = BSC.unpack <$> config v
configBoundedInt :: forall a . (Integral a, Bounded a) => Value -> Maybe a
configBoundedInt = f <=< config where
f i = (i >= toInteger (minBound :: a) && i <= toInteger (maxBound :: a)) `thenUse` fromInteger i
instance Configurable Int where
config = configBoundedInt
infixl 9 !
(!) :: Configurable a => Config -> Path -> a
(!) = flip get
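-- Illustrative usage (not part of the original module): after
--
-- > c <- load "site.conf"
--
-- a lookup such as @c ! "db.port" :: Integer@ returns the value stored at
-- that key, and throws 'ValueError' if the key is missing or holds a value
-- of a different type ("site.conf" and "db.port" are made-up names).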
instance JSON.ToJSON Config where
toJSON = JSON.toJSON . configMap
toEncoding = JSON.toEncoding . configMap
-- instance JSON.ToJSON ConfigMap where
-- toJSON = JSON.object . map (TE.decodeUtf8 *** JSON.toJSON) . HM.toList
-- toEncoding = JSON.pairs . HM.foldrWithKey (\k v -> (TE.decodeUtf8 k JSON..= v <>)) mempty
instance JSON.ToJSON Value where
toJSON Empty = JSON.Null
toJSON (Boolean b) = JSON.Bool b
toJSON (String s) = JSON.String $ TE.decodeUtf8 s
toJSON (Integer i) = JSON.Number $ fromInteger i
toJSON (List l) = JSON.Array $ V.fromList $ map JSON.toJSON l
toJSON (Sub c) = JSON.toJSON c
toEncoding (List l) = JSON.foldable l
toEncoding (Sub c) = JSON.toEncoding c
toEncoding v = JSON.toEncoding $ JSON.toJSON v
| databrary/databrary | src/Store/Config.hs | agpl-3.0 | 7,023 | 0 | 16 | 1,511 | 2,650 | 1,399 | 1,251 | -1 | -1 |
module View.Html
( unsafeBuilder
, lazyByteStringHtml
, byteStringHtml
, byteStringValue
, builderValue
, actionLink
, Query
, actionValue
, actionForm
, (!?)
) where
import qualified Data.ByteString as BS
import qualified Data.ByteString.Builder as BSB
import qualified Data.ByteString.Lazy as BSL
import Network.HTTP.Types (Query, QueryLike(..))
import qualified Text.Blaze.Internal as Markup
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as HA
import qualified Web.Route.Invertible as R
import Blaze.ByteString.Builder.Html.Word (fromHtmlEscapedByteString, fromHtmlEscapedLazyByteString)
import Action.Route
import HTTP.Route
import {-# SOURCE #-} Controller.Angular
unsafeBuilder :: BSB.Builder -> H.Markup
unsafeBuilder = H.unsafeLazyByteString . BSB.toLazyByteString
lazyByteStringHtml :: BSL.ByteString -> H.Markup
lazyByteStringHtml = unsafeBuilder . fromHtmlEscapedLazyByteString
byteStringHtml :: BS.ByteString -> H.Markup
byteStringHtml = unsafeBuilder . fromHtmlEscapedByteString
-- builderHtml :: BSB.Builder -> H.Markup
-- builderHtml = lazyByteStringHtml . BSB.toLazyByteString
unsafeBuilderValue :: BSB.Builder -> H.AttributeValue
unsafeBuilderValue = H.unsafeLazyByteStringValue . BSB.toLazyByteString
lazyByteStringValue :: BSL.ByteString -> H.AttributeValue
lazyByteStringValue = unsafeBuilderValue . fromHtmlEscapedLazyByteString
byteStringValue :: BS.ByteString -> H.AttributeValue
byteStringValue = unsafeBuilderValue . fromHtmlEscapedByteString
builderValue :: BSB.Builder -> H.AttributeValue
builderValue = lazyByteStringValue . BSB.toLazyByteString
actionValue :: QueryLike q => R.RouteAction r a -> r -> q -> H.AttributeValue
actionValue r a q = builderValue $ actionURL Nothing r a $ toQuery q
actionLink :: QueryLike q => R.RouteAction r a -> r -> q -> H.Attribute
actionLink r a = HA.href . actionValue r a
actionForm :: Route r a -> a -> JSOpt -> H.Html -> H.Html
actionForm r a j = H.form
H.! HA.method (H.unsafeByteStringValue $ R.renderParameter $ R.requestMethod rr)
H.!? (not $ BS.null $ R.requestContentType rr, HA.enctype $ byteStringValue $ R.requestContentType rr)
H.! HA.action (builderValue $ routeURL Nothing rr (toQuery j))
where rr = R.requestActionRoute r a
(!?) :: Markup.Attributable h => h -> Maybe H.Attribute -> h
h !? Nothing = h
h !? (Just a) = h H.! a
| databrary/databrary | src/View/Html.hs | agpl-3.0 | 2,389 | 0 | 12 | 334 | 661 | 367 | 294 | 50 | 1 |
import Test.HUnit (Assertion, (@=?), runTestTT, Test(..), Counts(..))
import System.Exit (ExitCode(..), exitWith)
import Triangle (TriangleType(..), triangleType)
exitProperly :: IO Counts -> IO ()
exitProperly m = do
counts <- m
exitWith $ if failures counts /= 0 || errors counts /= 0 then ExitFailure 1 else ExitSuccess
testCase :: String -> Assertion -> Test
testCase label assertion = TestLabel label (TestCase assertion)
main :: IO ()
main = exitProperly $ runTestTT $ TestList
[ TestList triangleTests ]
tri :: Int -> Int -> Int -> TriangleType
tri = triangleType
triangleTests :: [Test]
triangleTests = map TestCase
[ Equilateral @=? tri 2 2 2
, Equilateral @=? tri 10 10 10
, Isosceles @=? tri 3 4 4
, Isosceles @=? tri 4 3 4
, Scalene @=? tri 3 4 5
, Illogical @=? tri 1 1 50
, Illogical @=? tri 1 2 1
]
| mscoutermarsh/exercism_coveralls | assignments/haskell/triangle/triangle_test.hs | agpl-3.0 | 846 | 0 | 12 | 177 | 337 | 179 | 158 | 23 | 2 |
-- Copyright (C) 2017 Red Hat, Inc.
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, see <http://www.gnu.org/licenses/>.
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE TupleSections #-}
module BDCS.Depsolve(Formula(..),
CNFLiteral(..),
CNFAtom(..),
CNFFormula,
formulaToCnf,
solveCNF)
where
import Control.Applicative((<|>))
import Control.Monad.State(State, evalState, get, state)
import Data.Map.Strict(Map)
import qualified Data.Map.Strict as Map
import Data.Maybe(isNothing, mapMaybe)
import Data.Set(Set)
import qualified Data.Set as Set
import Utils.Monad(concatMapM)
-- A logical proposition in negation normal form
-- (i.e., NOT is applied only to atoms, not sub-formulas)
data Formula a = Atom a
| Not a
| Or [Formula a]
| And [Formula a]
deriving(Eq, Show)
-- Conjunctive Normal Form (CNF) is, essentially, an AND of ORs. The formula is of the form
-- (a1 OR a2 ...) AND (b1 OR b2 OR ...) AND ...
-- where each a1, b2, etc is an atom or a not-atom.
-- To keep the size of converted formulas under control, some extra variables are added to represent
-- sub-formulas from the original expression.
data CNFLiteral a = CNFOriginal a
| CNFSubstitute Int
deriving(Eq, Ord, Show)
data CNFAtom a = CNFAtom (CNFLiteral a)
| CNFNot (CNFLiteral a)
deriving(Eq, Ord, Show)
type CNFFormula a = [[CNFAtom a]]
formulaToCnf :: Formula a -> CNFFormula a
formulaToCnf f =
-- wrap the call in a State Int starting at 0 to create a counter for substitution variables
evalState (formulaToCnf' f) 0
where
formulaToCnf' :: Formula a -> State Int (CNFFormula a)
-- easy ones: a becomes AND(OR(a)), NOT(a) becomes AND(OR(NOT(a)))
formulaToCnf' (Atom x) = return [[CNFAtom (CNFOriginal x)]]
formulaToCnf' (Not x) = return [[CNFNot (CNFOriginal x)]]
    -- For an expression of the form And [a1, a2, a3, ...], we need to convert
-- each a1, a2, ... to CNF and concatenate the results.
--
-- In other words, for And [a1, a2], map the list to something like
--
-- [And[Or[a1_or1_1, a1_or1_2],
-- Or[a1_or2_1, a1_or2_2]],
-- And[Or[a2_or1, a2_or1_2],
-- Or[a2_or2_1, a2_or2_2]]]
--
-- which is equivalent to
--
-- And[Or[a1_or1_1, a1_or1_2],
-- Or[a1_or2_1, a1_or2_2],
-- Or[a2_or1_1, a2_or1_2],
-- Or[a2_or2_1, a2_or2_2]]
formulaToCnf' (And andFormulas) = concatMapM formulaToCnf' andFormulas
-- For Or, the equivalent formula is exponentially larger than the original, so instead
-- create an equisatisfiable formula using new substitution variables, via Tseytin transformations.
--
-- For a given expression:
--
-- a1 OR a2 OR a3 ...
--
-- we start out by creating an equisatisfiable expression with new variables:
--
-- (Z1 -> a1) AND (NOT(Z1) -> (a2 OR a3 ...))
--
-- starting with the left side of the AND, the expression is equivalent to
--
-- (NOT(Z1) OR a1)
--
-- and if we can convert a1 into CNF, we get an expression of the form
--
-- NOT(Z1) OR (a1_1 AND a1_2 AND ...)
--
-- where each a1_1, a1_2 etc is an OR. We can then use the distributive property to create
--
-- (NOT(Z1) OR a1_1) AND (NOT(Z1) OR a1_2) AND ...
--
-- which is CNF. Then, for the right hand side of that original AND pair up there, we're
-- left with:
--
-- Z1 OR (a2 OR a3 OR ...)
--
-- so to recurse, we convert (a2 OR a3 OR ...) to CNF, and then convert (Z1 OR (CNF))
-- to CNF via distribution as above. We then have <cnf-of-head> AND <cnf-of-tail>, which is CNF.
-- end of recursion: OR of nothing is nothing, OR of 1 thing is just that thing
formulaToCnf' (Or []) = return [[]]
formulaToCnf' (Or [x]) = formulaToCnf' x
formulaToCnf' (Or (x:xs)) = do
-- Get and increment the counter
subVar <- state $ \i -> (CNFSubstitute i, i+1)
-- recurse on the left hand expression
lhCNF <- formulaToCnf' x
-- distribute NOT(subVar) AND lhCNF by adding NOT(subVar) into each of the OR lists
let lhSubCNF = map (CNFNot subVar:) lhCNF
-- recurse on the right hand side
rhCNF <- formulaToCnf' (Or xs)
-- distribute subVar across the right hand expression
let rhSubCNF = map (CNFAtom subVar:) rhCNF
-- combine the results
return (lhSubCNF ++ rhSubCNF)
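-- A small illustrative example (not from the original file): converting
--
-- > formulaToCnf (And [Atom "a", Or [Atom "b", Not "c"]])
--
-- produces the clauses
--
-- > [ [CNFAtom (CNFOriginal "a")]
-- > , [CNFNot (CNFSubstitute 0), CNFAtom (CNFOriginal "b")]
-- > , [CNFAtom (CNFSubstitute 0), CNFNot (CNFOriginal "c")]
-- > ]
--
-- i.e. the Or is encoded with a single Tseytin substitution variable.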
-- assignments to literals that will satisfy a formula
type DepAssignment a = (a, Bool)
-- internal types for the variable=bool assignments
type AssignmentMap a = Map (CNFLiteral a) Bool
type AssignmentState a = State (AssignmentMap a)
-- if the formula is unsolvable, returns Nothing, other Just the list of assignments
-- This function uses the Davis-Putnam-Logemann-Loveland (DPLL) procedure for satisfying the formula, which is as follows:
-- Repeatedly simplify the formula using unit propagation and pure literal elimination:
-- unit propagation looks for a clause that contains only one literal, assigns it, and then removes clauses satisfied by the assignment
-- for example, in
-- (a OR b OR c) AND (a) AND (b OR ~c)
--
-- (a) appears alone, so it must be true. We can then remove both (a) and (a OR b OR c), as these clauses are now satisfied
-- by a=True.
--
-- pure literal elimination looks for literals that only appear as true or false. In the above example, b is only present
-- in the formula as True (there is no ~b in the formula), so we can assign b=True and then remove all clauses containing b.
--
-- once simplified, pick a literal and assign it to True and try to satisfy the formula. If that doesn't work, assign it to False.
--
-- Repeat until solved.
solveCNF :: Ord a => CNFFormula a -> Maybe [DepAssignment a]
solveCNF formula = evalState (solveCNF' formula) Map.empty
where
-- helper function that takes an assignment map and a formula
solveCNF' :: Ord a => CNFFormula a -> AssignmentState a (Maybe [DepAssignment a])
solveCNF' formula =
-- simplify the formula. simplify will recurse as necessary
simplify formula >>= \case
-- if things failed during simplify, the formula is unsatisfiable
Nothing -> return Nothing
-- All clauses have been satisfied, we're done. Return the assignments
Just [] -> Just <$> assignmentsToList
-- otherwise, try an assignment, or if that fails try the opposite assignment
Just formula' -> guessAndCheck formula'
guessAndCheck :: Ord a => CNFFormula a -> AssignmentState a (Maybe [DepAssignment a])
guessAndCheck f@((firstLiteral:_):_) = do
assignments <- get
return $ try assignments True <|> try assignments False
where
-- Run in a new state so we can backtrack
try assignments val = let
tryAssignments = Map.insert (atomToLiteral firstLiteral) val assignments
in
evalState (solveCNF' f) tryAssignments
simplify :: Ord a => CNFFormula a -> AssignmentState a (Maybe (CNFFormula a))
simplify formula = do
-- pureLiteralEliminate only updates the assignments, the assigned literals are actually
-- removed by unitPropagate.
pureLiteralEliminate Set.empty formula
unitPropagate formula >>= \case
Nothing -> return Nothing
result@(Just upFormula) ->
-- repeat until the formula doesn't change
if formula == upFormula then return result
else simplify upFormula
-- find pure literals and add them to the assignment map. This just updates assignments and does not make a decision as
-- to satisfiability. It works by assuming every new literal it finds is pure and then correcting as needed. The Set
-- argument is the literals that have been found to be unpure (i.e., they appear as both A and ~A)
pureLiteralEliminate :: Ord a => Set (CNFLiteral a) -> CNFFormula a -> AssignmentState a ()
-- end of recursion
pureLiteralEliminate _ [] = return ()
-- end of a clause, move on to the next one
pureLiteralEliminate unpure ([]:ys) = pureLiteralEliminate unpure ys
pureLiteralEliminate unpure ((x:xs):ys) = do
unpure' <- state updateAssignments
pureLiteralEliminate unpure' (xs:ys)
where
updateAssignments assignments = let
literalX = atomToLiteral x
in
case (x, Map.lookup literalX assignments, Set.member literalX unpure) of
-- something we've already marked as unpure, skip it
(_, _, True) -> (unpure, assignments)
-- Not in the map, add it
(CNFAtom a, Nothing, _) -> (unpure, Map.insert a True assignments)
(CNFNot a, Nothing, _) -> (unpure, Map.insert a False assignments)
-- In the map and matches our guess, keep it
(CNFAtom a, Just True, _) -> (unpure, assignments)
(CNFNot a, Just False, _) -> (unpure, assignments)
-- otherwise we guessed wrong. Remove from the map and add to unpure
_ -> (Set.insert literalX unpure, Map.delete literalX assignments)
    unitPropagate :: Ord a => CNFFormula a -> AssignmentState a (Maybe (CNFFormula a))
    -- Nothing left to propagate in this pass; no clauses remain.
    unitPropagate [] = return $ Just []
    -- We have a unit! If it's new, add it to assignments and eliminate the unit
    -- If it's something in assignments, check that it matches
    unitPropagate ([x]:ys) = do
isSatisfiable <- state satisfiable
if isSatisfiable then
unitPropagate ys
else
return Nothing
where
satisfiable assignments = let
literalX = atomToLiteral x
boolX = atomToBool x
lookup = Map.lookup literalX assignments
in
-- if lookup is Nothing, this is a new literal. add it to assignments
if | isNothing lookup -> (True, Map.insert literalX boolX assignments)
-- old literal, matches
| Just boolX == lookup -> (True, assignments)
-- old literal, does not match
| otherwise -> (False, assignments)
-- for clauses with more than one thing:
-- if the clause contains any literal that matches the map, the whole clause is true and we can remove it
-- otherwise, remove any literals that do not match the map, as they cannot be true. If, after removing
-- untrue literals, the clause is empty, the expression is unsolvable.
unitPropagate (clause:ys) = do
assignments <- get
let clauseTrue = any (\atom -> Just (atomToBool atom) == Map.lookup (atomToLiteral atom) assignments) clause
        let clauseFiltered = filter (\atom -> Just (not (atomToBool atom)) /= Map.lookup (atomToLiteral atom) assignments) clause
        if | clauseTrue -> unitPropagate ys
           | null clauseFiltered -> return Nothing
           | otherwise -> fmap (clauseFiltered:) <$> unitPropagate ys
assignmentsToList :: Ord a => AssignmentState a [DepAssignment a]
assignmentsToList = do
-- start by getting everything out of the map as a list of (CNFLiteral, Bool)
assignments <- get
let literalList = Map.foldlWithKey (\acc key val -> (key, val):acc) [] assignments
-- map each (literal, bool) to Maybe (orig, bool), mapMaybe will filter out the Nothings
return $ mapMaybe (\(literal, value) -> (,value) <$> literalToOriginal literal) literalList
-- unwrap an atom
atomToLiteral :: CNFAtom a -> CNFLiteral a
atomToLiteral (CNFAtom x) = x
atomToLiteral (CNFNot x) = x
atomToBool :: CNFAtom a -> Bool
atomToBool (CNFAtom _) = True
atomToBool (CNFNot _) = False
-- unwrap original values, discard substitutes
literalToOriginal :: CNFLiteral a -> Maybe a
literalToOriginal (CNFOriginal x) = Just x
literalToOriginal _ = Nothing
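-- Illustrative example (not part of the original module): for the formula
-- (a OR b) AND (NOT a), written as
--
-- > solveCNF [ [CNFAtom (CNFOriginal 'a'), CNFAtom (CNFOriginal 'b')]
-- >          , [CNFNot (CNFOriginal 'a')] ]
--
-- unit propagation should fix 'a' to False, which forces 'b' to True, so the
-- expected result is Just those two assignments (in some order).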
| dashea/bdcs | importer/BDCS/Depsolve.hs | lgpl-2.1 | 12,771 | 0 | 20 | 3,467 | 2,095 | 1,130 | 965 | 120 | 21 |
-- Copyright 2016 TensorFlow authors.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
-- | This module contains definitions for some built-in TensorFlow operations.
--
-- Note that certain, "stateful" ops like 'variable' and 'assign' return a
-- 'Build' action (e.g., @Build (Tensor Ref a)@ instead of a pure value; the
-- returned 'Tensor's are always rendered in the current 'Build' context. This
-- approach helps us avoid problems with inlining or common subexpression
-- elimination, by writing
--
-- > do
-- > v <- variable []
-- > w <- assign v 3
-- > render $ w * w
--
-- instead of
--
-- > let
-- > v = variable []
-- > w = assign v 3
-- > in w * w
--
-- since the latter could be reasonably transformed by the compiler into (or
-- vice versa)
--
-- > let
-- > v = variable []
-- > w = assign v 3
-- > w' = assign v 3
-- > in w * w'
--
-- Ops should return a 'Build' action if their original 'OpDef' marks them as
-- stateful, or if they take any Refs as input. (This mirrors the rules that
-- TensorFlow uses to avoid common subexpression elimination.)
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module TensorFlow.Ops
( CoreOps.add
, CoreOps.add'
, CoreOps.abs
, CoreOps.abs'
, CoreOps.addN
, CoreOps.addN'
, CoreOps.argMax
, CoreOps.argMax'
, CoreOps.assign
, CoreOps.assign'
, CoreOps.broadcastGradientArgs
, CoreOps.broadcastGradientArgs'
, CoreOps.cast
, CoreOps.cast'
, CoreOps.concat
, CoreOps.concat'
, constant
, constant'
, CoreOps.equal
, CoreOps.equal'
, expandDims
, expandDims'
, initializedVariable
, initializedVariable'
, zeroInitializedVariable
, zeroInitializedVariable'
, CoreOps.fill
, CoreOps.fill'
, CoreOps.identity
, CoreOps.identity'
, CoreOps.matMul
, CoreOps.matMul'
, CoreOps.einsum
, CoreOps.einsum'
, matTranspose
, matTranspose'
, CoreOps.mean
, CoreOps.mean'
, CoreOps.mul
, CoreOps.mul'
, CoreOps.neg
, CoreOps.neg'
, CoreOps.oneHot
, CoreOps.oneHot'
, CoreOps.pack
, CoreOps.pack'
, placeholder
, placeholder'
, CoreOps.range
, CoreOps.range'
, reducedShape
, reduceMean
, reduceMean'
, CoreOps.relu
, CoreOps.relu'
, CoreOps.reluGrad
, CoreOps.reluGrad'
, CoreOps.tanh
, CoreOps.tanhGrad
, CoreOps.reshape
, CoreOps.reshape'
, restore
, restoreFromName
, save
, scalar
, scalar'
, shape
, shape'
, CoreOps.sigmoid
, CoreOps.sigmoidGrad
, CoreOps.sign
, CoreOps.sign'
, CoreOps.size
, CoreOps.size'
, CoreOps.softmax
, CoreOps.softmax'
, CoreOps.softmaxCrossEntropyWithLogits
, CoreOps.softmaxCrossEntropyWithLogits'
, CoreOps.sparseToDense
, CoreOps.sparseToDense'
, CoreOps.sub
, CoreOps.sub'
, CoreOps.sum
, CoreOps.sum'
, reduceSum
, reduceSum'
, CoreOps.transpose
, CoreOps.transpose'
, truncatedNormal
, truncatedNormal'
, CoreOps.variable
, CoreOps.variable'
, vector
, vector'
, zeros
, CoreOps.zerosLike
, CoreOps.zerosLike'
, scalarize
) where
import Data.ByteString (ByteString)
import Data.Complex (Complex)
import Data.Int (Int32, Int64)
import Data.Word (Word16)
import Prelude hiding (abs, sum, concat)
import Data.ProtoLens.Default(def)
import Data.Text.Encoding (encodeUtf8)
import Lens.Family2 ((.~), (&))
import Text.Printf (printf)
import Proto.Tensorflow.Core.Framework.Tensor (TensorProto)
import Proto.Tensorflow.Core.Framework.Tensor_Fields
( dtype
, tensorShape
)
import qualified Proto.Tensorflow.Core.Framework.TensorShape_Fields
as TensorShape
import TensorFlow.Build
import TensorFlow.BuildOp
import TensorFlow.ControlFlow (group)
import TensorFlow.Tensor
import TensorFlow.Types
import qualified TensorFlow.GenOps.Core as CoreOps
import qualified Prelude (abs)
-- TODO: Look into hs-boot refactoring to allow mutually recursive imports.
-- | Must be defined as an orphan because of the dependency order between Ops
-- and Tensor.
--
-- The indirect constraint "v ~ Value" helps disambiguate types, for example in
-- "neg 1 :: Tensor Value Float", it helps find the type of the subexpression
-- "1".
instance ( TensorType a
, Num a
, v ~ Build
, OneOf '[ Double, Float, Int32, Int64
, Complex Float, Complex Double] a) => Num (Tensor v a) where
(+) = CoreOps.add
(*) = CoreOps.mul
(-) = CoreOps.sub
abs = CoreOps.abs
fromInteger = scalar . fromInteger
signum = CoreOps.sign
negate = CoreOps.neg
matTranspose :: TensorType a => Tensor e a -> Tensor Build a
matTranspose = matTranspose' id
matTranspose' :: TensorType a => OpParams -> Tensor v a -> Tensor Build a
matTranspose' params = flip (CoreOps.transpose' params) (vector [1, 0 :: Int32])
placeholder :: (MonadBuild m, TensorType a) => Shape -> m (Tensor Value a)
placeholder = placeholder' id
placeholder' :: forall m a . (MonadBuild m, TensorType a)
=> OpParams -> Shape -> m (Tensor Value a)
placeholder' params pShape
-- Note: we don't use CoreOps.placeholder' since that op isn't stateful,
-- and thus would be CSE'd.
= build $ buildOp [] $ opDef "Placeholder"
& opAttr "dtype" .~ tensorType (undefined :: a)
& opAttr "shape" .~ pShape
& params
-- | Creates a variable initialized to the given value.
-- Initialization happens the next time the session runs.
initializedVariable :: (MonadBuild m, TensorType a)
=> Tensor v a -> m (Tensor Ref a)
initializedVariable = initializedVariable' id
initializedVariable' :: (MonadBuild m, TensorType a)
=> OpParams -> Tensor v a -> m (Tensor Ref a)
initializedVariable' params initializer = do
v <- CoreOps.variable' params [] -- The shape is not known initially.
i <- CoreOps.assign' (opAttr "validate_shape" .~ False) v
initializer
addInitializer =<< group i
return v
-- | Creates a zero-initialized variable with the given shape.
zeroInitializedVariable
:: (MonadBuild m, TensorType a, Num a) =>
TensorFlow.Types.Shape -> m (Tensor TensorFlow.Tensor.Ref a)
zeroInitializedVariable = zeroInitializedVariable' id
zeroInitializedVariable'
:: (MonadBuild m, TensorType a, Num a) =>
OpParams -> TensorFlow.Types.Shape -> m (Tensor TensorFlow.Tensor.Ref a)
zeroInitializedVariable' params = initializedVariable' params . zeros
-- TODO: Support heterogeneous list of tensors.
save :: forall a m v . (Rendered (Tensor v), MonadBuild m, TensorType a)
=> ByteString -- ^ File path.
-> [Tensor v a] -- ^ Tensors to save.
-> m ControlNode
save path xs = build $ do
let toByteStringTensor = scalar . encodeUtf8 . encodeOutput . renderedOutput
let names = fmap toByteStringTensor xs
let types = replicate (length xs) (tensorType (undefined :: a))
names' <- buildInputs $ CoreOps.pack names
xs' <- buildInputs xs
path' <- buildInputs $ scalar path
buildOp [] $ opDef "Save"
& opAttr "T" .~ types
& opInputs .~ (path' ++ names' ++ xs')
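-- Illustrative usage (not part of the original module): inside a session, a
-- checkpoint for an already-rendered tensor could be written with something
-- like
--
-- > run_ =<< save "/tmp/model.ckpt" [w]
--
-- where "/tmp/model.ckpt" and @w@ are made-up example names and 'run_' is
-- assumed to be the session-run function from TensorFlow.Session.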
-- | Restore a tensor's value from a checkpoint file.
--
-- This version allows restoring from a checkpoint file that uses a different
-- tensor name than the variable.
restoreFromName :: forall a m . (MonadBuild m, TensorType a)
=> ByteString -- ^ File path.
-> ByteString -- ^ Tensor name override.
-> Tensor Ref a -- ^ Tensor to restore.
-> m ControlNode
restoreFromName path name x = build $ do
path' <- buildInputs $ scalar path
name' <- buildInputs $ scalar name
restoreOp <- buildOp [] $ opDef "Restore"
& opAttr "dt" .~ tensorType (undefined :: a)
& opInputs .~ (path' ++ name')
group =<< CoreOps.assign x (restoreOp :: Tensor Value a)
-- | Restore a tensor's value from a checkpoint file.
restore :: forall a m . (MonadBuild m, TensorType a)
=> ByteString -- ^ File path.
-> Tensor Ref a -- ^ Tensor to restore.
-> m ControlNode
restore path x = restoreFromName path name x
where
name = encodeUtf8 $ encodeOutput $ renderedOutput x
-- | Create a constant tensor.
--
-- The values should be in row major order, e.g.,
--
-- element 0: index (0, ..., 0)
-- element 1: index (0, ..., 1)
-- ...
constant :: TensorType a => Shape -> [a] -> Tensor Build a
constant = constant' id
constant' :: forall a . TensorType a => OpParams -> Shape -> [a] -> Tensor Build a
constant' params (Shape cShape) values
| invalidLength = error invalidLengthMsg
| otherwise = CoreOps.const' (params . (opAttr "value" .~ typedNode))
where
invalidLength = product cShape /= fromIntegral (length values)
invalidLengthMsg = printf "invalid tensor length: expected %d got %d"
(product cShape)
(length values)
typedNode :: TensorProto
typedNode = def
& dtype .~ tensorType (undefined :: a)
& tensorShape.TensorShape.dim .~
[def & TensorShape.size .~ x | x <- cShape]
& tensorVal .~ values
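-- For example (illustrative only):
--
-- > constant (Shape [2, 2]) [1, 2, 3, 4 :: Float]
--
-- builds a 2x2 constant tensor whose rows are [1, 2] and [3, 4], following
-- the row-major order described above.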
-- | Reshape a N-D tensor down to a scalar.
--
-- See `TensorFlow.GenOps.Core.reshape`.
scalarize :: TensorType a => Tensor v a -> Tensor Build a
scalarize t = CoreOps.reshape t (vector scalarShape)
where
scalarShape = [] :: [Int32]
-- | Sum a tensor down to a scalar.
-- See `TensorFlow.GenOps.Core.sum`
reduceSum :: (OneOf '[ Double, Float, Int32, Int64
, Complex Float, Complex Double] a) =>
Tensor v a -> Tensor Build a
reduceSum x = CoreOps.sum x allAxes
where allAxes = CoreOps.range 0 (CoreOps.rank x :: Tensor Build Int32) 1
reduceSum' :: (OneOf '[ Double, Float, Int32, Int64
, Complex Float, Complex Double] a) =>
OpParams -> Tensor v a -> Tensor Build a
reduceSum' params x = CoreOps.sum' params x allAxes
where allAxes = CoreOps.range 0 (CoreOps.rank x :: Tensor Build Int32) 1
-- | Computes the mean of elements across dimensions of a tensor.
-- See `TensorFlow.GenOps.Core.mean`
reduceMean
:: ( TensorType a
, OneOf '[ Double, Float, Complex Float, Complex Double] a
)
=> Tensor v a -> Tensor Build a
reduceMean = reduceMean' id
reduceMean'
:: ( TensorType a
, OneOf '[ Double, Float, Complex Float, Complex Double] a
)
=> OpParams -> Tensor v a -> Tensor Build a
reduceMean' params x = CoreOps.mean' params x allAxes
where allAxes = CoreOps.range 0 (CoreOps.rank x :: Tensor Build Int32) 1
-- | Create a constant vector.
vector :: TensorType a => [a] -> Tensor Build a
vector = vector' id
vector' :: TensorType a => OpParams -> [a] -> Tensor Build a
vector' params xs = constant' params [fromIntegral $ length xs] xs
-- | Create a constant scalar.
scalar :: TensorType a => a -> Tensor Build a
scalar = scalar' id
scalar' :: TensorType a => OpParams -> a -> Tensor Build a
scalar' params x = constant' params [] [x]
-- | Random tensor from the unit normal distribution with bounded values.
--
-- This is a type-restricted version of 'TensorFlow.GenOps.Core.truncatedNormal'.
truncatedNormal :: (MonadBuild m, OneOf '[Word16, Double, Float] a)
=> Tensor v Int64 -- ^ Shape.
-> m (Tensor Value a)
truncatedNormal = CoreOps.truncatedNormal
truncatedNormal' :: (MonadBuild m, OneOf '[Word16, Double, Float] a)
=> OpParams -> Tensor v Int64 -- ^ Shape.
-> m (Tensor Value a)
truncatedNormal' = CoreOps.truncatedNormal'
zeros :: forall a . (Num a, TensorType a) => Shape -> Tensor Build a
zeros (Shape s) = CoreOps.fill (vector s) (scalar 0)
shape :: TensorType t => Tensor v t -> Tensor Build Int32
shape = CoreOps.shape
shape' :: TensorType t => OpParams -> Tensor v t -> Tensor Build Int32
shape' = CoreOps.shape'
expandDims :: TensorType t => Tensor v1 t -> Tensor v2 Int32 -> Tensor Build t
expandDims = CoreOps.expandDims
expandDims' :: TensorType t => OpParams -> Tensor v1 t -> Tensor v2 Int32 -> Tensor Build t
expandDims' = CoreOps.expandDims'
-- | Helper function for reduction ops (translation of math_ops.reduced_shape).
reducedShape :: (OneOf '[ Int32, Int64 ] t1, OneOf '[ Int32, Int64 ] t2) =>
Tensor v1 t1 -> Tensor v2 t2 -> Tensor Build Int32
reducedShape inputShape axes =
let inputShape32 = toInt32 inputShape -- [2, 3, 5, 7]
axes32 = toInt32 axes -- [1, 2]
toInt32 x = CoreOps.cast x :: Tensor Build Int32
inputRank = CoreOps.size inputShape32 -- 4
axesMod = (axes32 + inputRank) `CoreOps.mod` inputRank
axesShape = shape axesMod -- [2]
in CoreOps.dynamicStitch -- [2, 1, 1, 7]
[CoreOps.range 0 inputRank 1, -- [0, 1, 2, 3]
axesMod] -- [1, 2]
[inputShape32, -- [2, 3, 5, 7]
CoreOps.fill axesShape 1] -- [1, 1]
| tensorflow/haskell | tensorflow-ops/src/TensorFlow/Ops.hs | apache-2.0 | 14,131 | 1 | 15 | 3,589 | 3,276 | 1,783 | 1,493 | -1 | -1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DeriveDataTypeable #-}
module SharedTypes where
import Prelude
import Data.Data
import Fay.Yesod
data Command = GetFib Int (Returns Int)
deriving (Typeable, Data)
| Reyu/CanidComics | fay-shared/SharedTypes.hs | apache-2.0 | 218 | 0 | 8 | 34 | 47 | 28 | 19 | 8 | 0 |
module S3E3 where
import FPPrac.Trees
data Tree1a = Leaf1a Int
| Node1a Int Tree1a Tree1a
data Tree1d = Leaf1d (Int, Int)
| Node1d [Tree1d]
pp1d :: Tree1d -> RoseTree
pp1d (Leaf1d l) = RoseNode (show l) []
pp1d (Node1d ts) = RoseNode "" (map pp1d ts)
-- Exercise 3a
binMirror :: Tree1a -> Tree1a
binMirror (Leaf1a l) = Leaf1a l
binMirror (Node1a n t1 t2) = Node1a n (binMirror t2) (binMirror t1)
-- Exercise 3b
binMirror' :: Tree1d -> Tree1d
binMirror' (Leaf1d (i1, i2)) = Leaf1d (i2, i1)
binMirror' (Node1d ts) = Node1d (map binMirror' (reverse ts))
tree1 = Node1d [Leaf1d (1,2), (Node1d [Leaf1d (1,2), Leaf1d (5,6)]), Leaf1d (3,4)]
ex = showTreeList [pp1d tree1, pp1d $ binMirror' tree1, pp1d $ binMirror' $ binMirror' tree1]
| wouwouwou/module_8 | src/main/haskell/series3/exercise3.hs | apache-2.0 | 736 | 0 | 11 | 154 | 338 | 180 | 158 | 17 | 1 |
data Tree a = Node a (Tree a) (Tree a)
| Empty
deriving (Show)
simpleTree :: Tree String
simpleTree = Node "parent" (Node "left child" Empty Empty)
(Node "right child" Empty Empty)
| EricYT/real-world | src/chapter-3/Tree.hs | apache-2.0 | 237 | 0 | 8 | 88 | 77 | 40 | 37 | 6 | 1 |
{-# LANGUAGE AllowAmbiguousTypes
, DataKinds
, FlexibleInstances
, KindSignatures
, MultiParamTypeClasses
, ScopedTypeVariables
, TypeApplications
#-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.OverloadedLabels
-- Copyright : (c) Adam Gundry 2015-2016
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable (GHC extensions)
--
-- This module defines the 'IsLabel' class is used by the
-- @OverloadedLabels@ extension. See the
-- <https://gitlab.haskell.org/ghc/ghc/wikis/records/overloaded-record-fields/overloaded-labels wiki page>
-- for more details.
--
-- When @OverloadedLabels@ is enabled, if GHC sees an occurrence of
-- the overloaded label syntax @#foo@, it is replaced with
--
-- > fromLabel @"foo" :: alpha
--
-- plus a wanted constraint @IsLabel "foo" alpha@.
--
-- Note that if @RebindableSyntax@ is enabled, the desugaring of
-- overloaded label syntax will make use of whatever @fromLabel@ is in
-- scope.
--
-----------------------------------------------------------------------------
-- Note [Overloaded labels]
-- ~~~~~~~~~~~~~~~~~~~~~~~~
-- An overloaded label is represented by the 'HsOverLabel' constructor
-- of 'HsExpr', which stores the 'FastString' text of the label and an
-- optional id for the 'fromLabel' function to use (if
-- RebindableSyntax is enabled) . The type-checker transforms it into
-- a call to 'fromLabel'. See Note [Type-checking overloaded labels]
-- in TcExpr for more details in how type-checking works.
module GHC.OverloadedLabels
( IsLabel(..)
) where
import GHC.Base ( Symbol )
class IsLabel (x :: Symbol) a where
fromLabel :: a
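-- For illustration (this instance is made up and not part of base): with
-- @OverloadedLabels@ enabled, a user-defined instance such as
--
-- > instance IsLabel "true" Bool where
-- >   fromLabel = True
--
-- lets the label @#true@ be used as a 'Bool', since it elaborates to
--
-- > fromLabel @"true" :: Bool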
| sdiehl/ghc | libraries/base/GHC/OverloadedLabels.hs | bsd-3-clause | 1,826 | 0 | 7 | 342 | 83 | 64 | 19 | 12 | 0 |
-- Example of number-parameterized types
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
module NumberParameterizedTypes
(
RNat
, (/)
) where
import Data.Proxy (Proxy (..))
import GHC.TypeLits (KnownNat, Nat, natVal)
-- Natural number restricted with an upper bound
newtype i `RNat` (n :: Nat) = RNat { unRNat :: i } deriving (Eq, Ord)
type (/) = RNat
toRNat :: forall i n . (Integral i, KnownNat n) => i -> i `RNat` n
toRNat i = if (unRNat (minBound :: i `RNat` n)) <= i &&
i <= (unRNat (maxBound :: i `RNat` n))
then RNat i
           else error "NumberParameterizedTypes.toRNat: bad argument"
instance Show i => Show (i `RNat` n) where
show = show . unRNat
{-|
Num instance is only for literals.
>>> :t 1 :: Int `RNat` 3
1 :: Int `RNat` 3 :: RNat Int 3
>>> 1 :: Int `RNat` 3
1
-}
instance (Integral i, KnownNat n) => Num (i `RNat` n) where
fromInteger = toRNat . fromInteger
{-|
>>> :t [0..2] :: [Int/5]
[0..2] :: [Int/5] :: [Int / 5]
>>> [0..2] :: [Int/5]
[0,1,2]
-}
instance (Integral i, KnownNat n) => Enum (i `RNat` n) where
succ (RNat i) = toRNat (succ i)
fromEnum = fromInteger . toInteger . unRNat
  toEnum = toRNat . fromInteger . toInteger
{-|
>>> minBound :: Int/5
0
>>> maxBound :: Int/5
4
-}
instance (Integral i, KnownNat n) => Bounded (i `RNat` n) where
minBound = RNat 0
maxBound = RNat (fromInteger (natVal (Proxy :: Proxy n)) - 1)
| notae/haskell-exercise | NumberParameterizedTypes.hs | bsd-3-clause | 1,565 | 1 | 13 | 387 | 440 | 251 | 189 | 29 | 2 |
{-# LANGUAGE NoMonomorphismRestriction #-}
-----------------------------------------------------------------------------
-- |
-- Module : LGtk.Demos.PlotDemo.Main
-- Copyright : (c) Daniel Pek 2014
-- License : see LICENSE
--
-- Maintainer : [email protected]
--
-- Contains the entry point 'main', builds up the user interface, and
-- establishes data dependencies between GUI elements.
--
-----------------------------------------------------------------------------
module LGtk.Demos.PlotDemo.Main
( main
) where
import Control.Applicative
import Control.Lens hiding ((#))
import LGtk
import LGtk.Demos.PlotDemo.Plot
canvasSize = 600
lWidth = 1 / fromIntegral canvasSize
-- | 'main' runs the mainWidget monad, which builds up the user interface
--
main :: IO ()
main = runWidget mainWidget
mainWidget = notebook
[ (,) "PDani" $ notebook
[ (,) "Function" $ do
plotState <- newRef defPlotState
let (xmin, xmax) = interval $ viewport . _1 `lensMap` plotState
let (ymin, ymax) = interval $ viewport . _2 `lensMap` plotState
let errormsg = lens (fst . drawPlot lWidth) const `lensMap` plotState
hcat
[ canvas canvasSize canvasSize 1 (const $ return ()) Nothing (readRef plotState) $
\ps -> (snd . drawPlot lWidth) ps
, vcat
[ entry $ equation `lensMap` plotState
, label $ readRef errormsg
, hcat
[ label $ pure "X axis range: "
, entryShow xmin
, entryShow xmax
]
, hcat
[ label $ pure "Y axis range: "
, entryShow ymin
, entryShow ymax
]
]
]
]
]
interval :: (RefClass r, Ord a) => RefSimple r (a, a) -> (RefSimple r a, RefSimple r a)
interval ab = (lens fst set1 `lensMap` ab, lens snd set2 `lensMap` ab) where
set1 (_, b) a = (min b a, b)
set2 (a, _) b = (a, max a b)
| pdani/lgtk-plot-demo | src/LGtk/Demos/PlotDemo/Main.hs | bsd-3-clause | 2,156 | 0 | 20 | 754 | 518 | 283 | 235 | -1 | -1 |
module Data.Function.Instances.Algebra.Monoid where
import Data.Function.Instances.Algebra.Internal
import Algebra.Monoid as M
instance C a => C (k -> a) where
idt = const M.idt
(<*>) = zipFn (M.<*>)
cumulate fs = \x -> M.cumulate $ map ($ x) fs
| TobBrandt/function-instances-algebra | Data/Function/Instances/Algebra/Monoid.hs | bsd-3-clause | 254 | 0 | 9 | 45 | 98 | 58 | 40 | 7 | 0 |
{-|
Free monads build syntax trees. See the example sections for details.
A free monad over a functor resembles a list of that functor:
* 'return' behaves like @[]@ by not using the functor at all
* 'wrap' behaves like @(:)@ by prepending another layer of the functor
* 'liftF' behaves like @singleton@ by creating a list from a single layer of
the functor.
-}
module Control.Monad.Trans.Free (
-- * Usage
-- $usage
-- * Free monad
Free,
FreeF(..),
runFree,
-- * Free monad transformer
FreeT(..),
-- * Free monad operations
wrap,
liftF
-- * Free monad example
-- $freeexample
-- * Free monad transformer example
-- $freetexample
) where
import Control.Applicative
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.Class
import Data.Functor.Identity
{- $usage
You can assemble values of type @Free f a@ or @FreeT f m a@ by hand using
the smart constructors 'return' (from @Control.Monad@) and 'wrap':
> return :: r -> FreeT f m r
> wrap :: f (FreeT f m r) -> FreeT f m r
Use 'runFree' to deconstruct values of type @Free f r@:
> case runFree x of
> Pure a -> ...
> Free w -> ...
Use 'runFreeT' to deconstruct values of type @FreeT f m r@ and bind the
result in the base monad @m@. You can then pattern match against the bound
value:
> do x <- runFreeT f
> case x of
> Pure a -> ...
> Free w -> ...
-}
-- | The signature for 'Free'
data FreeF f a x = Pure a | Free (f x)
{-|
A free monad transformer alternates nesting the base monad @m@ and the base
functor @f@, terminating with a value of type @a@.
* @f@ - The functor that generates the free monad transformer
* @m@ - The base monad
* @a@ - The return value
-}
newtype FreeT f m a = FreeT { runFreeT :: m (FreeF f a (FreeT f m a)) }
instance (Functor f, Monad m) => Functor (FreeT f m) where
fmap = liftM
instance (Functor f, Monad m) => Applicative (FreeT f m) where
pure = return
(<*>) = ap
instance (Functor f, Monad m) => Monad (FreeT f m) where
return = FreeT . return . Pure
m >>= f = FreeT $ do
x <- runFreeT m
runFreeT $ case x of
Pure a -> f a
Free w -> wrap $ fmap (>>= f) w
instance MonadTrans (FreeT f) where
lift = FreeT . liftM Pure
instance (Functor f, MonadIO m) => MonadIO (FreeT f m) where
liftIO = lift . liftIO
-- | Prepend one layer of the functor to the free monad
wrap :: (Monad m) => f (FreeT f m a) -> FreeT f m a
wrap = FreeT . return . Free
-- | Convert one layer of a functor into an operation in the free monad
liftF :: (Functor f, Monad m) => f a -> FreeT f m a
liftF x = wrap $ fmap return x
{-|
@Free f a@ is a list of nested @f@s terminating with a return value of type
@a@.
* @f@ - The functor that generates the free monad
* @a@ - The return value
-}
type Free f = FreeT f Identity
-- | Observation function that exposes the next step
runFree :: Free f r -> FreeF f r (Free f r)
runFree = runIdentity . runFreeT
{- $freeexample
To create a syntax tree, first define the signature for a single step in the
syntax tree:
> data TeletypeF next = PutString String next | GetString (String -> next)
... then make the signature a 'Functor', where 'fmap' applies the given
function to the @next@ step:
> instance Functor TeletypeF where
> fmap f (PutString str x) = PutString str (f x)
> fmap f (GetString k) = GetString (f . k)
The 'Free' type constructor generates the corresponding syntax tree from
this signature:
> type Teletype a = Free TeletypeF a
'liftF' creates primitive operations for building the syntax tree:
> putString :: String -> Teletype ()
> putString str = liftF $ PutString str ()
>
> getString :: Teletype String
> getString = liftF $ GetString id
The syntax tree is automatically a monad, so you can assemble these
operations into larger syntax trees using @do@ notation:
> echo :: Teletype a
> echo = forever $ do
> str <- getString
> putString str
... which is equivalent to the following hand-written syntax tree:
> echo' :: Teletype r
> echo' = wrap $ GetString $ \str -> wrap $ PutString str echo'
You then interpret the syntax tree using 'runFree' to inspect the tree one
step at a time.
> runIO :: Teletype a -> IO a
> runIO t = case runFree t of
> Pure r -> return r
> Free (PutString str t') -> do
> putStrLn str
> runIO t'
> Free (GetString k ) -> do
> str <- getLine
> runIO (k str)
>>> runIO echo
A<Enter>
A
Test<Enter>
Test
...
You can write pure interpreters, too:
> runPure :: Teletype a -> [String] -> [String]
> runPure t strs = case runFree t of
> Pure r -> []
> Free (PutString str t') -> str:runPure t' strs
> Free (GetString k ) -> case strs of
> [] -> []
> str:strs' -> runPure (k str) strs'
>>> runPure echo ["A", "Test"]
["A","Test"]
-}
{- $freetexample
The Free monad transformer 'FreeT' lets us invoke the base monad to build
the syntax tree. For example, you can use 'IO' to prompt the user to select
each step of the syntax tree using the following monad:
> FreeT TeletypeF IO r
Our original primitives actually had the following more polymorphic types,
so you can reuse them:
> putString :: (Monad m) => String -> FreeT TeletypeF m ()
> putString str = liftF $ PutString str ()
>
> getString :: (Monad m) => FreeT TeletypeF m String
> getString = liftF $ GetString id
Now the user can build the syntax tree from the command line:
> prompt :: FreeT TeletypeF IO ()
> prompt = do
>       lift $ putStrLn "Supply the next step:"
> cmd <- lift getLine
> case cmd of
> "forward" -> do
> str <- getString
> putString str
> prompt
> "greet" -> do
> putString "Hello, world!"
> prompt
> _ -> return ()
You can then run the syntax tree as the user builds it:
> -- The 'FreeT' version of 'runIO'
> runTIO :: FreeT TeletypeF IO r -> IO r
> runTIO t = do
> x <- runFreeT t
> case x of
> Pure r -> return r
> Free (PutString str t') -> do
> putStrLn str
> runTIO t'
> Free (GetString k) -> do
> str <- getLine
> runTIO (k str)
>>> runTIO prompt
Supply the next step:
greet<Enter>
Hello, world!
Supply the next step:
forward<Enter>
test<Enter>
test
Supply the next step:
quit<Enter>
-}
| Gabriel439/Haskell-Transformers-Free-Library | Control/Monad/Trans/Free.hs | bsd-3-clause | 6,613 | 0 | 15 | 1,868 | 558 | 310 | 248 | 37 | 1 |
{-# LANGUAGE OverloadedStrings, LambdaCase, RecordWildCards #-}
module Web.RTBBidder (bidderApp) where
import qualified Network.Wai as WAI
import Control.Exception (bracket_)
import qualified Web.RTBBidder.Types as WRB
bidderApp :: WRB.RTBProtocol -> WRB.Bidder -> WAI.Application
bidderApp WRB.RTBProtocol{..} bidder httpreq cont = do
bracket_ (return ()) (return ()) appmain
where
appmain = do
bidreq <- rtbDecodeReq httpreq >>= \case
Right r -> return r
Left l -> error $ "[TODO] implement error handling: " ++ l
bidres <- bidder bidreq
httpres <- rtbEncodeRes bidres
cont httpres
| hiratara/hs-rtb-bidder | src/Web/RTBBidder.hs | bsd-3-clause | 633 | 0 | 15 | 129 | 184 | 94 | 90 | 15 | 2 |
module FilterByRegex
where
import ChunkedFileProcessing
import qualified Data.ByteString.Lazy.Char8 as LB
import qualified Data.ByteString.Char8 as B
import Data.List (foldl')
import Text.Regex.PCRE.Light (compile, match, Regex)
import Control.Monad (forM)
import Control.Parallel.Strategies (NFData(..), rwhnf)
import Control.Exception (finally)
import System.IO
regex = compile pattern []
pattern = B.pack "\\s*\\d*\\.\\d*\\s*3"
toRegex s = compile (B.pack s) []
strict = B.concat . LB.toChunks
filterLines :: Regex -> LB.ByteString -> Bool -> [B.ByteString]
filterLines reg chunk inverse
| inverse = reverse $ foldl' augmentInverse [] (LB.lines chunk)
| otherwise = reverse $ foldl' augment [] (LB.lines chunk)
where augment accum line = case match reg (strict line) [] of
Just _ -> accum
_ -> (strict line):accum
augmentInverse accum line = case match reg (strict line) [] of
Just _ -> (strict line):accum
_ -> accum
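-- Illustrative note (not part of the original file): the Bool argument picks
-- the direction of the filter, e.g.
--
-- > filterLines (toRegex "^#") chunk False
--
-- keeps only the lines of chunk that do NOT match the pattern, while passing
-- True keeps only the matching lines ("^#" is just an example pattern).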
processFile :: FilePath -> Bool -> String -> Int -> IO ()
processFile path inverse r numChunks = do
print $ "processFile with chunks: " ++ show numChunks
print $ "regex was :" ++ r
let regex = toRegex r
(chunks, handles) <- chunkedRead path numChunks
outH <- openFile (path ++ ".out") WriteMode
  results <- forM chunks $ \chunk -> do
    print $ "processing...:" ++ (show $ LB.length chunk)
    B.hPut outH (B.unlines $ filterLines regex chunk inverse)
  (rnf results `seq` return results) `finally` mapM_ hClose handles
hClose outH
| marcmo/filtering | FilterByRegex.hs | bsd-3-clause | 1,631 | 37 | 12 | 422 | 533 | 288 | 245 | 36 | 3 |
{-# LANGUAGE QuasiQuotes #-}
-- # LANGUAGE OverloadedStrings #
module Main (
main
) where
import Control.Applicative ((<$>))
import Control.Monad.Trans (liftIO)
import GHCJS.DOM
(enableInspector, webViewGetDomDocument, runWebGUI)
import GHCJS.DOM.Document (documentGetBody, documentGetHead, documentCreateElement, documentGetElementById)
import GHCJS.DOM.HTMLElement (htmlElementSetInnerHTML, htmlElementSetInnerText, castToHTMLElement)
import GHCJS.DOM.Element (elementOnclick, elementOnkeydown)
import GHCJS.DOM.HTMLParagraphElement
(castToHTMLParagraphElement)
import GHCJS.DOM.Node (nodeAppendChild)
import GHCJS.DOM.EventM (mouseClientXY)
import Text.Blaze.Html.Renderer.Text (renderHtml)
import Text.Blaze.Html
import Data.Text.Lazy (unpack)
import Text.Hamlet
import Data.Text hiding (unpack)
import GHCJS.DOM.HTMLDivElement
import GHCJS.DOM.UIEvent
main = runWebGUI $ \webView -> do
enableInspector webView
Just doc <- webViewGetDomDocument webView
Just body <- documentGetBody doc
Just dHead <- documentGetHead doc
Just bDiv <- fmap castToHTMLElement <$> documentCreateElement doc "p"
htmlElementSetInnerHTML dHead $ unpack . renderHtml $ htmlHead render
htmlElementSetInnerHTML body $ unpack . renderHtml $ htmlSubDoc emailList options render
nodeAppendChild body (Just bDiv)
Just el <- documentGetElementById doc "email-content"
elementOnclick body $ do
x <- mouseClientXY
liftIO $ do
htmlElementSetInnerHTML (castToHTMLElement el) $ unpack . renderHtml $ renderEmail (emailList !! 2) sampleEmail2 render
htmlElementSetInnerHTML body $ unpack . renderHtml $ htmlSubDoc emailList options render
-- print x
putStrLn "You clicked a button!"
return ()
-- elementOnclick body $ do
-- (x,y) <- mouseClientXY
-- liftIO $ do
-- putStrLn "Hello from Native Haskell"
-- Just newParagraph <- fmap castToHTMLParagraphElement <$> documentCreateElement doc "p"
-- htmlElementSetInnerText newParagraph $ "Clicked " ++ show (x,y)
-- nodeAppendChild body (Just newParagraph)
-- return ()
return ()
data MyRoute = Home
render :: MyRoute -> [(Text, Text)] -> Text
render Home _ = (pack "/home")
htmlHead :: HtmlUrl MyRoute
htmlHead = [hamlet|
<link rel="stylesheet" href="http://yui.yahooapis.com/pure/0.5.0/pure-min.css">
<!--[if lte IE 8]>
<link rel="stylesheet" href="css/layouts/email-old-ie.css">
<![endif]-->
<!--[if gt IE 8]><!-->
<link rel="stylesheet" href="css/layouts/email.css">
<!--<![endif]-->
|]
htmlSubDoc :: [Email] -> [(String, Int)] -> HtmlUrl MyRoute
htmlSubDoc emails options = [hamlet|
<div class="content pure-g" id="layout">
<div class="pure-u" id="nav">
<a class="nav-menu-button" href="#">Menu
<div class="nav-inner">
<button class="primary-button pure-button">Compose
<div class="pure-menu pure-menu-open">
<ul>
$forall option <- options
^{renderSideOption option}
<li class="pure-menu-heading">Labels
<li>
<a href="#"><span class="email-label-personal"></span>Personal
<li>
<a href="#"><span class="email-label-work"></span>Work
<li>
<a href="#"><span class= "email-label-travel"></span>Travel
<div class="pure-u-1" id="list">
$forall email <- emails
^{renderEmailDesc email}
<div class="pure-u-1" id="main">
^{renderEmail (emailList !! 0) sampleEmail}
|]
options = [("Inbox", 2), ("Important", 1), ("Sent", 10), ("Drafts", 30), ("Trash", 0)]
renderSideOption :: (String, Int) -> HtmlUrl MyRoute
renderSideOption (name, count) = [hamlet|
<li>
<a href="#">#{name} ^{renderSideOptionCount count}
|]
renderSideOptionCount :: Int -> HtmlUrl MyRoute
renderSideOptionCount 0 = [hamlet| |]
renderSideOptionCount n = [hamlet| <span class="email-count">(#{n})|]
renderEmailDesc :: Email -> HtmlUrl MyRoute
renderEmailDesc (Email name subject desc) = [hamlet|
<div class="email-item pure-g">
<div class="pure-u"><img alt="Yahoo! News' avatar" class="email-avatar" height="64" src="img/common/ynews-avatar.png" width="64">
<div class="pure-u-3-4">
<h5 class="email-name">#{name}
<h4 class="email-subject">#{subject}
<p class="email-desc">#{desc}
|]
renderEmail :: Email -> HtmlUrl MyRoute -> HtmlUrl MyRoute
renderEmail (Email name subject desc) body = [hamlet|
<div class="email-content">
<div class="email-content-header pure-g">
<div class="pure-u-1-2">
<h1 class="email-content-title">Hello from Toronto
<p class="email-content-subtitle">
From Tilo Mitra at <span> 3:56pm, April 3, 2012
<div class="email-content-controls pure-u-1-2">
<button class="secondary-button pure-button">Reply
<button class="secondary-button pure-button">Forward
<button class="secondary-button pure-button">Move to
<div class="email-content-body" id="email-content">
^{body}
|]
emailList = [Email "Eric Ferraiuolo" "Re: Pull Requests" "Hey, I had some feedback for pull request #51. We should center the menu so it looks better on mobile."
,Email "YUI Library" "You have 5 bugs assigned to you" "Duis aute irure dolor in reprehenderit in voluptate velit essecillum dolore eu fugiat nulla."
,Email "Reid Burke" "Re: Design Language" "Excepteur sint occaecat cupidatat non proident, sunt in culpa."
,Email "Andrew Wooldridge" "YUI Blog Updates?" "Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip."
,Email "Yahoo! Finance" "How to protect your finances from winter storms" "Mauris tempor mi vitae sem aliquet pharetra. Fusce in dui purus, nec malesuada mauris."
,Email "Yahoo! News" "Summary for April 3rd, 2012" "We found 10 news articles that you may like."]
sampleEmail = [hamlet|
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.\n
<p>Duis aute irure dolor in reprehenderit in voluptate velit essecillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
<p>Aliquam ac feugiat dolor. Proin mattis massa sit amet enim iaculis tincidunt. Mauris tempor mi vitae sem aliquet pharetra. Fusce in dui purus, nec malesuada mauris. Curabitur ornare arcu quis mi blandit laoreet. Vivamus imperdiet fermentum mauris, ac posuere urna tempor at. Duis pellentesque justo ac sapien aliquet egestas. Morbi enim mi, porta eget ullamcorper at, pharetra id lorem.
<p>Donec sagittis dolor ut quam pharetra pretium varius in nibh. Suspendisse potenti. Donec imperdiet, velit vel adipiscing bibendum, leo eros tristique augue, eu rutrum lacus sapien vel quam. Nam orci arcu, luctus quis vestibulum ut, ullamcorper ut enim. Morbi semper erat quis orci aliquet condimentum. Nam interdum mauris sed massa dignissim rhoncus.
<p>Regards,<br>Tilo
|]
sampleEmail2 = [hamlet|
<p>This is a third email
|]
data Email = Email {
name :: String
, subject :: String
, desc :: String
} deriving (Eq, Show) | KevinCotrone/testing-ghcjs | src/Main.hs | bsd-3-clause | 7,664 | 0 | 20 | 1,706 | 890 | 497 | 393 | 69 | 1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
module Reflex.Dom.Widget.Input (module Reflex.Dom.Widget.Input, def, (&), (.~)) where
import Prelude
import Control.Lens hiding (element, ix)
import Control.Monad.Fix
import Control.Monad.IO.Class
import Control.Monad.Reader
import qualified Data.Bimap as Bimap
import Data.Default
import Data.Dependent.Map (DMap)
import qualified Data.Dependent.Map as DMap
import Data.Functor.Misc
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Semigroup
import Data.Text (Text)
import qualified Data.Text as T
import GHCJS.DOM.Element (castToElement)
import qualified GHCJS.DOM.Element as Element
import GHCJS.DOM.EventM (on)
import qualified GHCJS.DOM.FileList as FileList
import GHCJS.DOM.HTMLInputElement (HTMLInputElement)
import GHCJS.DOM.HTMLTextAreaElement (HTMLTextAreaElement)
import GHCJS.DOM.Types (File)
import Reflex.Class
import Reflex.Dynamic
import Reflex.Dom.Builder.Class
import Reflex.Dom.Builder.Immediate
import Reflex.Dom.Class
import Reflex.PerformEvent.Class
import Reflex.PostBuild.Class
import Reflex.Dom.Widget.Basic
import qualified Text.Read as T
import qualified GHCJS.DOM.Event as Event
import qualified GHCJS.DOM.HTMLInputElement as Input
data TextInput t
= TextInput { _textInput_value :: Dynamic t Text
, _textInput_input :: Event t Text
, _textInput_keypress :: Event t Int
, _textInput_keydown :: Event t Int
, _textInput_keyup :: Event t Int
, _textInput_hasFocus :: Dynamic t Bool
, _textInput_builderElement :: InputElement EventResult GhcjsDomSpace t
}
_textInput_element :: TextInput t -> HTMLInputElement
_textInput_element = _inputElement_raw . _textInput_builderElement
instance Reflex t => HasDomEvent t (TextInput t) en where
type DomEventType (TextInput t) en = DomEventType (InputElement EventResult GhcjsDomSpace t) en
domEvent en = domEvent en . _textInput_builderElement
data TextInputConfig t
= TextInputConfig { _textInputConfig_inputType :: Text
, _textInputConfig_initialValue :: Text
, _textInputConfig_setValue :: Event t Text
, _textInputConfig_attributes :: Dynamic t (Map Text Text)
}
instance Reflex t => Default (TextInputConfig t) where
{-# INLINABLE def #-}
def = TextInputConfig { _textInputConfig_inputType = "text"
, _textInputConfig_initialValue = ""
, _textInputConfig_setValue = never
, _textInputConfig_attributes = constDyn mempty
}
-- | Create an input whose value is a string. By default, the "type" attribute is
-- set to "text", but it can be changed using the _textInputConfig_inputType field.
-- Note that only types for which the value is always a string will work - types
-- whose value may be null will not work properly with this widget.
{-# INLINABLE textInput #-}
textInput :: (DomBuilder t m, PostBuild t m, DomBuilderSpace m ~ GhcjsDomSpace) => TextInputConfig t -> m (TextInput t)
textInput (TextInputConfig inputType initial eSetValue dAttrs) = do
modifyAttrs <- dynamicAttributesToModifyAttributes $ fmap (Map.insert "type" inputType) dAttrs
i <- inputElement $ def
& inputElementConfig_initialValue .~ initial
& inputElementConfig_setValue .~ eSetValue
& inputElementConfig_elementConfig . elementConfig_modifyAttributes .~ fmap mapKeysToAttributeName modifyAttrs
return $ TextInput
{ _textInput_value = _inputElement_value i
, _textInput_input = _inputElement_input i
, _textInput_keypress = domEvent Keypress i
, _textInput_keydown = domEvent Keydown i
, _textInput_keyup = domEvent Keyup i
, _textInput_hasFocus = _inputElement_hasFocus i
, _textInput_builderElement = i
}
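-- Editor's addition, not part of the upstream module: a minimal usage sketch
-- for 'textInput'. The helper name '_usageTextInput' and the chosen attributes
-- are illustrative only; record syntax is used so the sketch does not rely on
-- the lenses generated by the 'makeLenses' splice further down this file.
_usageTextInput :: (DomBuilder t m, PostBuild t m, DomBuilderSpace m ~ GhcjsDomSpace) => m ()
_usageTextInput = do
  -- A password field with a placeholder; its value is never set from outside.
  ti <- textInput $ def { _textInputConfig_inputType = "password"
                        , _textInputConfig_attributes = constDyn ("placeholder" =: "secret")
                        }
  -- Mirror the current contents of the field as text.
  dynText $ _textInput_value ti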
{-# INLINE textInputGetEnter #-}
{-# DEPRECATED textInputGetEnter "Use 'keypress Enter' instead" #-}
textInputGetEnter :: Reflex t => TextInput t -> Event t ()
textInputGetEnter = keypress Enter
{-# INLINABLE keypress #-}
keypress :: (Reflex t, HasDomEvent t e 'KeypressTag, DomEventType e 'KeypressTag ~ Int) => Key -> e -> Event t ()
keypress key i = fmapMaybe (\n -> if keyCodeLookup n == key then Just () else Nothing) $ domEvent Keypress i
data RangeInputConfig t
= RangeInputConfig { _rangeInputConfig_initialValue :: Float
, _rangeInputConfig_setValue :: Event t Float
, _rangeInputConfig_attributes :: Dynamic t (Map Text Text)
}
instance Reflex t => Default (RangeInputConfig t) where
{-# INLINABLE def #-}
def = RangeInputConfig { _rangeInputConfig_initialValue = 0
, _rangeInputConfig_setValue = never
, _rangeInputConfig_attributes = constDyn mempty
}
data RangeInput t
= RangeInput { _rangeInput_value :: Dynamic t Float
, _rangeInput_input :: Event t Float
, _rangeInput_mouseup :: Event t (Int, Int)
, _rangeInput_hasFocus :: Dynamic t Bool
, _rangeInput_element :: HTMLInputElement
}
-- | Create an input whose value is a float.
-- https://www.w3.org/wiki/HTML/Elements/input/range
{-# INLINABLE rangeInput #-}
rangeInput :: (DomBuilder t m, PostBuild t m, DomBuilderSpace m ~ GhcjsDomSpace) => RangeInputConfig t -> m (RangeInput t)
rangeInput (RangeInputConfig initial eSetValue dAttrs) = do
modifyAttrs <- dynamicAttributesToModifyAttributes $ fmap (Map.insert "type" "range") dAttrs
i <- inputElement $ def
& inputElementConfig_initialValue .~ (T.pack . show $ initial)
& inputElementConfig_setValue .~ (T.pack . show <$> eSetValue)
& inputElementConfig_elementConfig . elementConfig_modifyAttributes .~ fmap mapKeysToAttributeName modifyAttrs
return $ RangeInput
{ _rangeInput_value = read . T.unpack <$> _inputElement_value i
, _rangeInput_input = read . T.unpack <$> _inputElement_input i
, _rangeInput_mouseup = domEvent Mouseup i
, _rangeInput_hasFocus = _inputElement_hasFocus i
, _rangeInput_element = _inputElement_raw i
}
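-- Editor's addition (illustrative sketch, not upstream code): 'rangeInput' used
-- as a slider over [0, 100]. "min", "max" and "step" are ordinary HTML attributes
-- of <input type="range">; the helper name '_usageRangeInput' is invented.
_usageRangeInput :: (DomBuilder t m, PostBuild t m, DomBuilderSpace m ~ GhcjsDomSpace) => m (Dynamic t Float)
_usageRangeInput = do
  ri <- rangeInput $ def { _rangeInputConfig_initialValue = 50
                         , _rangeInputConfig_attributes =
                             constDyn (Map.fromList [("min", "0"), ("max", "100"), ("step", "1")])
                         }
  -- Expose the slider position as a Dynamic Float.
  return $ _rangeInput_value ri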
data TextAreaConfig t
= TextAreaConfig { _textAreaConfig_initialValue :: Text
, _textAreaConfig_setValue :: Event t Text
, _textAreaConfig_attributes :: Dynamic t (Map Text Text)
}
instance Reflex t => Default (TextAreaConfig t) where
{-# INLINABLE def #-}
def = TextAreaConfig { _textAreaConfig_initialValue = ""
, _textAreaConfig_setValue = never
, _textAreaConfig_attributes = constDyn mempty
}
data TextArea t
= TextArea { _textArea_value :: Dynamic t Text
, _textArea_input :: Event t Text
, _textArea_hasFocus :: Dynamic t Bool
, _textArea_keypress :: Event t Int
, _textArea_element :: HTMLTextAreaElement
}
{-# INLINABLE textArea #-}
textArea :: (DomBuilder t m, PostBuild t m, DomBuilderSpace m ~ GhcjsDomSpace) => TextAreaConfig t -> m (TextArea t)
textArea (TextAreaConfig initial eSet attrs) = do
modifyAttrs <- dynamicAttributesToModifyAttributes attrs
i <- textAreaElement $ def
& textAreaElementConfig_initialValue .~ initial
& textAreaElementConfig_setValue .~ eSet
& textAreaElementConfig_elementConfig . elementConfig_modifyAttributes .~ fmap mapKeysToAttributeName modifyAttrs
return $ TextArea
{ _textArea_value = _textAreaElement_value i
, _textArea_input = _textAreaElement_input i
, _textArea_keypress = domEvent Keypress i
, _textArea_hasFocus = _textAreaElement_hasFocus i
, _textArea_element = _textAreaElement_raw i
}
data CheckboxConfig t
= CheckboxConfig { _checkboxConfig_setValue :: Event t Bool
, _checkboxConfig_attributes :: Dynamic t (Map Text Text)
}
instance Reflex t => Default (CheckboxConfig t) where
{-# INLINABLE def #-}
def = CheckboxConfig { _checkboxConfig_setValue = never
, _checkboxConfig_attributes = constDyn mempty
}
data Checkbox t
= Checkbox { _checkbox_value :: Dynamic t Bool
, _checkbox_change :: Event t Bool
}
-- | Create an editable checkbox
-- Note: if the "type" or "checked" attributes are provided as attributes, they will be ignored
{-# INLINABLE checkbox #-}
checkbox :: (DomBuilder t m, PostBuild t m) => Bool -> CheckboxConfig t -> m (Checkbox t)
checkbox checked config = do
let permanentAttrs = "type" =: "checkbox"
dAttrs = Map.delete "checked" . Map.union permanentAttrs <$> _checkboxConfig_attributes config
modifyAttrs <- dynamicAttributesToModifyAttributes dAttrs
i <- inputElement $ def
& inputElementConfig_initialChecked .~ checked
& inputElementConfig_setChecked .~ _checkboxConfig_setValue config
& inputElementConfig_elementConfig . elementConfig_initialAttributes .~ Map.mapKeys (AttributeName Nothing) permanentAttrs
& inputElementConfig_elementConfig . elementConfig_modifyAttributes .~ fmap mapKeysToAttributeName modifyAttrs
return $ Checkbox
{ _checkbox_value = _inputElement_checked i
, _checkbox_change = _inputElement_checkedChange i
}
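-- Editor's addition (illustrative sketch, not upstream code): the simplest call
-- to 'checkbox', initially ticked and never forced from outside; the helper name
-- '_usageCheckbox' is invented for the example.
_usageCheckbox :: (DomBuilder t m, PostBuild t m) => m (Dynamic t Bool)
_usageCheckbox = do
  cb <- checkbox True def
  return $ _checkbox_value cb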
type family CheckboxViewEventResultType (en :: EventTag) :: * where
CheckboxViewEventResultType 'ClickTag = Bool
CheckboxViewEventResultType t = EventResultType t
regularToCheckboxViewEventType :: EventName t -> EventResultType t -> CheckboxViewEventResultType t
regularToCheckboxViewEventType en r = case en of
Click -> error "regularToCheckboxViewEventType: EventName Click should never be encountered"
Abort -> r
Blur -> r
Change -> r
Contextmenu -> r
Dblclick -> r
Drag -> r
Dragend -> r
Dragenter -> r
Dragleave -> r
Dragover -> r
Dragstart -> r
Drop -> r
Error -> r
Focus -> r
Input -> r
Invalid -> r
Keydown -> r
Keypress -> r
Keyup -> r
Load -> r
Mousedown -> r
Mouseenter -> r
Mouseleave -> r
Mousemove -> r
Mouseout -> r
Mouseover -> r
Mouseup -> r
Mousewheel -> r
Scroll -> r
Select -> r
Submit -> r
Wheel -> r
Beforecut -> r
Cut -> r
Beforecopy -> r
Copy -> r
Beforepaste -> r
Paste -> r
Reset -> r
Search -> r
Selectstart -> r
Touchstart -> r
Touchmove -> r
Touchend -> r
Touchcancel -> r
newtype CheckboxViewEventResult en = CheckboxViewEventResult { unCheckboxViewEventResult :: CheckboxViewEventResultType en }
--TODO
{-# INLINABLE checkboxView #-}
checkboxView :: forall t m. (DomBuilder t m, DomBuilderSpace m ~ GhcjsDomSpace, PostBuild t m, MonadHold t m) => Dynamic t (Map Text Text) -> Dynamic t Bool -> m (Event t Bool)
checkboxView dAttrs dValue = do
let permanentAttrs = "type" =: "checkbox"
modifyAttrs <- dynamicAttributesToModifyAttributes $ fmap (Map.union permanentAttrs) dAttrs
postBuild <- getPostBuild
let filters :: DMap EventName (GhcjsEventFilter CheckboxViewEventResult)
filters = DMap.singleton Click $ GhcjsEventFilter $ \(GhcjsDomEvent evt) -> do
Just t <- Event.getTarget evt
b <- Input.getChecked $ Input.castToHTMLInputElement t
return $ (,) preventDefault $ return $ Just $ CheckboxViewEventResult b
elementConfig :: ElementConfig CheckboxViewEventResult t m
elementConfig = (def :: ElementConfig EventResult t m)
{ _elementConfig_modifyAttributes = fmap mapKeysToAttributeName modifyAttrs
, _elementConfig_initialAttributes = Map.mapKeys (AttributeName Nothing) permanentAttrs
, _elementConfig_eventSpec = GhcjsEventSpec
{ _ghcjsEventSpec_filters = filters
, _ghcjsEventSpec_handler = \(en, GhcjsDomEvent evt) -> case en of
Click -> error "impossible"
_ -> do
Just e <- withIsEvent en $ Event.getTarget evt
mr <- runReaderT (defaultDomEventHandler (castToElement e) en) evt
return $ ffor mr $ \(EventResult r) -> CheckboxViewEventResult $ regularToCheckboxViewEventType en r
}
}
inputElementConfig :: InputElementConfig CheckboxViewEventResult t m
inputElementConfig = (def :: InputElementConfig EventResult t m)
& inputElementConfig_setChecked .~ leftmost [updated dValue, tag (current dValue) postBuild]
& inputElementConfig_elementConfig .~ elementConfig
i <- inputElement inputElementConfig
return $ unCheckboxViewEventResult <$> select (_element_events $ _inputElement_element i) (WrapArg Click)
data FileInput d t
= FileInput { _fileInput_value :: Dynamic t [File]
, _fileInput_element :: RawInputElement d
}
data FileInputConfig t
= FileInputConfig { _fileInputConfig_attributes :: Dynamic t (Map Text Text)
}
instance Reflex t => Default (FileInputConfig t) where
def = FileInputConfig { _fileInputConfig_attributes = constDyn mempty
}
fileInput :: forall t m. (MonadIO m, MonadFix m, MonadHold t m, TriggerEvent t m, DomBuilder t m, PostBuild t m, DomBuilderSpace m ~ GhcjsDomSpace)
=> FileInputConfig t -> m (FileInput (DomBuilderSpace m) t)
fileInput config = do
let insertType = Map.insert "type" "file"
dAttrs = insertType <$> _fileInputConfig_attributes config
modifyAttrs <- dynamicAttributesToModifyAttributes dAttrs
let filters = DMap.singleton Change . GhcjsEventFilter $ \_ -> do
return . (,) mempty $ return . Just $ EventResult ()
elCfg = (def :: ElementConfig EventResult t m)
& modifyAttributes .~ fmap mapKeysToAttributeName modifyAttrs
& elementConfig_eventSpec . ghcjsEventSpec_filters .~ filters
cfg = (def :: InputElementConfig EventResult t m) & inputElementConfig_elementConfig .~ elCfg
eRaw <- inputElement cfg
let e = _inputElement_raw eRaw
eChange <- wrapDomEvent e (`on` Element.change) $ do
Just files <- Input.getFiles e
len <- FileList.getLength files
mapM (fmap (fromMaybe (error "fileInput: fileList.item returned null")) . FileList.item files) [0 .. len-1]
dValue <- holdDyn [] eChange
return $ FileInput
{ _fileInput_value = dValue
, _fileInput_element = e
}
data Dropdown t k
= Dropdown { _dropdown_value :: Dynamic t k
, _dropdown_change :: Event t k
}
data DropdownConfig t k
= DropdownConfig { _dropdownConfig_setValue :: Event t k
, _dropdownConfig_attributes :: Dynamic t (Map Text Text)
}
instance Reflex t => Default (DropdownConfig t k) where
def = DropdownConfig { _dropdownConfig_setValue = never
, _dropdownConfig_attributes = constDyn mempty
}
type family DropdownViewEventResultType (en :: EventTag) :: * where
DropdownViewEventResultType 'ChangeTag = Text
DropdownViewEventResultType t = EventResultType t
newtype DropdownViewEventResult en = DropdownViewEventResult { unDropdownViewEventResult :: DropdownViewEventResultType en }
regularToDropdownViewEventType :: EventName t -> EventResultType t -> DropdownViewEventResultType t
regularToDropdownViewEventType en r = case en of
Change -> error "regularToDropdownViewEventType: EventName Change should never be encountered"
Abort -> r
Blur -> r
Click -> r
Contextmenu -> r
Dblclick -> r
Drag -> r
Dragend -> r
Dragenter -> r
Dragleave -> r
Dragover -> r
Dragstart -> r
Drop -> r
Error -> r
Focus -> r
Input -> r
Invalid -> r
Keydown -> r
Keypress -> r
Keyup -> r
Load -> r
Mousedown -> r
Mouseenter -> r
Mouseleave -> r
Mousemove -> r
Mouseout -> r
Mouseover -> r
Mouseup -> r
Mousewheel -> r
Scroll -> r
Select -> r
Submit -> r
Wheel -> r
Beforecut -> r
Cut -> r
Beforecopy -> r
Copy -> r
Beforepaste -> r
Paste -> r
Reset -> r
Search -> r
Selectstart -> r
Touchstart -> r
Touchmove -> r
Touchend -> r
Touchcancel -> r
--TODO: We should allow the user to specify an ordering instead of relying on the ordering of the Map
-- | Create a dropdown box
-- The first argument gives the initial value of the dropdown; if it is not present in the map of options provided, it will be added with an empty string as its text
dropdown :: forall k t m. (DomBuilder t m, MonadFix m, MonadHold t m, PostBuild t m, Ord k) => k -> Dynamic t (Map k Text) -> DropdownConfig t k -> m (Dropdown t k)
dropdown k0 options (DropdownConfig setK attrs) = do
optionsWithAddedKeys <- fmap (zipDynWith Map.union options) $ foldDyn Map.union (k0 =: "") $ fmap (=: "") setK
defaultKey <- holdDyn k0 setK
let (indexedOptions, ixKeys) = splitDynPure $ ffor optionsWithAddedKeys $ \os ->
let xs = fmap (\(ix, (k, v)) -> ((ix, k), ((ix, k), v))) $ zip [0::Int ..] $ Map.toList os
in (Map.fromList $ map snd xs, Bimap.fromList $ map fst xs)
modifyAttrs <- dynamicAttributesToModifyAttributes attrs
let cfg = def
& selectElementConfig_elementConfig . elementConfig_modifyAttributes .~ fmap mapKeysToAttributeName modifyAttrs
& selectElementConfig_setValue .~ fmap (T.pack . show) (attachPromptlyDynWithMaybe (flip Bimap.lookupR) ixKeys setK)
(eRaw, _) <- selectElement cfg $ listWithKey indexedOptions $ \(ix, k) v -> do
let optionAttrs = fmap (\dk -> "value" =: T.pack (show ix) <> if dk == k then "selected" =: "selected" else mempty) defaultKey
elDynAttr "option" optionAttrs $ dynText v
let lookupSelected ks v = do
key <- T.readMaybe $ T.unpack v
Bimap.lookup key ks
let eChange = attachPromptlyDynWith lookupSelected ixKeys $ _selectElement_change eRaw
let readKey keys mk = fromMaybe k0 $ do
k <- mk
guard $ Bimap.memberR k keys
return k
dValue <- fmap (zipDynWith readKey ixKeys) $ holdDyn (Just k0) $ leftmost [eChange, fmap Just setK]
return $ Dropdown dValue (attachPromptlyDynWith readKey ixKeys eChange)
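-- Editor's addition (illustrative sketch, not upstream code): a 'dropdown' over a
-- fixed map of options. The Int keys and colour labels are invented; because the
-- initial key 1 is already among the options, the widget does not have to add an
-- empty-label entry for it.
_usageDropdown :: (DomBuilder t m, MonadFix m, MonadHold t m, PostBuild t m) => m (Dynamic t Int)
_usageDropdown = do
  let opts = constDyn $ Map.fromList [(1 :: Int, "red"), (2, "green"), (3, "blue")]
  d <- dropdown 1 opts def
  return $ _dropdown_value d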
concat <$> mapM makeLenses
[ ''TextAreaConfig
, ''TextArea
, ''TextInputConfig
, ''TextInput
, ''RangeInputConfig
, ''RangeInput
, ''FileInputConfig
, ''FileInput
, ''DropdownConfig
, ''Dropdown
, ''CheckboxConfig
, ''Checkbox
]
instance HasAttributes (TextAreaConfig t) where
type Attrs (TextAreaConfig t) = Dynamic t (Map Text Text)
attributes = textAreaConfig_attributes
instance HasAttributes (TextInputConfig t) where
type Attrs (TextInputConfig t) = Dynamic t (Map Text Text)
attributes = textInputConfig_attributes
instance HasAttributes (RangeInputConfig t) where
type Attrs (RangeInputConfig t) = Dynamic t (Map Text Text)
attributes = rangeInputConfig_attributes
instance HasAttributes (DropdownConfig t k) where
type Attrs (DropdownConfig t k) = Dynamic t (Map Text Text)
attributes = dropdownConfig_attributes
instance HasAttributes (CheckboxConfig t) where
type Attrs (CheckboxConfig t) = Dynamic t (Map Text Text)
attributes = checkboxConfig_attributes
instance HasAttributes (FileInputConfig t) where
type Attrs (FileInputConfig t) = Dynamic t (Map Text Text)
attributes = fileInputConfig_attributes
class HasSetValue a where
type SetValue a :: *
setValue :: Lens' a (SetValue a)
instance HasSetValue (TextAreaConfig t) where
type SetValue (TextAreaConfig t) = Event t Text
setValue = textAreaConfig_setValue
instance HasSetValue (TextInputConfig t) where
type SetValue (TextInputConfig t) = Event t Text
setValue = textInputConfig_setValue
instance HasSetValue (RangeInputConfig t) where
type SetValue (RangeInputConfig t) = Event t Float
setValue = rangeInputConfig_setValue
instance HasSetValue (DropdownConfig t k) where
type SetValue (DropdownConfig t k) = Event t k
setValue = dropdownConfig_setValue
instance HasSetValue (CheckboxConfig t) where
type SetValue (CheckboxConfig t) = Event t Bool
setValue = checkboxConfig_setValue
class HasValue a where
type Value a :: *
value :: a -> Value a
instance HasValue (InputElement er d t) where
type Value (InputElement er d t) = Dynamic t Text
value = _inputElement_value
instance HasValue (TextAreaElement er d t) where
type Value (TextAreaElement er d t) = Dynamic t Text
value = _textAreaElement_value
instance HasValue (TextArea t) where
type Value (TextArea t) = Dynamic t Text
value = _textArea_value
instance HasValue (TextInput t) where
type Value (TextInput t) = Dynamic t Text
value = _textInput_value
instance HasValue (FileInput d t) where
type Value (FileInput d t) = Dynamic t [File]
value = _fileInput_value
instance HasValue (Dropdown t k) where
type Value (Dropdown t k) = Dynamic t k
value = _dropdown_value
instance HasValue (Checkbox t) where
type Value (Checkbox t) = Dynamic t Bool
value = _checkbox_value
{-
type family Controller sm t a where
Controller Edit t a = (a, Event t a) -- Initial value and setter
Controller View t a = Dynamic t a -- Value (always)
type family Output sm t a where
Output Edit t a = Dynamic t a -- Value (always)
Output View t a = Event t a -- Requested changes
data CheckboxConfig sm t
= CheckboxConfig { _checkbox_input :: Controller sm t Bool
, _checkbox_attributes :: Attributes
}
instance Reflex t => Default (CheckboxConfig Edit t) where
def = CheckboxConfig (False, never) mempty
data Checkbox sm t
= Checkbox { _checkbox_output :: Output sm t Bool
}
data StateMode = Edit | View
--TODO: There must be a more generic way to get this witness and allow us to case on the type-level StateMode
data StateModeWitness (sm :: StateMode) where
EditWitness :: StateModeWitness Edit
ViewWitness :: StateModeWitness View
class HasStateModeWitness (sm :: StateMode) where
stateModeWitness :: StateModeWitness sm
instance HasStateModeWitness Edit where
stateModeWitness = EditWitness
instance HasStateModeWitness View where
stateModeWitness = ViewWitness
-}
| manyoo/reflex-dom | src/Reflex/Dom/Widget/Input.hs | bsd-3-clause | 22,415 | 0 | 25 | 4,975 | 5,569 | 2,908 | 2,661 | -1 | -1 |
{-# LANGUAGE CPP #-}
-- #define DEBUG
{-|
    Module      :  AERN2.PPoly.Tests
    Description :  Tests for piecewise-polynomial (PPoly) arithmetic
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : portable
  Tests for piecewise polynomials (PPoly)
To run the tests using stack, execute:
@
stack test aern2-fun --test-arguments "-a 100 -m ChPoly"
@
-}
module AERN2.PPoly.Tests
-- (
-- specChPoly, tPPoly
-- , ChPolyConstruction(..)
-- , chPolyFromOps
-- , chPolyFromOpsWithDeg
-- , arbitraryWithMinOpsDom
-- , arbitraryWithDegDom
-- , makeFnPositive
-- , makeFnSmallRange
-- , makeFnPositiveSmallRange
-- )
where
#ifdef DEBUG
import Debug.Trace (trace)
#define maybeTrace trace
#else
#define maybeTrace (\ (_ :: String) t -> t)
#endif
import MixedTypesNumPrelude
-- import qualified Prelude as P
-- import Data.Ratio
import Text.Printf
-- import qualified Data.Set as Set
import Test.Hspec
import Test.QuickCheck
-- import qualified Test.Hspec.SmallCheck as SC
--
import AERN2.MP
import AERN2.MP.Dyadic
import AERN2.MP.Ball.Tests
import AERN2.Interval
import AERN2.RealFun.Operations
import AERN2.RealFun.Tests
-- import AERN2.RealFun.SineCosine (sineWithAccuracyGuide)
-- import AERN2.Poly.Basics
import qualified AERN2.Poly.Cheb as ChPoly
import AERN2.PPoly.Type
import AERN2.PPoly.Eval ()
import AERN2.PPoly.Division
import AERN2.PPoly.Maximum (minimumOptimisedWithAccuracy, maximumOptimisedWithAccuracy)
data PPolyConstruction =
PPolyConstruction
{ ppConstr_acGuide :: Accuracy
, ppConstr_dom :: DyadicInterval
, ppConstr_i0 :: FnIndex
, ppConstr_opIndices :: [(OpIndex, [FnIndex])]
}
deriving (Show)
pPolyFromOps :: PPolyConstruction -> PPoly
pPolyFromOps (PPolyConstruction acGuide dom i0 opIndices) =
applyOps opIndices (centreAsBall $ fns !! i0)
where
fns = map snd $ basicFunctions (dom, acGuide)
applyOps [] fn = fn
applyOps ((opIndex, operandIndices):rest) fn =
applyOps rest newFn
where
(_arity, opList) = operations !! opIndex
operands = map (fns !!) operandIndices
newFn = centreAsBall $ liftCheb2PPoly (reduceSizeUsingAccuracyGuide acGuide) $ opList (fn : operands)
pPolyFromOpsWithDeg :: Integer -> PPolyConstruction -> (PPoly, PPolyConstruction)
pPolyFromOpsWithDeg deg (PPolyConstruction acGuide dom i0 opIndices) =
applyOps [] opIndices (centreAsBall $ fns !! i0)
where
fns = map snd $ basicFunctions (dom, acGuide)
applyOps usedOpIndices [] fn =
(fn, PPolyConstruction acGuide dom i0 (reverse usedOpIndices))
applyOps usedOpIndices ((opIndex, operandIndices):rest) fn
| maximum (map (ChPoly.degree . centre . snd) (ppoly_pieces fn)) >= deg =
(fn, PPolyConstruction acGuide dom i0 (reverse usedOpIndices))
| otherwise =
applyOps ((opIndex, operandIndices):usedOpIndices) rest newFn
where
(_arity, opList) = operations !! opIndex
operands = map (fns !!) operandIndices
newFn = centreAsBall $ liftCheb2PPoly (reduceSizeUsingAccuracyGuide acGuide) $ opList (fn : operands)
type OpIndex = Integer
type Arity = Integer
operations :: [(Arity, [PPoly] -> PPoly)]
operations =
[op2 (+), op2 (-), op2 (*), op1 recipShift, (1, addBreak)]
where
op1 op = (1, \[e] -> op e)
op2 op = (2, \[e1,e2] -> op e1 e2)
acGuide = bits 10
recipShift p = inverseWithAccuracy acGuide (p - lb + 1)
where
lb :: MPBall
(lb, _) =
endpointsAsIntervals $
-- minimumOverDom p (getDomain p)
minimumOptimisedWithAccuracy p (mpBall l) (mpBall r) 5 5 acGuide
where
(Interval l r) = getDomain p
addBreak [p] =
-- Force a break point in the partition by adding a piecewise constant 0:
p + (linearPolygonI [(dyadic $ -1,mpBall 0),(x,mpBall 0),(dyadic 1,mpBall 0)] dom acGuide)
where
dom = getDomain p
Interval rl ru = applyApprox p dom
rlA = abs rl
ruA = abs ru
x
| rlA == 0 || ruA == 0 = dyadic 0
| otherwise = centre $ (ruA - rlA) /! (mpBall $ rlA + ruA)
-- x is an approximate average of dom endpoints, weighted by the range endpoints.
      -- This definition is deliberately rather arbitrary to achieve a high variation.
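      -- Editor's note (added): concretely x = (|ru| - |rl|) /! (|rl| + |ru|), i.e.
      -- the breakpoints -1 and 1 averaged with weights |rl| and |ru| respectively,
      -- so x stays strictly inside (-1, 1) whenever both weights are nonzero.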
addBreak _ = error "addBreak used with wrong arity"
type FnIndex = Integer
type Frequency = Integer
basicFunctions :: (DyadicInterval, Accuracy) -> [(Frequency, PPoly)]
basicFunctions domAcc = [(10,x), (1, c 0.5), (1, c 2), (1, c 100), (1, c (0.5^!20))]
where
x = fromPoly $ varFn domAcc ()
c :: (CanBeDyadic t) => t -> PPoly
c n = fromPoly $ constFn domAcc (dyadic n)
instance HasDomain PPolyConstruction where
type Domain PPolyConstruction = DyadicInterval
getDomain = ppConstr_dom
instance
-- (Arbitrary c, IsBall c, Show c) => Arbitrary (ChPolyConstruction c)
Arbitrary PPolyConstruction
where
arbitrary =
arbitraryWithDom =<< arbitraryNonEmptySmallInterval
--arbitraryWithDom =<< return (dyadicInterval (-1.0,1.0))
instance
-- (Arbitrary c, IsBall c, Show c) => ArbitraryWithDom (ChPolyConstruction c)
ArbitraryWithDom (PPolyConstruction)
where
arbitraryWithDom = arbitraryWithMinOpsDom 0
arbitraryWithMinOpsDom :: Integer -> DyadicInterval -> Gen PPolyConstruction
arbitraryWithMinOpsDom minOps dom =
sized withSize
where
withSize size =
do
numOfOps <- growingElements [minOps..(minOps+10+size)]
ops <- vectorOf (int numOfOps) (elements opIndicesArities)
fn0 <- elementsWeighted fnIndices
opIndices <- mapM addOperands ops
return $ PPolyConstruction acGuide dom fn0 opIndices
where
opIndicesArities = zip [0..] $ map fst operations
fnIndices = map (\(i,(n,_)) -> (n,i)) $ zip [0..] $ basicFunctions (dom, acGuide)
elementsWeighted es = frequency $ map (\(n,e) -> (int n, return e)) es
acGuide = bits $ 10 + size
addOperands (i, arity) =
do
operandIndices <- mapM getOperandIndex [2..arity]
return (i, operandIndices)
where
getOperandIndex _ = elementsWeighted fnIndices
arbitraryWithDegDom :: Integer -> DyadicInterval -> Gen (PPoly, PPolyConstruction)
arbitraryWithDegDom deg dom =
sized withSize
where
withSize size =
do
ops <- infiniteListOf (elements opIndicesArities)
fn0 <- elementsWeighted fnIndices
opIndices <- mapM addOperands ops
return $ pPolyFromOpsWithDeg deg $ PPolyConstruction acGuide dom fn0 opIndices
where
opIndicesArities = zip [0..] $ map fst operations
fnIndices = map (\(i,(n,_)) -> (n,i)) $ zip [0..] $ basicFunctions (dom, acGuide)
elementsWeighted es = frequency $ map (\(n,e) -> (int n, return e)) es
acGuide = bits $ 100 + size
addOperands (i, arity) =
do
operandIndices <- mapM getOperandIndex [2..arity]
return (i, operandIndices)
where
getOperandIndex _ = elementsWeighted fnIndices
instance
Arbitrary (FnAndDescr PPoly)
where
arbitrary =
do
constr <- arbitrary
return $ FnAndDescr (pPolyFromOps constr) (show constr)
instance
ArbitraryWithDom (FnAndDescr PPoly)
where
arbitraryWithDom dom =
do
constr <- arbitraryWithDom dom
return $ FnAndDescr (pPolyFromOps constr) (show constr)
instance Arbitrary PPoly where
arbitrary =
do
(FnAndDescr f _) <- arbitrary
return f
{-|
  A runtime representative of type @PPoly@.
Used for specialising polymorphic tests to concrete types.
-}
tPPoly :: T PPoly
tPPoly = T "PPoly"
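-- Editor's note (added): for example, the call
--   specFnPointwiseOp2 tPPoly tMPBall "+" (+) (+) anyFn anyFn
-- in 'specChPoly' below instantiates the generic pointwise-operation spec at the
-- concrete PPoly / MPBall types.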
anyFn :: FnAndDescr PPoly -> FnAndDescr PPoly
anyFn = id
makeFnPositive :: FnAndDescr PPoly -> FnAndDescr PPoly
makeFnPositive (FnAndDescr p pDescr) =
FnAndDescr res $ "makeFnPositive (" ++ pDescr ++ ")"
where
res
| lb !>! 0 = p
| otherwise = centreAsBall $ p - lb + 1
Interval l r = getDomain p
lb :: MPBall
(lb, _) = endpointsAsIntervals $ minimumOptimisedWithAccuracy p (mpBall l) (mpBall r) 5 5 (bits 0)
makeFnSmallRange :: Integer -> FnAndDescr PPoly -> FnAndDescr PPoly
makeFnSmallRange limit (FnAndDescr p pDescr) =
maybeTrace (printf "makeFnSmallRange: p = %s" (show p)) $
maybeTrace (printf "makeFnSmallRange: p construction = %s" pDescr) $
maybeTrace (printf "makeFnSmallRange: radius p = %s" (show (radius p))) $
maybeTrace (printf "makeFnSmallRange: lb = %s" (show lb)) $
maybeTrace (printf "makeFnSmallRange: ub = %s" (show ub)) $
FnAndDescr res $ "makeFnSmallRange " ++ show limit ++ " (" ++ pDescr ++ ")"
where
res
| b !<! limit = p
| otherwise = centreAsBall $ (limit * p /! b)
b = ub `max` (-lb)
lb, ub :: MPBall
-- (lb, _) = endpoints $ minimumOverDom p (getDomain p)
-- (_, ub) = endpoints $ maximumOverDom p (getDomain p)
(lb, _) = endpointsAsIntervals $ minimumOptimisedWithAccuracy p (mpBall l) (mpBall r) 5 5 (bits 0)
(_, ub) = endpointsAsIntervals $ maximumOptimisedWithAccuracy p (mpBall l) (mpBall r) 5 5 (bits 0)
Interval l r = getDomain p
makeFnPositiveSmallRange :: Integer -> FnAndDescr PPoly -> FnAndDescr PPoly
makeFnPositiveSmallRange limit (FnAndDescr p pDescr) =
FnAndDescr res $ "makeFnPositiveSmallRange " ++ show limit ++ " (" ++ pDescr ++ ")"
where
res
| 1 !<=! lb && ub !<! limit = p
| b !<! limit = p - lb + 1
| otherwise = centreAsBall $ (1 - lb + (limit * p /! b))
b = ub `max` (-lb)
lb, ub :: MPBall
-- (lb, _) = endpoints $ minimumOverDom p (getDomain p)
-- (_, ub) = endpoints $ maximumOverDom p (getDomain p)
(lb, _) = endpointsAsIntervals $ minimumOptimisedWithAccuracy p (mpBall l) (mpBall r) 5 5 (bits 0)
(_, ub) = endpointsAsIntervals $ maximumOptimisedWithAccuracy p (mpBall l) (mpBall r) 5 5 (bits 0)
Interval l r = getDomain p
-- precondAnyT :: t -> Bool
-- precondAnyT _t = True
--
-- precondNonZeroT :: (HasEqCertainly t Integer) => t -> Bool
-- precondNonZeroT t = t !/=! 0
--
-- precondSmallT :: (HasOrderCertainly t Integer) => t -> Bool
-- precondSmallT t = -1000 !<=! t && t !<=! 1000
specChPoly :: Spec
specChPoly =
describe ("ChPoly") $ do
-- describe "evaluation" $ do
-- specEvalConstFn tMPBall tPPoly tMPBall
-- specEvalUnaryVarFn tPPoly tMPBall
describe "ring" $ do
specFnPointwiseOp2 tPPoly tMPBall "+" (+) (+) anyFn anyFn
specFnPointwiseOp2 tPPoly tMPBall "-" (-) (-) anyFn anyFn
specFnPointwiseOp2 tPPoly tMPBall "*" (*) (*) anyFn anyFn
describe "field" $ do
specFnPointwiseOp1 tPPoly tMPBall "1/" (inverseWithAccuracy (bits 0)) (1/!) (makeFnPositiveSmallRange 100)
-- describe "size reduction" $ do
-- specFnPointwiseOp1 tPPoly tMPBall "reduce size (bits=10)" (reduceSizeUsingAccuracyGuide (bits 10)) id anyFn
-- specFnPointwiseOp1 tPPoly tMPBall "reduce size (bits=0)" (reduceSizeUsingAccuracyGuide (bits 0)) id anyFn
-- specCanReduceSizeUsingAccuracyGuide tPPoly
-- describe "range" $ do
-- specCanMaximiseOverDom tPPoly tMPBall
-- describe "trigonometric" $ do
-- specFnPointwiseOp1 tPPoly tMPBall "sine" (sineWithAccuracyGuide (bits 10)) (sin) (makeFnSmallRange 10)
generate :: IO ()
generate =
do
fns <- (sample' arbitrary :: IO [PPolyConstruction])
mapM_ putStrLn $ concat $ map (\fnC -> [show fnC, show (pPolyFromOps fnC)]) fns
test1 =
pPolyFromOps $
PPolyConstruction
{ppConstr_acGuide = bits 18, ppConstr_dom = Interval (dyadic (-1)) (dyadic 1),
ppConstr_i0 = 0, -- x
ppConstr_opIndices = [(2,[0]),(4,[])]} -- addBreak(x^2)
{- recent issues:
-}
{- a template for probing bugs:
generate :: IO ()
generate =
do
fns <- (sample' arbitrary :: IO [PPolyConstruction])
mapM_ putStrLn $ concat $ map (\fnC -> [show fnC, show (pPolyFromOps fnC)]) fns
dom1 = Interval (dyadic 100663296) (dyadic 100663299)
p1D =
PPolyConstruction {ppConstr_acGuide = bits 12, ppConstr_dom = dom1,
ppConstr_i0 = 0, ppConstr_opIndices = [(0,[3]),(0,[0])]}
p1 = chPolyFromOps p1D
p1FD = FnAndDescr p1 (show p1D)
p2D =
PPolyConstruction {ppConstr_acGuide = bits 30, ppConstr_dom = dom12,
ppConstr_i0 = 4,
ppConstr_opIndices = [(2,[0]),(0,[0]),(0,[0]),(0,[0]),(0,[0]),(2,[0]),(2,[0]),(2,[0]),(0,[0]),(0,[0]),(2,[0]),(1,[0]),(1,[0]),(2,[1]),(0,[0]),(1,[0]),(0,[0]),(2,[4]),(0,[0]),(2,[0]),(2,[0])]}
p2 = chPolyFromOps p2D
p2FD = FnAndDescr p2 (show p2D)
FnAndDescr p2sm _ = makeFnPositiveSmallRange 100 p2FD
p1Divp2sm = chebDivideDCT (bits 0) p1 p2sm
-- pt = (mpBall 1104) + (mpBall )
-}
| michalkonecny/aern2 | aern2-fun-univariate/src/AERN2/PPoly/Tests.hs | bsd-3-clause | 12,389 | 0 | 20 | 2,534 | 3,203 | 1,703 | 1,500 | -1 | -1 |
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE OverloadedStrings #-}
module Develop.Generate.Help
( makeHtml
, makeCodeHtml
, makeElmHtml
)
where
import Text.Blaze.Html5 ((!))
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import qualified Develop.StaticFiles as StaticFiles
-- PAGES
makeHtml :: String -> String -> String -> H.Html
makeHtml title jsFile initCode =
H.docTypeHtml $ do
H.head $ do
H.meta ! A.charset "UTF-8"
H.title $ H.toHtml title
H.link
! A.type_ "text/css"
! A.rel "stylesheet"
! A.href (H.toValue ("/" ++ StaticFiles.cssPath))
H.script ! A.src (H.toValue jsFile) $ ""
H.body $ do
H.script $ H.preEscapedToMarkup initCode
-- CODE
makeCodeHtml :: String -> String -> H.Html
makeCodeHtml title code =
H.docTypeHtml $ do
H.head $ do
H.meta ! A.charset "UTF-8"
H.title $ H.toHtml title
H.style ! A.type_ "text/css" $ codeStyle
H.link ! A.rel "stylesheet" ! A.href "//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.3.0/styles/default.min.css"
H.script ! A.src "//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.3.0/highlight.min.js" $ ""
H.script $ "if (hljs) { hljs.initHighlightingOnLoad(); }"
H.body ! A.style "background-color: #F0F0F0;" $ do
H.pre $ H.code $ H.toHtml code
codeStyle :: H.Html
codeStyle =
H.toHtml $ unlines $
[ "html, head, body, pre {"
, " margin: 0;"
, " height: 100%;"
, "}"
, "body {"
, " font-family: 'Source Code Pro', monospace;"
, "}"
]
-- ELM CODE
makeElmHtml :: FilePath -> H.Html
makeElmHtml filePath =
H.docTypeHtml $ do
H.head $ do
H.meta ! A.charset "UTF-8"
H.title $ H.toHtml ("~/" ++ filePath)
H.style ! A.type_ "text/css" $ elmStyle
H.body $ do
H.div ! A.style waitingStyle $ do
H.div ! A.style "font-size: 3em;" $ "Building your project!"
H.img ! A.src (H.toValue StaticFiles.waitingPath)
H.div ! A.style "font-size: 1em" $ "With new projects, I need a bunch of extra time to download packages."
H.script ! A.src (H.toValue ("/_compile/" ++ filePath)) ! A.charset "utf-8" $ ""
H.script $ H.preEscapedToMarkup $ unlines $
[ "while (document.body.firstChild) {"
, " document.body.removeChild(document.body.firstChild);"
, "}"
, "runElmProgram();"
]
elmStyle :: H.Html
elmStyle =
H.toHtml $ unlines $
[ "@import url(http://fonts.googleapis.com/css?family=Source+Sans+Pro);"
, "html, head, body {"
, " margin: 0;"
, " height: 100%;"
, "}"
]
waitingStyle :: H.AttributeValue
waitingStyle =
H.stringValue $
"width: 100%; height: 100%; display: flex; flex-direction: column;"
++ " justify-content: center; align-items: center; color: #9A9A9A;"
++ " font-family: 'Source Sans Pro';"
| evancz/cli | src/Develop/Generate/Help.hs | bsd-3-clause | 2,893 | 0 | 18 | 691 | 762 | 385 | 377 | 77 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Dimensions.DE
( allDimensions
) where
import Duckling.Dimensions.Types
allDimensions :: [Seal Dimension]
allDimensions =
[ Seal Distance
, Seal Duration
, Seal Numeral
, Seal Ordinal
, Seal Time
, Seal Volume
]
| facebookincubator/duckling | Duckling/Dimensions/DE.hs | bsd-3-clause | 454 | 0 | 6 | 90 | 75 | 44 | 31 | 11 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiWayIf #-}
module Game.Monsters.MFlyer where
import Control.Lens (use, preuse, ix, zoom, (^.), (.=), (%=), (&), (.~), (%~))
import Control.Monad (when, unless, liftM, void)
import Data.Bits ((.&.))
import Data.Char (toLower)
import Linear (V3(..), _x, _z)
import qualified Data.ByteString.Char8 as BC
import qualified Data.Vector as V
import {-# SOURCE #-} Game.GameImportT
import Game.LevelLocalsT
import Game.GameLocalsT
import Game.CVarT
import Game.SpawnTempT
import Game.EntityStateT
import Game.EdictT
import Game.GClientT
import Game.MoveInfoT
import Game.ClientPersistantT
import Game.ClientRespawnT
import Game.MonsterInfoT
import Game.PlayerStateT
import Types
import QuakeRef
import QuakeState
import CVarVariables
import Game.Adapters
import qualified Constants
import qualified Game.GameAI as GameAI
import qualified Game.GameMisc as GameMisc
import qualified Game.GameWeapon as GameWeapon
import qualified Game.GameUtil as GameUtil
import qualified Game.Monster as Monster
import qualified Game.Monsters.MFlash as MFlash
import qualified Util.Lib as Lib
import qualified Util.Math3D as Math3D
modelScale :: Float
modelScale = 1.0
actionAttack1 :: Int
actionAttack1 = 1
actionAttack2 :: Int
actionAttack2 = 2
actionRun :: Int
actionRun = 3
frameStart01 :: Int
frameStart01 = 0
frameStart06 :: Int
frameStart06 = 5
frameStop01 :: Int
frameStop01 = 6
frameStop07 :: Int
frameStop07 = 12
frameStand01 :: Int
frameStand01 = 13
frameStand45 :: Int
frameStand45 = 57
frameAttack101 :: Int
frameAttack101 = 58
frameAttack106 :: Int
frameAttack106 = 63
frameAttack107 :: Int
frameAttack107 = 64
frameAttack118 :: Int
frameAttack118 = 75
frameAttack119 :: Int
frameAttack119 = 76
frameAttack121 :: Int
frameAttack121 = 78
frameAttack201 :: Int
frameAttack201 = 79
frameAttack204 :: Int
frameAttack204 = 82
frameAttack207 :: Int
frameAttack207 = 85
frameAttack210 :: Int
frameAttack210 = 88
frameAttack217 :: Int
frameAttack217 = 95
frameBankLeft01 :: Int
frameBankLeft01 = 96
frameBankLeft07 :: Int
frameBankLeft07 = 102
frameBankRight01 :: Int
frameBankRight01 = 103
frameBankRight07 :: Int
frameBankRight07 = 109
frameRollLeft01 :: Int
frameRollLeft01 = 110
frameRollLeft09 :: Int
frameRollLeft09 = 118
frameRollRight01 :: Int
frameRollRight01 = 119
frameRollRight09 :: Int
frameRollRight09 = 127
frameDefense01 :: Int
frameDefense01 = 128
frameDefense06 :: Int
frameDefense06 = 133
framePain101 :: Int
framePain101 = 134
framePain109 :: Int
framePain109 = 142
framePain201 :: Int
framePain201 = 143
framePain204 :: Int
framePain204 = 146
framePain301 :: Int
framePain301 = 147
framePain304 :: Int
framePain304 = 150
flyerSight :: EntInteract
flyerSight =
GenericEntInteract "flyer_sight" $ \selfRef _ -> do
sound <- use $ gameBaseGlobals.gbGameImport.giSound
soundSight <- use $ mFlyerGlobals.mFlyerSoundSight
sound (Just selfRef) Constants.chanVoice soundSight 1 Constants.attnNorm 0
return True
flyerIdle :: EntThink
flyerIdle =
GenericEntThink "flyer_idle" $ \selfRef -> do
sound <- use $ gameBaseGlobals.gbGameImport.giSound
soundIdle <- use $ mFlyerGlobals.mFlyerSoundIdle
sound (Just selfRef) Constants.chanVoice soundIdle 1 Constants.attnIdle 0
return True
flyerPopBlades :: EntThink
flyerPopBlades =
GenericEntThink "flyer_pop_blades" $ \selfRef -> do
sound <- use $ gameBaseGlobals.gbGameImport.giSound
soundSproing <- use $ mFlyerGlobals.mFlyerSoundSproing
sound (Just selfRef) Constants.chanVoice soundSproing 1 Constants.attnNorm 0
return True
flyerFramesStand :: V.Vector MFrameT
flyerFramesStand =
V.fromList [ MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
, MFrameT (Just GameAI.aiStand) 0 Nothing
]
flyerMoveStand :: MMoveT
flyerMoveStand = MMoveT "flyerMoveStand" frameStand01 frameStand45 flyerFramesStand Nothing
flyerFramesWalk :: V.Vector MFrameT
flyerFramesWalk =
V.fromList [ MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
, MFrameT (Just GameAI.aiWalk) 5 Nothing
]
flyerMoveWalk :: MMoveT
flyerMoveWalk = MMoveT "flyerMoveWalk" frameStand01 frameStand45 flyerFramesWalk Nothing
flyerFramesRun :: V.Vector MFrameT
flyerFramesRun =
V.fromList [ MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
, MFrameT (Just GameAI.aiRun) 10 Nothing
]
flyerMoveRun :: MMoveT
flyerMoveRun = MMoveT "flyerMoveRun" frameStand01 frameStand45 flyerFramesRun Nothing
flyerRun :: EntThink
flyerRun =
GenericEntThink "flyer_run" $ \selfRef -> do
self <- readRef selfRef
let action = if (self^.eMonsterInfo.miAIFlags) .&. Constants.aiStandGround /= 0
then flyerMoveStand
else flyerMoveRun
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just action)
return True
flyerWalk :: EntThink
flyerWalk =
GenericEntThink "flyer_walk" $ \selfRef -> do
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just flyerMoveWalk)
return True
flyerStand :: EntThink
flyerStand =
GenericEntThink "flyer_stand" $ \selfRef -> do
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just flyerMoveStand)
return True
flyerNextMove :: EntThink
flyerNextMove =
GenericEntThink "flyer_nextmove" $ \selfRef -> do
nextMove <- use $ mFlyerGlobals.mFlyerNextMove
if | nextMove == actionAttack1 ->
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just flyerMoveStartMelee)
| nextMove == actionAttack2 ->
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just flyerMoveAttack2)
| nextMove == actionRun ->
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just flyerMoveRun)
| otherwise ->
return ()
return True
flyerFramesStart :: V.Vector MFrameT
flyerFramesStart =
V.fromList [ MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 (Just flyerNextMove)
]
flyerMoveStart :: MMoveT
flyerMoveStart = MMoveT "flyerMoveStart" frameStart01 frameStart06 flyerFramesStart Nothing
flyerFramesStop :: V.Vector MFrameT
flyerFramesStop =
V.fromList [ MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 (Just flyerNextMove)
]
flyerMoveStop :: MMoveT
flyerMoveStop = MMoveT "flyerMoveStop" frameStop01 frameStop07 flyerFramesStop Nothing
flyerStop :: EntThink
flyerStop =
GenericEntThink "flyer_stop" $ \selfRef -> do
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just flyerMoveStop)
return True
flyerStart :: EntThink
flyerStart =
GenericEntThink "flyer_start" $ \selfRef -> do
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just flyerMoveStart)
return True
flyerFramesRollRight :: V.Vector MFrameT
flyerFramesRollRight =
V.fromList [ MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
]
flyerMoveRollRight :: MMoveT
flyerMoveRollRight = MMoveT "flyerMoveRollRight" frameRollRight01 frameRollRight09 flyerFramesRollRight Nothing
flyerFramesRollLeft :: V.Vector MFrameT
flyerFramesRollLeft =
V.fromList [ MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
]
flyerMoveRollLeft :: MMoveT
flyerMoveRollLeft = MMoveT "flyerMoveRollLeft" frameRollLeft01 frameRollLeft09 flyerFramesRollLeft Nothing
flyerFramesPain3 :: V.Vector MFrameT
flyerFramesPain3 =
V.fromList [ MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
]
flyerMovePain3 :: MMoveT
flyerMovePain3 = MMoveT "flyerMovePain3" framePain301 framePain304 flyerFramesPain3 (Just flyerRun)
flyerFramesPain2 :: V.Vector MFrameT
flyerFramesPain2 =
V.fromList [ MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
]
flyerMovePain2 :: MMoveT
flyerMovePain2 = MMoveT "flyerMovePain2" framePain201 framePain204 flyerFramesPain2 (Just flyerRun)
flyerFramesPain1 :: V.Vector MFrameT
flyerFramesPain1 =
V.fromList [ MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
]
flyerMovePain1 :: MMoveT
flyerMovePain1 = MMoveT "flyerMovePain1" framePain101 framePain109 flyerFramesPain1 (Just flyerRun)
flyerFramesDefense :: V.Vector MFrameT
flyerFramesDefense =
V.fromList [ MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
-- Hold this frame
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
]
flyerMoveDefense :: MMoveT
flyerMoveDefense = MMoveT "flyerMoveDefense" frameDefense01 frameDefense06 flyerFramesDefense Nothing
flyerFramesBankRight :: V.Vector MFrameT
flyerFramesBankRight =
V.fromList [ MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
]
flyerMoveBankRight :: MMoveT
flyerMoveBankRight = MMoveT "flyerMoveBankRight" frameBankRight01 frameBankRight07 flyerFramesBankRight Nothing
flyerFramesBankLeft :: V.Vector MFrameT
flyerFramesBankLeft =
V.fromList [ MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
, MFrameT (Just GameAI.aiMove) 0 Nothing
]
flyerMoveBankLeft :: MMoveT
flyerMoveBankLeft = MMoveT "flyerMoveBankLeft" frameBankLeft01 frameBankLeft07 flyerFramesBankLeft Nothing
flyerFireLeft :: EntThink
flyerFireLeft =
GenericEntThink "flyer_fireleft" $ \selfRef -> do
flyerFire selfRef Constants.mz2FlyerBlaster1
return True
flyerFireRight :: EntThink
flyerFireRight =
GenericEntThink "flyer_fireright" $ \selfRef -> do
flyerFire selfRef Constants.mz2FlyerBlaster2
return True
flyerFramesAttack2 :: V.Vector MFrameT
flyerFramesAttack2 =
V.fromList [ MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) (-10) (Just flyerFireLeft) -- left gun
, MFrameT (Just GameAI.aiCharge) (-10) (Just flyerFireRight) -- right gun
, MFrameT (Just GameAI.aiCharge) (-10) (Just flyerFireLeft) -- left gun
, MFrameT (Just GameAI.aiCharge) (-10) (Just flyerFireRight) -- right gun
, MFrameT (Just GameAI.aiCharge) (-10) (Just flyerFireLeft) -- left gun
, MFrameT (Just GameAI.aiCharge) (-10) (Just flyerFireRight) -- right gun
, MFrameT (Just GameAI.aiCharge) (-10) (Just flyerFireLeft) -- left gun
, MFrameT (Just GameAI.aiCharge) (-10) (Just flyerFireRight) -- right gun
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
]
flyerMoveAttack2 :: MMoveT
flyerMoveAttack2 = MMoveT "flyerMoveAttack2" frameAttack201 frameAttack217 flyerFramesAttack2 (Just flyerRun)
flyerSlashLeft :: EntThink
flyerSlashLeft =
GenericEntThink "flyer_slash_left" $ \selfRef -> do
self <- readRef selfRef
let aim = V3 (fromIntegral Constants.meleeDistance) (self^.eMins._x) 0
GameWeapon.fireHit selfRef aim 5 0
sound <- use $ gameBaseGlobals.gbGameImport.giSound
soundSlash <- use $ mFlyerGlobals.mFlyerSoundSlash
sound (Just selfRef) Constants.chanWeapon soundSlash 1 Constants.attnNorm 0
return True
flyerSlashRight :: EntThink
flyerSlashRight =
GenericEntThink "flyer_slash_right" $ \selfRef -> do
self <- readRef selfRef
let aim = V3 (fromIntegral Constants.meleeDistance) (self^.eMaxs._x) 0
GameWeapon.fireHit selfRef aim 5 0
sound <- use $ gameBaseGlobals.gbGameImport.giSound
soundSlash <- use $ mFlyerGlobals.mFlyerSoundSlash
sound (Just selfRef) Constants.chanWeapon soundSlash 1 Constants.attnNorm 0
return True
flyerLoopMelee :: EntThink
flyerLoopMelee =
GenericEntThink "flyer_loop_melee" $ \selfRef -> do
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just flyerMoveLoopMelee)
return True
flyerFramesStartMelee :: V.Vector MFrameT
flyerFramesStartMelee =
V.fromList [ MFrameT (Just GameAI.aiCharge) 0 (Just flyerPopBlades)
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
]
flyerMoveStartMelee :: MMoveT
flyerMoveStartMelee = MMoveT "flyerMoveStartMelee" frameAttack101 frameAttack106 flyerFramesStartMelee (Just flyerLoopMelee)
flyerFramesEndMelee :: V.Vector MFrameT
flyerFramesEndMelee =
V.fromList [ MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
]
flyerMoveEndMelee :: MMoveT
flyerMoveEndMelee = MMoveT "flyerMoveEndMelee" frameAttack119 frameAttack121 flyerFramesEndMelee (Just flyerRun)
flyerFramesLoopMelee :: V.Vector MFrameT
flyerFramesLoopMelee =
V.fromList [ MFrameT (Just GameAI.aiCharge) 0 Nothing -- Loop Start
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 (Just flyerSlashLeft) -- Left Wing Strike
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 (Just flyerSlashRight) -- Right Wing Strike
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing
, MFrameT (Just GameAI.aiCharge) 0 Nothing -- Loop Ends
]
flyerCheckMelee :: EntThink
flyerCheckMelee =
GenericEntThink "flyer_check_melee" $ \selfRef -> do
self <- readRef selfRef
let Just enemyRef = self^.eEnemy
enemy <- readRef enemyRef
r <- Lib.randomF
let currentMove = if GameUtil.range self enemy == Constants.rangeMelee
then if r <= 0.8
then flyerMoveLoopMelee
else flyerMoveEndMelee
else flyerMoveEndMelee
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just currentMove)
return True
flyerMoveLoopMelee :: MMoveT
flyerMoveLoopMelee = MMoveT "flyerMoveLoopMelee" frameAttack107 frameAttack118 flyerFramesLoopMelee (Just flyerCheckMelee)
flyerAttack :: EntThink
flyerAttack =
GenericEntThink "flyer_attack" $ \selfRef -> do
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just flyerMoveAttack2)
return True
flyerSetStart :: EntThink
flyerSetStart =
GenericEntThink "flyer_setstart" $ \selfRef -> do
mFlyerGlobals.mFlyerNextMove .= actionRun
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just flyerMoveStart)
return True
flyerMelee :: EntThink
flyerMelee =
GenericEntThink "flyer_melee" $ \selfRef -> do
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just flyerMoveStartMelee)
return True
flyerPain :: EntPain
flyerPain =
GenericEntPain "flyer_pain" $ \selfRef _ _ _ -> do
self <- readRef selfRef
when ((self^.eHealth) <= (self^.eMaxHealth) `div` 2) $
modifyRef selfRef (\v -> v & eEntityState.esSkinNum .~ 1)
levelTime <- use $ gameBaseGlobals.gbLevel.llTime
unless (levelTime < (self^.ePainDebounceTime)) $ do
modifyRef selfRef (\v -> v & ePainDebounceTime .~ levelTime + 3)
skillValue <- liftM (^.cvValue) skillCVar
unless (skillValue == 3) $ do -- no pain anims in nightmare
n <- liftM (`mod` 3) Lib.rand
(soundPain, currentMove) <- if | n == 0 -> do
soundPain <- use $ mFlyerGlobals.mFlyerSoundPain1
return (soundPain, flyerMovePain1)
| n == 1 -> do
soundPain <- use $ mFlyerGlobals.mFlyerSoundPain2
return (soundPain, flyerMovePain2)
| otherwise -> do
soundPain <- use $ mFlyerGlobals.mFlyerSoundPain1
return (soundPain, flyerMovePain3)
sound <- use $ gameBaseGlobals.gbGameImport.giSound
sound (Just selfRef) Constants.chanVoice soundPain 1 Constants.attnNorm 0
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just currentMove)
flyerDie :: EntDie
flyerDie =
GenericEntDie "flyer_die" $ \selfRef _ _ _ _ -> do
sound <- use $ gameBaseGlobals.gbGameImport.giSound
soundDie <- use $ mFlyerGlobals.mFlyerSoundDie
sound (Just selfRef) Constants.chanVoice soundDie 1 Constants.attnNorm 0
GameMisc.becomeExplosion1 selfRef
flyerFire :: Ref EdictT -> Int -> Quake ()
flyerFire selfRef flashNumber = do
self <- readRef selfRef
let effect = if (self^.eEntityState.esFrame) `elem` [frameAttack204, frameAttack207, frameAttack210]
then Constants.efHyperblaster
else 0
(Just forward, Just right, _) = Math3D.angleVectors (self^.eEntityState.esAngles) True True False
start = Math3D.projectSource (self^.eEntityState.esOrigin) (MFlash.monsterFlashOffset V.! flashNumber) forward right
Just enemyRef = self^.eEnemy
enemy <- readRef enemyRef
let V3 a b c = enemy^.eEntityState.esOrigin
end = V3 a b (c + fromIntegral (enemy^.eViewHeight))
dir = end - start
Monster.monsterFireBlaster selfRef start dir 1 1000 flashNumber effect
{-
- QUAKED monster_flyer (1 .5 0) (-16 -16 -24) (16 16 32) Ambush
- Trigger_Spawn Sight
-}
spMonsterFlyer :: Ref EdictT -> Quake ()
spMonsterFlyer selfRef = do
deathmatchValue <- liftM (^.cvValue) deathmatchCVar
if deathmatchValue /= 0
then
GameUtil.freeEdict selfRef
else do
-- fix a map bug in jail5.bsp
self <- readRef selfRef
mapName <- use $ gameBaseGlobals.gbLevel.llMapName
when (BC.map toLower mapName == "jail5" && (self^.eEntityState.esOrigin._z) == (-104)) $
modifyRef selfRef (\v -> v & eTargetName .~ (self^.eTarget)
& eTarget .~ Nothing)
gameImport <- use $ gameBaseGlobals.gbGameImport
let soundIndex = gameImport^.giSoundIndex
modelIndex = gameImport^.giModelIndex
linkEntity = gameImport^.giLinkEntity
soundIndex (Just "flyer/flysght1.wav") >>= (mFlyerGlobals.mFlyerSoundSight .=)
soundIndex (Just "flyer/flysrch1.wav") >>= (mFlyerGlobals.mFlyerSoundIdle .=)
soundIndex (Just "flyer/flypain1.wav") >>= (mFlyerGlobals.mFlyerSoundPain1 .=)
soundIndex (Just "flyer/flypain2.wav") >>= (mFlyerGlobals.mFlyerSoundPain2 .=)
soundIndex (Just "flyer/flyatck2.wav") >>= (mFlyerGlobals.mFlyerSoundSlash .=)
soundIndex (Just "flyer/flyatck1.wav") >>= (mFlyerGlobals.mFlyerSoundSproing .=)
soundIndex (Just "flyer/flydeth1.wav") >>= (mFlyerGlobals.mFlyerSoundDie .=)
void $ soundIndex (Just "flyer/flyatck3.wav")
soundIdx <- soundIndex (Just "flyer/flyidle1.wav")
modelIdx <- modelIndex (Just "models/monsters/flyer/tris.md2")
modifyRef selfRef (\v -> v & eEntityState.esModelIndex .~ modelIdx
& eMins .~ V3 (-16) (-16) (-24)
& eMaxs .~ V3 16 16 32
& eMoveType .~ Constants.moveTypeStep
& eSolid .~ Constants.solidBbox
& eEntityState.esSound .~ soundIdx
& eHealth .~ 50
& eMass .~ 50
& ePain .~ Just flyerPain
& eDie .~ Just flyerDie
& eMonsterInfo.miStand .~ Just flyerStand
& eMonsterInfo.miWalk .~ Just flyerWalk
& eMonsterInfo.miRun .~ Just flyerRun
& eMonsterInfo.miAttack .~ Just flyerAttack
& eMonsterInfo.miMelee .~ Just flyerMelee
& eMonsterInfo.miSight .~ Just flyerSight
& eMonsterInfo.miIdle .~ Just flyerIdle)
linkEntity selfRef
modifyRef selfRef (\v -> v & eMonsterInfo.miCurrentMove .~ Just flyerMoveStand
& eMonsterInfo.miScale .~ modelScale)
void $ think GameAI.flyMonsterStart selfRef
| ksaveljev/hake-2 | src/Game/Monsters/MFlyer.hs | bsd-3-clause | 31,283 | 0 | 47 | 9,114 | 8,830 | 4,483 | 4,347 | -1 | -1 |
{-|
Module : BPlusTree.Node
Description : Node module for B+ Tree
License : BSD3
Maintainer : [email protected]
Stability : experimental
This module provides the data type 'Node', which corresponds to a node in a B+
tree. A node stores the number of keys, the list of keys, the list of values,
and the file name (file pointer) of its parent node. The values corresponding
to the keys are file names (pointers) to the child nodes.
-}
module BPlusTree.Node where
import Aria
import BPlusTree.Types
import qualified Data.ByteString.Char8 as B
-- | Pathname of the directory storing the database, relative to project root
dataPath :: FilePath
dataPath = "data/"
-- | Node data type captures the node object in B+ Tree.
data Node = Node {
keyCount :: Int, -- ^ Number of keys in the node
keys :: [AriaKey], -- ^ List of keys (sorted)
values :: [BPTFileName], -- ^ List of values (pointers to children)
parent :: Maybe BPTFileName -- ^ FileName of the parent node
} deriving (Show, Read)
-- | It reads the contents of the file with the given file name and returns the
-- 'Node' object inside the IO monad.
readNode :: BPTFileName -> IO Node
readNode nodeName = do
fileContents <- B.readFile (dataPath ++ nodeName)
let node = read (B.unpack fileContents) :: Node
return node
-- | It writes the 'show' value of the given 'Node' object to the file with the
-- given name.
writeNode :: BPTFileName -> Node -> IO ()
writeNode nodeName = B.writeFile (dataPath ++ nodeName) . B.pack . show
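-- A minimal usage sketch (illustrative only; @"node42"@ is a hypothetical file
-- name under 'dataPath'):
--
-- > do node <- readNode "node42"
-- > writeNode "node42" node { keyCount = keyCount node + 1 }
--
-- Since nodes round-trip through 'show'/'read', any record update can be
-- persisted by writing the value back under the same file name.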
| proneetv/ariaDB | src/Service/BPlusTree/Node.hs | bsd-3-clause | 1,573 | 0 | 13 | 358 | 213 | 121 | 92 | 19 | 1 |
module Language.ImProve.Path
( totalPaths
) where
import Language.ImProve.Core
totalPaths :: Name -> Statement -> IO ()
totalPaths name stmt = do
putStrLn $ "total paths: " ++ show (paths stmt)
writeFile (name ++ ".dot") (dot stmt)
paths :: Statement -> Integer
paths a = case a of
Assign _ _ -> 1
Branch _ a b -> paths a + paths b
Sequence a b -> paths a * paths b
Assert _ _ _ -> 1
Assume _ _ -> 1
Label _ a -> paths a
Null -> 1
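-- A worked example (names @c@, @a1@ ... @a4@ are hypothetical): 'Branch' adds
-- the path counts of its arms and 'Sequence' multiplies the counts of its
-- parts, so two consecutive two-way branches over plain assignments give
--
-- > paths (Sequence (Branch c a1 a2) (Branch c a3 a4)) = (1 + 1) * (1 + 1) = 4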
dot :: Statement -> String
dot stmt = unlines $ ["digraph {"] ++ links ++ ["}"]
where
(_, _, links) = d 0 1 stmt
d :: Int -> Int -> Statement -> (Int, Int, [String])
d src id a = case a of
Branch _ a b -> (id2, id2 + 1, link src id ++ a' ++ b' ++ link srcA id2 ++ link srcB id2)
where
(srcA, id1, a') = d id (id + 1) a
(srcB, id2, b') = d id id1 b
Sequence a b -> (srcB, id2, a' ++ b')
where
(srcA, id1, a') = d src id a
(srcB, id2, b') = d srcA id1 b
Label _ a -> d src id a
_ -> (src, id, [])
where
link :: Int -> Int -> [String]
link a b = [" " ++ show a ++ " -> " ++ show b]
| tomahawkins/improve | Language/ImProve/Path.hs | bsd-3-clause | 1,123 | 0 | 13 | 354 | 575 | 295 | 280 | 31 | 7 |
{-# LANGUAGE PatternGuards, ViewPatterns #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
module Narradar.Processor.UsableRules where
import Narradar.Framework
import Narradar.Framework.Ppr
data UsableRulesProof = UsableRulesProof deriving (Eq,Show,Ord)
instance Pretty UsableRulesProof where pPrint _ = text "Usable rules proof"
{-
usableRulesP :: (Pretty v, Pretty id, Ord v, Ord a, Enum v, id ~ Identifier a) => Problem id v -> ProblemProofG id v
usableRulesP p@(Problem typ trs dps)
| (isBNarrowing .|. isGNarrowing) typ = step UsableRulesP p (iUsableRules p pi' (rhs <$> rules dps))
| otherwise = return p
where
pi' = AF.restrictTo (getDefinedSymbols dps `mappend` getConstructorSymbols trs ) <$> getAF typ
-} | pepeiborra/narradar | src/Narradar/Processor/UsableRules.hs | bsd-3-clause | 787 | 0 | 6 | 122 | 64 | 38 | 26 | 9 | 0 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverlappingInstances #-}
module Text.Syntax.Printer.Text
( Printer ()
, print
, print'
, runAsPrinter
, runAsPrinter'
) where
import Prelude hiding (print)
import Control.Isomorphism.Partial
import Control.Monad
import Data.Monoid
import qualified Data.Text as S
import qualified Data.Text.Lazy as L
import Data.Text.Lazy.Builder
import Text.Syntax.Poly
newtype Printer alpha = Printer { runPrinter :: alpha -> Maybe Builder }
instance IsoFunctor Printer where
iso <$> Printer p = Printer (\b -> unapply iso b >>= p)
instance ProductFunctor Printer where
Printer p <*> Printer q = Printer (\(x, y) -> liftM2 mappend (p x) (q y))
instance IsoAlternative Printer where
Printer p <||> Printer q = Printer (\s -> mplus (p s) (q s))
empty = Printer (\_ -> Nothing)
instance TryAlternative Printer
instance AbstractSyntax Printer where
syntax x = Printer (\y -> if x == y then Just mempty
else Nothing)
instance Syntax Char Printer where
token = Printer $ Just . singleton
runAsPrinter :: RunAsPrinter Char L.Text a ErrorString
runAsPrinter s = maybe (Left . errorString $ "print error") Right . fmap toLazyText . runPrinter s
runAsPrinter' :: RunAsPrinter Char S.Text a ErrorString
runAsPrinter' s = (L.toStrict `fmap`) . runAsPrinter s
print :: SyntaxT Char a -> a -> L.Text
print = doPrint runAsPrinter
print' :: SyntaxT Char a -> a -> S.Text
print' = doPrint runAsPrinter'
doPrint :: RunAsPrinter Char tcs a ErrorString -> SyntaxT Char a -> a -> tcs
doPrint p s v = case p s v of
Left e -> error $ show e
Right r -> r
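-- Usage sketch (illustrative): given a syntax description built from the
-- combinators in "Text.Syntax.Poly", say a hypothetical
-- @exprSyntax :: SyntaxT Char Expr@, the same description drives printing:
--
-- > rendered :: L.Text
-- > rendered = print exprSyntax someExpr
--
-- 'print' calls 'error' when printing fails (i.e. when 'runAsPrinter' returns
-- 'Left'); use 'runAsPrinter' directly to handle failure without a crash.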
| schernichkin/exchange | src/Text/Syntax/Printer/Text.hs | bsd-3-clause | 1,793 | 0 | 11 | 427 | 578 | 306 | 272 | 47 | 2 |
-- |
-- Module : Crypto.Hash.SHA512
-- License : BSD-style
-- Maintainer : Vincent Hanquez <[email protected]>
-- Stability : experimental
-- Portability : unknown
--
-- module containing the binding functions to work with the
-- SHA512 cryptographic hash.
--
{-# LANGUAGE ForeignFunctionInterface #-}
module Crypto.Hash.SHA512 ( SHA512 (..) ) where
import Crypto.Hash.Types
import Foreign.Ptr (Ptr)
import Data.Word (Word8, Word32)
-- | SHA512 cryptographic hash algorithm
data SHA512 = SHA512
deriving (Show)
instance HashAlgorithm SHA512 where
hashBlockSize _ = 128
hashDigestSize _ = 64
hashInternalContextSize _ = 256
hashInternalInit = c_sha512_init
hashInternalUpdate = c_sha512_update
hashInternalFinalize = c_sha512_finalize
foreign import ccall unsafe "cryptonite_sha512_init"
c_sha512_init :: Ptr (Context a)-> IO ()
foreign import ccall "cryptonite_sha512_update"
c_sha512_update :: Ptr (Context a) -> Ptr Word8 -> Word32 -> IO ()
foreign import ccall unsafe "cryptonite_sha512_finalize"
c_sha512_finalize :: Ptr (Context a) -> Ptr (Digest a) -> IO ()
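-- Usage sketch (assumes the package's high-level "Crypto.Hash" interface,
-- which dispatches to these bindings):
--
-- > {-# LANGUAGE OverloadedStrings #-}
-- > import Crypto.Hash (hashWith)
-- > import Data.ByteString (ByteString)
-- >
-- > digest = hashWith SHA512 ("hello" :: ByteString)
-- > -- digest :: Digest SHA512, 64 bytes per 'hashDigestSize'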
| nomeata/cryptonite | Crypto/Hash/SHA512.hs | bsd-3-clause | 1,192 | 0 | 10 | 271 | 236 | 132 | 104 | 20 | 0 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}
module HaskellCI.Config where
import HaskellCI.Prelude
import Distribution.Simple.Utils (fromUTF8BS)
import qualified Data.ByteString as BS
import qualified Data.Map as M
import qualified Data.Set as S
import qualified Distribution.CabalSpecVersion as C
import qualified Distribution.Compat.CharParsing as C
import qualified Distribution.Compat.Newtype as C
import qualified Distribution.FieldGrammar as C
import qualified Distribution.Fields as C
import qualified Distribution.Parsec as C
import qualified Distribution.Pretty as C
import qualified Distribution.Types.PackageName as C
import qualified Distribution.Types.Version as C
import qualified Text.PrettyPrint as PP
import HaskellCI.Config.ConstraintSet
import HaskellCI.Config.CopyFields
import HaskellCI.Config.Docspec
import HaskellCI.Config.Doctest
import HaskellCI.Config.Empty
import HaskellCI.Config.Folds
import HaskellCI.Config.HLint
import HaskellCI.Config.Installed
import HaskellCI.Config.Jobs
import HaskellCI.Config.PackageScope
import HaskellCI.Config.Ubuntu
import HaskellCI.HeadHackage
import HaskellCI.Newtypes
import HaskellCI.OptionsGrammar
import HaskellCI.ParsecUtils
import HaskellCI.TestedWith
-------------------------------------------------------------------------------
-- Config
-------------------------------------------------------------------------------
-- TODO: split other blocks like DoctestConfig
data Config = Config
{ cfgCabalInstallVersion :: Maybe Version
, cfgJobs :: Maybe Jobs
, cfgUbuntu :: !Ubuntu
, cfgTestedWith :: !TestedWithJobs
, cfgEnabledJobs :: !VersionRange
, cfgCopyFields :: !CopyFields
, cfgLocalGhcOptions :: [String]
, cfgSubmodules :: !Bool
, cfgCache :: !Bool
, cfgInstallDeps :: !Bool
, cfgInstalled :: [Installed]
, cfgTests :: !VersionRange
, cfgRunTests :: !VersionRange
, cfgBenchmarks :: !VersionRange
, cfgHaddock :: !VersionRange
, cfgNoTestsNoBench :: !VersionRange
, cfgUnconstrainted :: !VersionRange
, cfgHeadHackage :: !VersionRange
, cfgGhcjsTests :: !Bool
, cfgGhcjsTools :: ![C.PackageName]
, cfgTestOutputDirect :: !Bool
, cfgCheck :: !Bool
, cfgOnlyBranches :: [String]
, cfgIrcChannels :: [String]
, cfgIrcNickname :: Maybe String
, cfgIrcPassword :: Maybe String
, cfgIrcIfInOriginRepo :: Bool
, cfgEmailNotifications :: Bool
, cfgProjectName :: Maybe String
, cfgFolds :: S.Set Fold
, cfgGhcHead :: !Bool
, cfgPostgres :: !Bool
, cfgGoogleChrome :: !Bool
, cfgEnv :: M.Map Version String
, cfgAllowFailures :: !VersionRange
, cfgLastInSeries :: !Bool
, cfgLinuxJobs :: !VersionRange
, cfgMacosJobs :: !VersionRange
, cfgGhcupJobs :: !VersionRange
, cfgGhcupVersion :: !Version
, cfgApt :: S.Set String
, cfgTravisPatches :: [FilePath]
, cfgGitHubPatches :: [FilePath]
, cfgInsertVersion :: !Bool
, cfgErrorMissingMethods :: !PackageScope
, cfgDoctest :: !DoctestConfig
, cfgDocspec :: !DocspecConfig
, cfgHLint :: !HLintConfig
, cfgConstraintSets :: [ConstraintSet]
, cfgRawProject :: [C.PrettyField ()]
, cfgRawTravis :: !String
, cfgGitHubActionName :: !(Maybe String)
}
deriving (Generic)
defaultCabalInstallVersion :: Maybe Version
defaultCabalInstallVersion = Just (C.mkVersion [3,4])
defaultGhcupVersion :: Version
defaultGhcupVersion = C.mkVersion [0,1,14,1]
emptyConfig :: Config
emptyConfig = case runEG configGrammar of
Left xs -> error $ "Required fields: " ++ show xs
Right x -> x
-------------------------------------------------------------------------------
-- Grammar
-------------------------------------------------------------------------------
configGrammar
:: ( OptionsGrammar c g, Applicative (g Config)
, c (Identity HLintJob)
, c (Identity PackageScope)
, c (Identity TestedWithJobs)
, c (Identity Ubuntu)
, c (Identity Jobs)
, c (Identity CopyFields)
, c (Identity Version)
, c Env, c Folds, c CopyFields, c HeadVersion
, c (C.List C.FSep (Identity Installed) Installed)
, Applicative (g DoctestConfig)
, Applicative (g DocspecConfig)
, Applicative (g HLintConfig))
=> g Config Config
configGrammar = Config
<$> C.optionalFieldDefAla "cabal-install-version" HeadVersion (field @"cfgCabalInstallVersion") defaultCabalInstallVersion
^^^ metahelp "VERSION" "cabal-install version for all jobs"
<*> C.optionalField "jobs" (field @"cfgJobs")
^^^ metahelp "JOBS" "jobs (N:M - cabal:ghc)"
<*> C.optionalFieldDef "distribution" (field @"cfgUbuntu") Bionic
^^^ metahelp "DIST" "distribution version (xenial, bionic)"
<*> C.optionalFieldDef "jobs-selection" (field @"cfgTestedWith") TestedWithUniform
^^^ metahelp "uniform|any" "Jobs selection across packages"
<*> rangeField "enabled" (field @"cfgEnabledJobs") anyVersion
^^^ metahelp "RANGE" "Restrict jobs selection futher from per package tested-with"
<*> C.optionalFieldDef "copy-fields" (field @"cfgCopyFields") CopyFieldsSome
^^^ metahelp "none|some|all" "Copy ? fields from cabal.project fields"
<*> C.monoidalFieldAla "local-ghc-options" (C.alaList' C.NoCommaFSep C.Token') (field @"cfgLocalGhcOptions")
^^^ metahelp "OPTS" "--ghc-options for local packages"
<*> C.booleanFieldDef "submodules" (field @"cfgSubmodules") False
^^^ help "Clone submodules, i.e. recursively"
<*> C.booleanFieldDef "cache" (field @"cfgCache") True
^^^ help "Disable caching"
<*> C.booleanFieldDef "install-dependencies" (field @"cfgInstallDeps") True
^^^ help "Skip separate dependency installation step"
<*> C.monoidalFieldAla "installed" (C.alaList C.FSep) (field @"cfgInstalled")
^^^ metahelp "+/-PKG" "Specify 'constraint: ... installed' packages"
<*> rangeField "tests" (field @"cfgTests") anyVersion
^^^ metahelp "RANGE" "Build tests with"
<*> rangeField "run-tests" (field @"cfgRunTests") anyVersion
^^^ metahelp "RANGE" "Run tests with (note: only built tests are run)"
<*> rangeField "benchmarks" (field @"cfgBenchmarks") anyVersion
^^^ metahelp "RANGE" "Build benchmarks"
<*> rangeField "haddock" (field @"cfgHaddock") anyVersion
^^^ metahelp "RANGE" "Haddock step"
<*> rangeField "no-tests-no-benchmarks" (field @"cfgNoTestsNoBench") anyVersion
^^^ metahelp "RANGE" "Build without tests and benchmarks"
<*> rangeField "unconstrained" (field @"cfgUnconstrainted") anyVersion
^^^ metahelp "RANGE" "Make unconstrained build"
<*> rangeField "head-hackage" (field @"cfgHeadHackage") defaultHeadHackage
^^^ metahelp "RANGE" "Use head.hackage repository. Also marks as allow-failures"
<*> C.booleanFieldDef "ghcjs-tests" (field @"cfgGhcjsTests") False
^^^ help "Run tests with GHCJS (experimental, relies on cabal-plan finding test-suites)"
<*> C.monoidalFieldAla "ghcjs-tools" (C.alaList C.FSep) (field @"cfgGhcjsTools")
-- ^^^ metahelp "TOOL" "Additional host tools to install with GHCJS"
<*> C.booleanFieldDef "test-output-direct" (field @"cfgTestOutputDirect") True
^^^ help "Use --test-show-details=direct, may cause problems with build-type: Custom"
<*> C.booleanFieldDef "cabal-check" (field @"cfgCheck") True
^^^ help "Disable cabal check run"
<*> C.monoidalFieldAla "branches" (C.alaList' C.FSep C.Token') (field @"cfgOnlyBranches")
^^^ metahelp "BRANCH" "Enable builds only for specific branches"
<*> C.monoidalFieldAla "irc-channels" (C.alaList' C.FSep C.Token') (field @"cfgIrcChannels")
^^^ metahelp "IRC" "Enable IRC notifications to given channel (e.g. 'irc.libera.chat#haskell-lens')"
<*> C.freeTextField "irc-nickname" (field @"cfgIrcNickname")
^^^ metahelp "NICKNAME" "Nickname with which to authenticate to an IRC server. Only used if `irc-channels` are set."
<*> C.freeTextField "irc-password" (field @"cfgIrcPassword")
^^^ metahelp "PASSWORD" "Password with which to authenticate to an IRC server. Only used if `irc-channels` are set."
<*> C.booleanFieldDef "irc-if-in-origin-repo" (field @"cfgIrcIfInOriginRepo") False
^^^ help "Only send IRC notifications if run from the original remote (GitHub Actions only)"
<*> C.booleanFieldDef "email-notifications" (field @"cfgEmailNotifications") True
^^^ help "Disable email notifications"
<*> C.optionalFieldAla "project-name" C.Token' (field @"cfgProjectName")
^^^ metahelp "NAME" "Project name (used for IRC notifications), defaults to package name or name of first package listed in cabal.project file"
<*> C.monoidalFieldAla "folds" Folds (field @"cfgFolds")
^^^ metahelp "FOLD" "Build steps to fold"
<*> C.booleanFieldDef "ghc-head" (field @"cfgGhcHead") False
^^^ help "Add ghc-head job"
<*> C.booleanFieldDef "postgresql" (field @"cfgPostgres") False
^^^ help "Add postgresql service"
<*> C.booleanFieldDef "google-chrome" (field @"cfgGoogleChrome") False
^^^ help "Add google-chrome service"
<*> C.monoidalFieldAla "env" Env (field @"cfgEnv")
^^^ metahelp "ENV" "Environment variables per job (e.g. `8.0.2:HADDOCK=false`)"
<*> C.optionalFieldDefAla "allow-failures" Range (field @"cfgAllowFailures") noVersion
^^^ metahelp "JOB" "Allow failures of particular GHC version"
<*> C.booleanFieldDef "last-in-series" (field @"cfgLastInSeries") False
^^^ help "[Discouraged] Assume there are only GHCs last in major series: 8.2.* will match only 8.2.2"
<*> rangeField "linux-jobs" (field @"cfgLinuxJobs") anyVersion
^^^ metahelp "RANGE" "Jobs to build on Linux"
<*> rangeField "macos-jobs" (field @"cfgMacosJobs") noVersion
^^^ metahelp "RANGE" "Jobs to additionally build with OSX"
<*> rangeField "ghcup-jobs" (field @"cfgGhcupJobs") noVersion
^^^ metahelp "RANGE" "(Linux) jobs to use ghcup to install tools"
<*> C.optionalFieldDef "ghcup-version" (field @"cfgGhcupVersion") defaultGhcupVersion
^^^ metahelp "VERSION" "ghcup version"
<*> C.monoidalFieldAla "apt" (alaSet' C.NoCommaFSep C.Token') (field @"cfgApt")
^^^ metahelp "PKG" "Additional apt packages to install"
<*> C.monoidalFieldAla "travis-patches" (C.alaList' C.NoCommaFSep C.Token') (field @"cfgTravisPatches")
^^^ metaActionHelp "PATCH" "file" ".patch files to apply to the generated Travis YAML file"
<*> C.monoidalFieldAla "github-patches" (C.alaList' C.NoCommaFSep C.Token') (field @"cfgGitHubPatches")
^^^ metaActionHelp "PATCH" "file" ".patch files to apply to the generated GitHub Actions YAML file"
<*> C.booleanFieldDef "insert-version" (field @"cfgInsertVersion") True
^^^ help "Don't insert the haskell-ci version into the generated Travis YAML file"
<*> C.optionalFieldDef "error-missing-methods" (field @"cfgErrorMissingMethods") PackageScopeLocal
^^^ metahelp "PKGSCOPE" "Insert -Werror=missing-methods for package scope (none, local, all)"
<*> C.blurFieldGrammar (field @"cfgDoctest") doctestConfigGrammar
<*> C.blurFieldGrammar (field @"cfgDocspec") docspecConfigGrammar
<*> C.blurFieldGrammar (field @"cfgHLint") hlintConfigGrammar
<*> pure [] -- constraint sets
<*> pure [] -- raw project fields
<*> C.freeTextFieldDef "raw-travis" (field @"cfgRawTravis")
^^^ help "Raw travis commands which will be run at the very end of the script"
<*> C.freeTextField "github-action-name" (field @"cfgGitHubActionName")
^^^ help "The name of GitHub Action"
-------------------------------------------------------------------------------
-- Reading
-------------------------------------------------------------------------------
readConfigFile :: MonadIO m => FilePath -> m Config
readConfigFile = liftIO . readAndParseFile parseConfigFile
parseConfigFile :: [C.Field C.Position] -> C.ParseResult Config
parseConfigFile fields0 = do
config <- C.parseFieldGrammar C.cabalSpecLatest fields configGrammar
config' <- traverse parseSection $ concat sections
return (foldl' (&) config config')
where
(fields, sections) = C.partitionFields fields0
parseSection :: C.Section C.Position -> C.ParseResult (Config -> Config)
parseSection (C.MkSection (C.Name pos name) args cfields)
| name == "constraint-set" = do
name' <- parseName pos args
let (fs, _sections) = C.partitionFields cfields
cs <- C.parseFieldGrammar C.cabalSpecLatest fs (constraintSetGrammar name')
return $ over (field @"cfgConstraintSets") (cs :)
| name == "raw-project" = do
let fs = C.fromParsecFields cfields
return $ over (field @"cfgRawProject") (++ map void fs)
| otherwise = do
C.parseWarning pos C.PWTUnknownSection $ "Unknown section " ++ fromUTF8BS name
return id
-------------------------------------------------------------------------------
-- Env
-------------------------------------------------------------------------------
newtype Env = Env (M.Map Version String)
deriving anyclass (C.Newtype (M.Map Version String))
instance C.Parsec Env where
parsec = Env . M.fromList <$> C.parsecLeadingCommaList p where
p = do
v <- C.parsec
_ <- C.char ':'
s <- C.munch1 $ \c -> c /= ','
return (v, s)
instance C.Pretty Env where
pretty (Env m) = PP.fsep . PP.punctuate PP.comma . map p . M.toList $ m where
p (v, s) = C.pretty v PP.<> PP.colon PP.<> PP.text s
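-- Illustrative round trip: the text @8.0.2:HADDOCK=false,8.2.2:FOO=bar@ parses
-- to a map from GHC version to the raw string after the first colon (here
-- @"HADDOCK=false"@ and @"FOO=bar"@), and 'C.pretty' renders the map back in
-- the same comma-separated form.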
-------------------------------------------------------------------------------
-- From Cabal
-------------------------------------------------------------------------------
parseName :: C.Position -> [C.SectionArg C.Position] -> C.ParseResult String
parseName pos args = fromUTF8BS <$> parseNameBS pos args
parseNameBS :: C.Position -> [C.SectionArg C.Position] -> C.ParseResult BS.ByteString
parseNameBS pos args = case args of
[C.SecArgName _pos secName] ->
pure secName
[C.SecArgStr _pos secName] ->
pure secName
[] -> do
C.parseFailure pos "name required"
pure ""
_ -> do
-- TODO: pretty print args
C.parseFailure pos $ "Invalid name " ++ show args
pure ""
| hvr/multi-ghc-travis | src/HaskellCI/Config.hs | bsd-3-clause | 17,538 | 0 | 106 | 5,659 | 3,313 | 1,687 | 1,626 | -1 | -1 |
{-# language CPP #-}
-- | = Name
--
-- VK_GOOGLE_hlsl_functionality1 - device extension
--
-- == VK_GOOGLE_hlsl_functionality1
--
-- [__Name String__]
-- @VK_GOOGLE_hlsl_functionality1@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 224
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- [__Contact__]
--
-- - Hai Nguyen
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_GOOGLE_hlsl_functionality1] @chaoticbob%0A<<Here describe the issue or question you have about the VK_GOOGLE_hlsl_functionality1 extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2018-07-09
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Interactions and External Dependencies__]
--
-- - This extension requires
-- <https://htmlpreview.github.io/?https://github.com/KhronosGroup/SPIRV-Registry/blob/master/extensions/GOOGLE/SPV_GOOGLE_hlsl_functionality1.html SPV_GOOGLE_hlsl_functionality1>
--
-- [__Contributors__]
--
-- - Hai Nguyen, Google
--
-- - Neil Henning, AMD
--
-- == Description
--
-- The @VK_GOOGLE_hlsl_functionality1@ extension allows use of the
-- @SPV_GOOGLE_hlsl_functionality1@ extension in SPIR-V shader modules.
--
-- == New Enum Constants
--
-- - 'GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME'
--
-- - 'GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION'
--
-- - 'GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME'
--
-- - 'GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION'
--
-- == Version History
--
-- - Revision 1, 2018-07-09 (Neil Henning)
--
-- - Initial draft
--
-- == See Also
--
-- No cross-references are available
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_GOOGLE_hlsl_functionality1 Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_GOOGLE_hlsl_functionality1 ( pattern GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION
, pattern GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME
, GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION
, pattern GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION
, GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME
, pattern GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME
) where
import Data.String (IsString)
-- No documentation found for TopLevel "VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION"
pattern GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION = GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION
-- No documentation found for TopLevel "VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME"
pattern GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME = GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME
type GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION = 1
-- No documentation found for TopLevel "VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION"
pattern GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION :: forall a . Integral a => a
pattern GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION = 1
type GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME = "VK_GOOGLE_hlsl_functionality1"
-- No documentation found for TopLevel "VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME"
pattern GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME = "VK_GOOGLE_hlsl_functionality1"
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_GOOGLE_hlsl_functionality1.hs | bsd-3-clause | 3,742 | 0 | 8 | 769 | 218 | 160 | 58 | -1 | -1 |
import Plots
import Plots.Types hiding (B)
import Plots.Types.Function
import Plots.Axis
import Diagrams.Prelude
import Diagrams.Coordinates.Polar
import Diagrams.Backend.Rasterific
myaxis :: Axis B Polar Double
myaxis = polarAxis &~ do
addPlotable $
mkParametricPlot (\t -> mkPolar t (t @@ rad))
& parametricDomain . _2 .~ 8
make :: Diagram B -> IO ()
make = renderRasterific "examples/polar.png" (mkWidth 600)
main = make $ renderAxis myaxis
| bergey/plots | examples/polar.hs | bsd-3-clause | 461 | 0 | 17 | 79 | 151 | 81 | 70 | -1 | -1 |
{-# LANGUAGE FlexibleInstances #-}
module Text.Formattable where
import qualified Data.Text as T
class Formattable f where
pack :: T.Text -> f
unpack :: f -> T.Text
instance Formattable T.Text where
pack = id
unpack = id
instance Formattable Int where
pack = read . T.unpack
unpack = T.pack . show
instance Formattable Double where
pack = read . T.unpack
unpack = T.pack . show
instance Formattable Float where
pack = read . T.unpack
unpack = T.pack . show
instance Formattable Char where
pack = read . T.unpack
unpack = T.pack . show
instance Formattable String where
pack = T.unpack
unpack = T.pack
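-- Illustrative round trips through 'T.Text':
--
-- > unpack (42 :: Int) == T.pack "42"
-- > (pack (T.pack "2.5") :: Double) == 2.5
-- > unpack "hello" == T.pack "hello" -- via the String instance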
| myuon/FuncFormat | Text/Formattable.hs | bsd-3-clause | 640 | 0 | 8 | 141 | 208 | 114 | 94 | 24 | 0 |
module HW5.ExprT where
data ExprT = Lit Integer
| Add ExprT ExprT
| Mul ExprT ExprT
deriving (Show, Eq)
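-- For example, the arithmetic expression (2 + 3) * 4 is represented as:
--
-- > Mul (Add (Lit 2) (Lit 3)) (Lit 4)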
| cgag/cis-194-solutions | src/HW5/ExprT.hs | bsd-3-clause | 129 | 0 | 6 | 44 | 40 | 23 | 17 | 5 | 0 |
module Network.AMQP.MessageBus
( Exchange (..)
, ExchangeName (..)
, Message (..)
, MessageHandler (..)
, Queue (..)
, QueueName (..)
, QueueStatus (..)
, TopicName (..)
, WithConn (..)
, ackEnvelope
, createExchange
, deleteExchange
, createQueue
, deleteQueue
, getTopicMessages
, produceTopicMessage
, subscribe
, withConn
) where
import Network.AMQP.Internal.Connection
import Network.AMQP.Internal.Consumer
import Network.AMQP.Internal.Producer
import Network.AMQP.Internal.Types
| gust/feature-creature | legacy/lib/Network/AMQP/MessageBus.hs | mit | 486 | 0 | 5 | 58 | 127 | 89 | 38 | 23 | 0 |
{-# LANGUAGE OverloadedStrings #-}
import Control.Monad (foldM, when)
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as C
import Data.IntMap.Strict (IntMap)
import qualified Data.IntMap.Strict as IntMap
import Data.List (foldl', intersperse)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Word (Word32)
import Options.Applicative
import System.IO (Handle, IOMode(ReadMode), SeekMode(AbsoluteSeek), hIsEOF, hSeek, openFile)
import SLCT.Options
import qualified SLCT.ByteStringHash as BSHash
type Hash = Int
type Count = Word32
type Hashes = IntMap Count
type Clusters = Map ByteString Count
hash :: ByteString -> Hash
{-# INLINE hash #-}
hash = fromIntegral . BSHash.fnv1a64
splitWords :: ByteString -> [ByteString]
{-# INLINE splitWords #-}
splitWords = C.split ' '
clusterify :: Hashes -> Count -> [ByteString] -> ByteString
clusterify freq wf ws = BS.concat $ intersperse " " $ map (\w -> if frequent w then w else "*") ws
where
frequent :: ByteString -> Bool
frequent w = case IntMap.lookup (hash w) freq of Just i -> i >= wf
Nothing -> False
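-- Illustrative example (hypothetical frequencies): if the hashes of "ERROR" and
-- "opening" meet the threshold @wf@ in @freq@ but the hash of "/tmp/x" does not,
-- then
--
-- > clusterify freq wf ["ERROR", "opening", "/tmp/x"] = "ERROR opening *"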
wordFrequency :: Hashes -> Handle -> IO Hashes
wordFrequency hashes h = do
eof <- hIsEOF h
if eof then return hashes
else do
l <- BS.hGetLine h
let ws = splitWords l
let hashes' = foldl' (\m w -> IntMap.insertWith (+) (hash w) 1 m) hashes ws
wordFrequency hashes' h
populateClusters :: Hashes -> Count -> Clusters -> Handle -> IO Clusters
populateClusters freq wf clusters h = do
eof <- hIsEOF h
if eof then return clusters
else do
l <- BS.hGetLine h
let ws = splitWords l
let cluster = clusterify freq wf ws
populateClusters freq wf (Map.insertWith (+) cluster 1 clusters) h
main :: IO()
main = do
opts <- execParser (optionsParser `withInfo` "SLCT-hs version 0.1.0.0, Copyright AUTHORS")
print opts
when (null $ inputFiles opts) $ error "No input files specified"
inputs <- mapM (`openFile` ReadMode) (inputFiles opts)
frequentWords <- foldM wordFrequency IntMap.empty inputs
-- return to beginning of input files for a second pass
mapM_ (\h -> hSeek h AbsoluteSeek 0) inputs
clusters <- foldM (populateClusters frequentWords (fromIntegral $ minWordFreq opts)) Map.empty inputs
-- print stats
putStrLn $ "total distinct words: " ++ show (IntMap.size frequentWords)
putStrLn $ "clusters: " ++ show (Map.size clusters)
-- print all clusters with at least "--cf N" matches
let filtered = Map.filter (>= (fromIntegral $ minClusterFreq opts)) clusters
mapM_ (\(k,v) -> putStrLn (show v ++ "\t" ++ show k)) (Map.assocs filtered)
where
withInfo opts desc = info (helper <*> opts) $ progDesc desc
| dmit/slct-hs | slct.hs | gpl-2.0 | 2,931 | 6 | 25 | 700 | 932 | 486 | 446 | 62 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.CreateNetworkInterface
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Creates a network interface in the specified subnet.
--
-- For more information about network interfaces, see <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-eni.html Elastic Network Interfaces>
-- in the /Amazon Elastic Compute Cloud User Guide for Linux/.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-CreateNetworkInterface.html>
module Network.AWS.EC2.CreateNetworkInterface
(
-- * Request
CreateNetworkInterface
-- ** Request constructor
, createNetworkInterface
-- ** Request lenses
, cniDescription
, cniDryRun
, cniGroups
, cniPrivateIpAddress
, cniPrivateIpAddresses
, cniSecondaryPrivateIpAddressCount
, cniSubnetId
-- * Response
, CreateNetworkInterfaceResponse
-- ** Response constructor
, createNetworkInterfaceResponse
-- ** Response lenses
, cnirNetworkInterface
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
data CreateNetworkInterface = CreateNetworkInterface
{ _cniDescription :: Maybe Text
, _cniDryRun :: Maybe Bool
, _cniGroups :: List "SecurityGroupId" Text
, _cniPrivateIpAddress :: Maybe Text
, _cniPrivateIpAddresses :: List "item" PrivateIpAddressSpecification
, _cniSecondaryPrivateIpAddressCount :: Maybe Int
, _cniSubnetId :: Text
} deriving (Eq, Read, Show)
-- | 'CreateNetworkInterface' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cniDescription' @::@ 'Maybe' 'Text'
--
-- * 'cniDryRun' @::@ 'Maybe' 'Bool'
--
-- * 'cniGroups' @::@ ['Text']
--
-- * 'cniPrivateIpAddress' @::@ 'Maybe' 'Text'
--
-- * 'cniPrivateIpAddresses' @::@ ['PrivateIpAddressSpecification']
--
-- * 'cniSecondaryPrivateIpAddressCount' @::@ 'Maybe' 'Int'
--
-- * 'cniSubnetId' @::@ 'Text'
--
createNetworkInterface :: Text -- ^ 'cniSubnetId'
-> CreateNetworkInterface
createNetworkInterface p1 = CreateNetworkInterface
{ _cniSubnetId = p1
, _cniDescription = Nothing
, _cniPrivateIpAddress = Nothing
, _cniGroups = mempty
, _cniPrivateIpAddresses = mempty
, _cniSecondaryPrivateIpAddressCount = Nothing
, _cniDryRun = Nothing
}
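-- A usage sketch (illustrative; the subnet and security-group IDs are
-- placeholders, OverloadedStrings is assumed for the 'Text' literals, and the
-- @&@ and @.~@ operators come from a lens library such as Control.Lens):
--
-- > createNetworkInterface "subnet-12345678"
-- > & cniDescription .~ Just "my interface"
-- > & cniGroups .~ ["sg-12345678"]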
-- | A description for the network interface.
cniDescription :: Lens' CreateNetworkInterface (Maybe Text)
cniDescription = lens _cniDescription (\s a -> s { _cniDescription = a })
cniDryRun :: Lens' CreateNetworkInterface (Maybe Bool)
cniDryRun = lens _cniDryRun (\s a -> s { _cniDryRun = a })
-- | The IDs of one or more security groups.
cniGroups :: Lens' CreateNetworkInterface [Text]
cniGroups = lens _cniGroups (\s a -> s { _cniGroups = a }) . _List
-- | The primary private IP address of the network interface. If you don't specify
-- an IP address, Amazon EC2 selects one for you from the subnet range. If you
-- specify an IP address, you cannot indicate any IP addresses specified in 'privateIpAddresses' as primary (only one IP address can be designated as primary).
cniPrivateIpAddress :: Lens' CreateNetworkInterface (Maybe Text)
cniPrivateIpAddress =
lens _cniPrivateIpAddress (\s a -> s { _cniPrivateIpAddress = a })
-- | One or more private IP addresses.
cniPrivateIpAddresses :: Lens' CreateNetworkInterface [PrivateIpAddressSpecification]
cniPrivateIpAddresses =
lens _cniPrivateIpAddresses (\s a -> s { _cniPrivateIpAddresses = a })
. _List
-- | The number of secondary private IP addresses to assign to a network
-- interface. When you specify a number of secondary IP addresses, Amazon EC2
-- selects these IP addresses within the subnet range. You can't specify this
-- option and specify more than one private IP address using 'privateIpAddresses'.
--
-- The number of IP addresses you can assign to a network interface varies by
-- instance type. For more information, see <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-eni.html#AvailableIpPerENI Private IP Addresses Per ENI Per Instance Type> in the /Amazon Elastic Compute Cloud User Guide for Linux/.
cniSecondaryPrivateIpAddressCount :: Lens' CreateNetworkInterface (Maybe Int)
cniSecondaryPrivateIpAddressCount =
lens _cniSecondaryPrivateIpAddressCount
(\s a -> s { _cniSecondaryPrivateIpAddressCount = a })
-- | The ID of the subnet to associate with the network interface.
cniSubnetId :: Lens' CreateNetworkInterface Text
cniSubnetId = lens _cniSubnetId (\s a -> s { _cniSubnetId = a })
newtype CreateNetworkInterfaceResponse = CreateNetworkInterfaceResponse
{ _cnirNetworkInterface :: Maybe NetworkInterface
} deriving (Eq, Read, Show)
-- | 'CreateNetworkInterfaceResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cnirNetworkInterface' @::@ 'Maybe' 'NetworkInterface'
--
createNetworkInterfaceResponse :: CreateNetworkInterfaceResponse
createNetworkInterfaceResponse = CreateNetworkInterfaceResponse
{ _cnirNetworkInterface = Nothing
}
-- | Information about the network interface.
cnirNetworkInterface :: Lens' CreateNetworkInterfaceResponse (Maybe NetworkInterface)
cnirNetworkInterface =
lens _cnirNetworkInterface (\s a -> s { _cnirNetworkInterface = a })
instance ToPath CreateNetworkInterface where
toPath = const "/"
instance ToQuery CreateNetworkInterface where
toQuery CreateNetworkInterface{..} = mconcat
[ "Description" =? _cniDescription
, "DryRun" =? _cniDryRun
, "SecurityGroupId" `toQueryList` _cniGroups
, "PrivateIpAddress" =? _cniPrivateIpAddress
, "PrivateIpAddresses" `toQueryList` _cniPrivateIpAddresses
, "SecondaryPrivateIpAddressCount" =? _cniSecondaryPrivateIpAddressCount
, "SubnetId" =? _cniSubnetId
]
instance ToHeaders CreateNetworkInterface
instance AWSRequest CreateNetworkInterface where
type Sv CreateNetworkInterface = EC2
type Rs CreateNetworkInterface = CreateNetworkInterfaceResponse
request = post "CreateNetworkInterface"
response = xmlResponse
instance FromXML CreateNetworkInterfaceResponse where
parseXML x = CreateNetworkInterfaceResponse
<$> x .@? "networkInterface"
| kim/amazonka | amazonka-ec2/gen/Network/AWS/EC2/CreateNetworkInterface.hs | mpl-2.0 | 7,555 | 0 | 10 | 1,677 | 855 | 517 | 338 | 95 | 1 |
module TestLexer (testLexer) where
import Test.HUnit
import Lexer
checkToken :: String -> (TokenType -> Bool) -> Test
checkToken s f = TestLabel ("test token \"" ++ s ++ "\"") $ TestCase $ do
let tokens = alexScanTokens s
length tokens @?= 1
f (tokenType . head $ tokens) @? ""
checkTokenList :: String -> [TokenType] -> Test
checkTokenList s ts = TestLabel ("test token list \"" ++ s ++ "\"") $ TestCase $ do
let tokens = alexScanTokens s
(map tokenType tokens) @?= ts
checkTokenEq :: String -> TokenType -> Test
checkTokenEq s t = checkToken s (\t' -> t' == t)
isFloat :: TokenType -> Bool
isFloat (TLiteral (LFloat _)) = True
isFloat _ = False
isDouble :: TokenType -> Bool
isDouble (TLiteral (LDouble _)) = True
isDouble _ = False
checkKeyword :: String -> Test
checkKeyword s = checkTokenEq s (TKeyword s)
checkOperator :: String -> Test
checkOperator s = checkTokenEq s (TOperator s)
checkIdentifier :: String -> Test
checkIdentifier s = checkTokenEq s (TIdentifier s)
testLexer :: Test.HUnit.Test
testLexer = TestList $ [
checkTokenEq "123" (TLiteral . LInt $ 123),
checkTokenEq "123L" (TLiteral . LLong $ 123),
checkTokenEq "true" (TLiteral . LBoolean $ True),
checkTokenEq "false" (TLiteral . LBoolean $ False),
checkToken "123.123" isDouble,
checkToken "123.123" isDouble,
checkToken "123.123D" isDouble,
checkToken "123.123F" isFloat,
uncurry checkTokenList tokenList
] ++ (map checkKeyword keywords)
++ (map checkOperator operators)
++ (map checkIdentifier identifiers)
tokenList :: (String, [TokenType])
tokenList = ("class A { int x; A(int x) { this.x = x; } }",
[
TKeyword "class", TIdentifier "A", TOperator "{",
TKeyword "int", TIdentifier "x", TOperator ";",
TIdentifier "A", TOperator "(", TKeyword "int", TIdentifier "x", TOperator ")", TOperator "{",
TKeyword "this", TOperator ".", TIdentifier "x", TOperator "=", TIdentifier "x", TOperator ";",
TOperator "}", TOperator "}"
])
keywords :: [String]
keywords = [
"class",
"void",
"if",
"else",
"while",
"for",
"break",
"continue",
"return",
"null",
"this",
"new",
"boolean",
"byte",
"short",
"int",
"long",
"float",
"double"
]
operators :: [String]
operators = [
"||",
"&&",
"==",
"!=",
"<",
">",
"<=",
">=",
"+",
"-",
"*",
"/",
"%",
"!",
"--",
"++",
"(",
")",
"{",
"}",
",",
";",
".",
"="
]
identifiers :: [String]
identifiers = [
"class1",
"value",
"BufferedInputStream",
"CONSTANT_NAME",
"_strange_name",
"a_1_b_"
]
| ademinn/JavaWithClasses | test/src/TestLexer.hs | bsd-3-clause | 2,961 | 0 | 13 | 934 | 885 | 481 | 404 | 103 | 1 |
-- |
-- Utility functions.
--
module Utility (
alternate
, bin
, equating
, exitBecause
, logStr
, logStrLn
, pad
, pleat
, trimBins
) where
import Data.Ord (comparing)
import Data.List (sort, sortBy, groupBy)
import System.Exit (exitFailure)
import System.IO (hPutStr, hPutStrLn, stderr)
-- | Put data in bins defined by the given categories.
bin :: Ord a => [a] -> [b] -> [[b]]
bin cats = map (snd . unzip)
. groupBy (equating fst)
. sortBy (comparing fst)
. zip cats
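-- For example (categories and data are zipped positionally):
--
-- > bin [1,2,1,2] "abcd" = ["ac","bd"]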
-- | Trim bins so that each bin contains at most the given number of elements.
trimBins :: Ord a => Int -> [[a]] -> [[a]]
trimBins n = map (take n . pleat)
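-- For example, keeping at most 3 elements per bin retains the most central
-- values of each bin (see 'pleat' below):
--
-- > trimBins 3 [[1..5],[10..12]] = [[3,2,4],[11,10,12]]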
-- | Take a list and reorder it so that the median element comes first and the
-- extremal elements come last. Example:
--
-- > pleat [1..9] = [5,4,6,3,7,2,8,1,9]
-- > pleat [0..9] = [5,4,6,3,7,2,8,1,9,0]
--
-- Note that the input need not be sorted.
pleat :: Ord a => [a] -> [a]
pleat xs = alternate bs (reverse as)
where
(as,bs) = splitAt (length xs `div` 2) $ sort xs
-- | Combine two lists into one by alternately taking an element from the
-- first, then from the second. For example:
--
-- > alternate [1..3] [5..9] = [1,5,2,6,3,7,8,9]
--
-- Note that no elements from either list are lost.
alternate :: [a] -> [a] -> [a]
alternate (x:xs) (y:ys) = x : y : alternate xs ys
alternate xs [] = xs
alternate [] ys = ys
-- | Use e.g. with sorting routines as in
--
-- > sortBy (equating fst)
--
equating :: Eq a => (t -> a) -> t -> t -> Bool
equating f x y = f x == f y
-- | Exit program with failure and log reason to stderr.
exitBecause :: String -> IO b
exitBecause reason = logStrLn reason >> exitFailure
-- | Log string with newline to stderr
logStrLn :: String -> IO ()
logStrLn = hPutStrLn stderr
-- | Log string to stderr
logStr :: String -> IO ()
logStr = hPutStr stderr
-- | Pad string with spaces so that it is at least 'n' chars wide
pad :: Int -> String -> String
pad n s | n > k = replicate (n-k) ' ' ++ s
| otherwise = s
where
k = length s
| b4winckler/up-down-signature | src/sigscore/Utility.hs | bsd-3-clause | 2,085 | 0 | 11 | 507 | 599 | 328 | 271 | 40 | 1 |
{-# OPTIONS_GHC -fno-implicit-prelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Ord
-- Copyright : (c) The University of Glasgow 2005
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : stable
-- Portability : portable
--
-- Orderings
--
-----------------------------------------------------------------------------
module Data.Ord (
Ord(..),
Ordering(..),
comparing,
) where
#if __GLASGOW_HASKELL__
import GHC.Base
#endif
-- |
-- > comparing p x y = compare (p x) (p y)
--
-- Useful combinator for use in conjunction with the @xxxBy@ family
-- of functions from "Data.List", for example:
--
-- > ... sortBy (comparing fst) ...
comparing :: (Ord a) => (b -> a) -> b -> b -> Ordering
comparing p x y = compare (p x) (p y)
| alekar/hugs | packages/base/Data/Ord.hs | bsd-3-clause | 885 | 0 | 8 | 161 | 116 | 76 | 40 | 7 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Stack.Options
(BuildCommand(..)
,GlobalOptsContext(..)
,benchOptsParser
,buildOptsParser
,cleanOptsParser
,configCmdSetParser
,configOptsParser
,dockerOptsParser
,dockerCleanupOptsParser
,dotOptsParser
,execOptsParser
,evalOptsParser
,globalOptsParser
,initOptsParser
,newOptsParser
,nixOptsParser
,logLevelOptsParser
,ghciOptsParser
,solverOptsParser
,testOptsParser
,haddockOptsParser
,hpcReportOptsParser
,pvpBoundsOption
,globalOptsFromMonoid
,splitObjsWarning
) where
import Control.Monad.Logger (LogLevel (..))
import Data.Char (isSpace, toLower, toUpper)
import Data.List (intercalate)
import Data.List.Split (splitOn)
import qualified Data.Map as Map
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Maybe
import Data.Monoid.Extra
import qualified Data.Set as Set
import qualified Data.Text as T
import Data.Text.Read (decimal)
import Distribution.Version (anyVersion)
import Options.Applicative
import Options.Applicative.Args
import Options.Applicative.Builder.Extra
import Options.Applicative.Types (fromM, oneM, readerAsk)
import Path
import Stack.Build (splitObjsWarning)
import Stack.Clean (CleanOpts (..))
import Stack.Config (packagesParser)
import Stack.ConfigCmd
import Stack.Constants
import Stack.Coverage (HpcReportOpts (..))
import Stack.Docker
import qualified Stack.Docker as Docker
import Stack.Dot
import Stack.Ghci (GhciOpts (..))
import Stack.Init
import Stack.New
import Stack.Nix
import Stack.Types
import Stack.Types.TemplateName
-- | Allows adjusting global options depending on their context.
-- Note: This was being used to remove ambiguity between the local and global
-- implementation of the stack init --resolver option. Now that stack init has
-- no local --resolver this is no longer used, but the code is kept for any
-- similar future use cases.
data GlobalOptsContext
= OuterGlobalOpts -- ^ Global options before subcommand name
| OtherCmdGlobalOpts -- ^ Global options following any other subcommand
| BuildCmdGlobalOpts
deriving (Show, Eq)
-- | Parser for bench arguments.
-- FIXME hiding options
benchOptsParser :: Bool -> Parser BenchmarkOptsMonoid
benchOptsParser hide0 = BenchmarkOptsMonoid
<$> optionalFirst (strOption (long "benchmark-arguments" <>
metavar "BENCH_ARGS" <>
help ("Forward BENCH_ARGS to the benchmark suite. " <>
"Supports templates from `cabal bench`") <>
hide))
<*> optionalFirst (switch (long "no-run-benchmarks" <>
help "Disable running of benchmarks. (Benchmarks will still be built.)" <>
hide))
where hide = hideMods hide0
-- | Parser for CLI-only build arguments
buildOptsParser :: BuildCommand
-> Parser BuildOptsCLI
buildOptsParser cmd =
BuildOptsCLI <$>
many
(textArgument
(metavar "TARGET" <>
help "If none specified, use all packages")) <*>
switch
(long "dry-run" <>
help "Don't build anything, just prepare to") <*>
((\x y z ->
concat [x, y, z]) <$>
flag
[]
["-Wall", "-Werror"]
(long "pedantic" <>
help "Turn on -Wall and -Werror") <*>
flag
[]
["-O0"]
(long "fast" <>
help "Turn off optimizations (-O0)") <*>
many
(textOption
(long "ghc-options" <>
metavar "OPTION" <>
help "Additional options passed to GHC"))) <*>
(Map.unionsWith Map.union <$>
many
(option
readFlag
(long "flag" <>
metavar "PACKAGE:[-]FLAG" <>
help
("Override flags set in stack.yaml " <>
"(applies to local packages and extra-deps)")))) <*>
(flag'
BSOnlyDependencies
(long "dependencies-only" <>
help "A synonym for --only-dependencies") <|>
flag'
BSOnlySnapshot
(long "only-snapshot" <>
help
"Only build packages for the snapshot database, not the local database") <|>
flag'
BSOnlyDependencies
(long "only-dependencies" <>
help
"Only build packages that are dependencies of targets on the command line") <|>
pure BSAll) <*>
(flag'
FileWatch
(long "file-watch" <>
help
"Watch for changes in local files and automatically rebuild. Ignores files in VCS boring/ignore file") <|>
flag'
FileWatchPoll
(long "file-watch-poll" <>
help
"Like --file-watch, but polling the filesystem instead of using events") <|>
pure NoFileWatch) <*>
many (cmdOption
(long "exec" <>
metavar "CMD [ARGS]" <>
help "Command and arguments to run after a successful build")) <*>
switch
(long "only-configure" <>
help
"Only perform the configure step, not any builds. Intended for tool usage, may break when used on multiple packages at once!") <*>
pure cmd
-- | Parser for package:[-]flag
readFlag :: ReadM (Map (Maybe PackageName) (Map FlagName Bool))
readFlag = do
s <- readerAsk
case break (== ':') s of
(pn, ':':mflag) -> do
pn' <-
case parsePackageNameFromString pn of
Nothing
| pn == "*" -> return Nothing
| otherwise -> readerError $ "Invalid package name: " ++ pn
Just x -> return $ Just x
let (b, flagS) =
case mflag of
'-':x -> (False, x)
_ -> (True, mflag)
flagN <-
case parseFlagNameFromString flagS of
Nothing -> readerError $ "Invalid flag name: " ++ flagS
Just x -> return x
return $ Map.singleton pn' $ Map.singleton flagN b
_ -> readerError "Must have a colon"
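-- Illustrative command-line forms accepted by this reader (package and flag
-- names are placeholders):
--
-- > --flag mypackage:some-flag -- enable some-flag for mypackage
-- > --flag mypackage:-some-flag -- disable some-flag for mypackage
-- > --flag '*:some-flag' -- enable some-flag for all local packages/extra-deps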
-- | Command-line parser for the clean command.
cleanOptsParser :: Parser CleanOpts
cleanOptsParser = CleanShallow <$> packages <|> doFullClean
where
packages =
many
(packageNameArgument
(metavar "PACKAGE" <>
help "If none specified, clean all local packages"))
doFullClean =
flag'
CleanFull
(long "full" <>
help "Delete all work directories (.stack-work by default) in the project")
-- | Command-line arguments parser for configuration.
configOptsParser :: GlobalOptsContext -> Parser ConfigMonoid
configOptsParser hide0 =
(\stackRoot workDir buildOpts dockerOpts nixOpts systemGHC installGHC arch os ghcVariant jobs includes libs skipGHCCheck skipMsys localBin modifyCodePage allowDifferentUser -> mempty
{ configMonoidStackRoot = stackRoot
, configMonoidWorkDir = workDir
, configMonoidBuildOpts = buildOpts
, configMonoidDockerOpts = dockerOpts
, configMonoidNixOpts = nixOpts
, configMonoidSystemGHC = systemGHC
, configMonoidInstallGHC = installGHC
, configMonoidSkipGHCCheck = skipGHCCheck
, configMonoidArch = arch
, configMonoidOS = os
, configMonoidGHCVariant = ghcVariant
, configMonoidJobs = jobs
, configMonoidExtraIncludeDirs = includes
, configMonoidExtraLibDirs = libs
, configMonoidSkipMsys = skipMsys
, configMonoidLocalBinPath = localBin
, configMonoidModifyCodePage = modifyCodePage
, configMonoidAllowDifferentUser = allowDifferentUser
})
<$> optionalFirst (option readAbsDir
( long stackRootOptionName
<> metavar (map toUpper stackRootOptionName)
<> help ("Absolute path to the global stack root directory " ++
"(Overrides any STACK_ROOT environment variable)")
<> hide
))
<*> optionalFirst (strOption
( long "work-dir"
<> metavar "WORK-DIR"
<> help "Override work directory (default: .stack-work)"
<> hide
))
<*> buildOptsMonoidParser (hide0 /= BuildCmdGlobalOpts)
<*> dockerOptsParser True
<*> nixOptsParser True
<*> firstBoolFlags
"system-ghc"
"using the system installed GHC (on the PATH) if available and a matching version"
hide
<*> firstBoolFlags
"install-ghc"
"downloading and installing GHC if necessary (can be done manually with stack setup)"
hide
<*> optionalFirst (strOption
( long "arch"
<> metavar "ARCH"
<> help "System architecture, e.g. i386, x86_64"
<> hide
))
<*> optionalFirst (strOption
( long "os"
<> metavar "OS"
<> help "Operating system, e.g. linux, windows"
<> hide
))
<*> optionalFirst (ghcVariantParser (hide0 /= OuterGlobalOpts))
<*> optionalFirst (option auto
( long "jobs"
<> short 'j'
<> metavar "JOBS"
<> help "Number of concurrent jobs to run"
<> hide
))
<*> fmap Set.fromList (many (textOption
( long "extra-include-dirs"
<> metavar "DIR"
<> help "Extra directories to check for C header files"
<> hide
)))
<*> fmap Set.fromList (many (textOption
( long "extra-lib-dirs"
<> metavar "DIR"
<> help "Extra directories to check for libraries"
<> hide
)))
<*> firstBoolFlags
"skip-ghc-check"
"skipping the GHC version and architecture check"
hide
<*> firstBoolFlags
"skip-msys"
"skipping the local MSYS installation (Windows only)"
hide
<*> optionalFirst (strOption
( long "local-bin-path"
<> metavar "DIR"
<> help "Install binaries to DIR"
<> hide
))
<*> firstBoolFlags
"modify-code-page"
"setting the codepage to support UTF-8 (Windows only)"
hide
<*> firstBoolFlags
"allow-different-user"
("permission for users other than the owner of the stack root " ++
"directory to use a stack installation (POSIX only)")
hide
where hide = hideMods (hide0 /= OuterGlobalOpts)
readAbsDir :: ReadM (Path Abs Dir)
readAbsDir = do
s <- readerAsk
case parseAbsDir s of
Just p -> return p
Nothing ->
readerError
("Failed to parse absolute path to directory: '" ++ s ++ "'")
buildOptsMonoidParser :: Bool -> Parser BuildOptsMonoid
buildOptsMonoidParser hide0 =
transform <$> trace <*> profile <*> options
where
hide =
hideMods hide0
transform tracing profiling =
enable
where
enable opts
| tracing || profiling =
opts
{ buildMonoidLibProfile = First (Just True)
, buildMonoidExeProfile = First (Just True)
, buildMonoidBenchmarkOpts = bopts
{ beoMonoidAdditionalArgs = First (getFirst (beoMonoidAdditionalArgs bopts) <>
Just (" " <> unwords additionalArgs))
}
, buildMonoidTestOpts = topts
{ toMonoidAdditionalArgs = (toMonoidAdditionalArgs topts) <>
additionalArgs
}
}
| otherwise =
opts
where
bopts =
buildMonoidBenchmarkOpts opts
topts =
buildMonoidTestOpts opts
additionalArgs =
"+RTS" : catMaybes [trac, prof, Just "-RTS"]
trac =
if tracing
then Just "-xc"
else Nothing
prof =
if profiling
then Just "-p"
else Nothing
profile =
flag
False
True
(long "profile" <>
help
"Enable profiling in libraries, executables, etc. \
\for all expressions and generate a profiling report\
\ in exec or benchmarks" <>
hide)
trace =
flag
False
True
(long "trace" <>
help
"Enable profiling in libraries, executables, etc. \
\for all expressions and generate a backtrace on \
\exception" <>
hide)
options =
BuildOptsMonoid <$> libProfiling <*> exeProfiling <*> haddock <*>
haddockOptsParser hide0 <*> openHaddocks <*>
haddockDeps <*> copyBins <*> preFetch <*> keepGoing <*> forceDirty <*>
tests <*> testOptsParser hide0 <*> benches <*> benchOptsParser hide0 <*> reconfigure <*>
cabalVerbose <*> splitObjs
libProfiling =
firstBoolFlags
"library-profiling"
"library profiling for TARGETs and all its dependencies"
hide
exeProfiling =
firstBoolFlags
"executable-profiling"
"executable profiling for TARGETs and all its dependencies"
hide
haddock =
firstBoolFlags
"haddock"
"generating Haddocks the package(s) in this directory/configuration"
hide
openHaddocks =
firstBoolFlags
"open"
"opening the local Haddock documentation in the browser"
hide
haddockDeps =
firstBoolFlags "haddock-deps" "building Haddocks for dependencies" hide
copyBins =
firstBoolFlags
"copy-bins"
"copying binaries to the local-bin-path (see 'stack path')"
hide
keepGoing =
firstBoolFlags
"keep-going"
"continue running after a step fails (default: false for build, true for test/bench)"
hide
preFetch =
firstBoolFlags
"prefetch"
"Fetch packages necessary for the build immediately, useful with --dry-run"
hide
forceDirty =
firstBoolFlags
"force-dirty"
"Force treating all local packages as having dirty files (useful for cases where stack can't detect a file change"
hide
tests =
firstBoolFlags
"test"
"testing the package(s) in this directory/configuration"
hide
benches =
firstBoolFlags
"bench"
"benchmarking the package(s) in this directory/configuration"
hide
reconfigure =
firstBoolFlags
"reconfigure"
"Perform the configure step even if unnecessary. Useful in some corner cases with custom Setup.hs files"
hide
cabalVerbose =
firstBoolFlags
"cabal-verbose"
"Ask Cabal to be verbose in its output"
hide
splitObjs =
firstBoolFlags
"split-objs"
("Enable split-objs, to reduce output size (at the cost of build time). " ++ splitObjsWarning)
hide
nixOptsParser :: Bool -> Parser NixOptsMonoid
nixOptsParser hide0 = overrideActivation <$>
(NixOptsMonoid
<$> pure (Any False)
<*> firstBoolFlags nixCmdName
"use of a Nix-shell"
hide
<*> firstBoolFlags "nix-pure"
"use of a pure Nix-shell"
hide
<*> optionalFirst
(textArgsOption
(long "nix-packages" <>
metavar "NAMES" <>
help "List of packages that should be available in the nix-shell (space separated)" <>
hide))
<*> optionalFirst
(option
str
(long "nix-shell-file" <>
metavar "FILEPATH" <>
help "Nix file to be used to launch a nix-shell (for regular Nix users)" <>
hide))
<*> optionalFirst
(textArgsOption
(long "nix-shell-options" <>
metavar "OPTIONS" <>
help "Additional options passed to nix-shell" <>
hide))
<*> optionalFirst
(textArgsOption
(long "nix-path" <>
metavar "PATH_OPTIONS" <>
help "Additional options to override NIX_PATH parts (notably 'nixpkgs')" <>
hide))
)
where
hide = hideMods hide0
overrideActivation m =
if m /= mempty then m { nixMonoidEnable = (First . Just . fromFirst True) (nixMonoidEnable m) }
else m
textArgsOption = fmap (map T.pack) . argsOption
-- | Options parser configuration for Docker.
dockerOptsParser :: Bool -> Parser DockerOptsMonoid
dockerOptsParser hide0 =
DockerOptsMonoid
<$> pure (Any False)
<*> firstBoolFlags dockerCmdName
"using a Docker container"
hide
<*> fmap First
((Just . DockerMonoidRepo) <$> option str (long (dockerOptName dockerRepoArgName) <>
hide <>
metavar "NAME" <>
help "Docker repository name") <|>
(Just . DockerMonoidImage) <$> option str (long (dockerOptName dockerImageArgName) <>
hide <>
metavar "IMAGE" <>
help "Exact Docker image ID (overrides docker-repo)") <|>
pure Nothing)
<*> firstBoolFlags (dockerOptName dockerRegistryLoginArgName)
"registry requires login"
hide
<*> firstStrOption (long (dockerOptName dockerRegistryUsernameArgName) <>
hide <>
metavar "USERNAME" <>
help "Docker registry username")
<*> firstStrOption (long (dockerOptName dockerRegistryPasswordArgName) <>
hide <>
metavar "PASSWORD" <>
help "Docker registry password")
<*> firstBoolFlags (dockerOptName dockerAutoPullArgName)
"automatic pulling latest version of image"
hide
<*> firstBoolFlags (dockerOptName dockerDetachArgName)
"running a detached Docker container"
hide
<*> firstBoolFlags (dockerOptName dockerPersistArgName)
"not deleting container after it exits"
hide
<*> firstStrOption (long (dockerOptName dockerContainerNameArgName) <>
hide <>
metavar "NAME" <>
help "Docker container name")
<*> argsOption (long (dockerOptName dockerRunArgsArgName) <>
hide <>
value [] <>
metavar "'ARG1 [ARG2 ...]'" <>
help "Additional options to pass to 'docker run'")
<*> many (option auto (long (dockerOptName dockerMountArgName) <>
hide <>
metavar "(PATH | HOST-PATH:CONTAINER-PATH)" <>
help ("Mount volumes from host in container " ++
"(may specify multiple times)")))
<*> many (option str (long (dockerOptName dockerEnvArgName) <>
hide <>
metavar "NAME=VALUE" <>
help ("Set environment variable in container " ++
"(may specify multiple times)")))
<*> firstStrOption (long (dockerOptName dockerDatabasePathArgName) <>
hide <>
metavar "PATH" <>
help "Location of image usage tracking database")
<*> firstStrOption
     (long (dockerOptName dockerStackExeArgName) <>
hide <>
metavar (intercalate "|"
[ dockerStackExeDownloadVal
, dockerStackExeHostVal
, dockerStackExeImageVal
, "PATH" ]) <>
help (concat [ "Location of "
, stackProgName
, " executable used in container" ]))
<*> firstBoolFlags (dockerOptName dockerSetUserArgName)
"setting user in container to match host"
hide
<*> pure (IntersectingVersionRange anyVersion)
where
dockerOptName optName = dockerCmdName ++ "-" ++ T.unpack optName
firstStrOption = optionalFirst . option str
hide = hideMods hide0
-- | Parser for docker cleanup arguments.
dockerCleanupOptsParser :: Parser Docker.CleanupOpts
dockerCleanupOptsParser =
Docker.CleanupOpts <$>
(flag' Docker.CleanupInteractive
(short 'i' <>
long "interactive" <>
help "Show cleanup plan in editor and allow changes (default)") <|>
flag' Docker.CleanupImmediate
(short 'y' <>
long "immediate" <>
help "Immediately execute cleanup plan") <|>
flag' Docker.CleanupDryRun
(short 'n' <>
long "dry-run" <>
help "Display cleanup plan but do not execute") <|>
pure Docker.CleanupInteractive) <*>
opt (Just 14) "known-images" "LAST-USED" <*>
opt Nothing "unknown-images" "CREATED" <*>
opt (Just 0) "dangling-images" "CREATED" <*>
opt Nothing "stopped-containers" "CREATED" <*>
opt Nothing "running-containers" "CREATED"
where opt def' name mv =
fmap Just
(option auto
(long name <>
metavar (mv ++ "-DAYS-AGO") <>
help ("Remove " ++
toDescr name ++
" " ++
map toLower (toDescr mv) ++
" N days ago" ++
case def' of
Just n -> " (default " ++ show n ++ ")"
Nothing -> ""))) <|>
flag' Nothing
(long ("no-" ++ name) <>
help ("Do not remove " ++
toDescr name ++
case def' of
Just _ -> ""
Nothing -> " (default)")) <|>
pure def'
toDescr = map (\c -> if c == '-' then ' ' else c)
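          -- Illustration (not in the original source): with the pairs used above,
          -- 'opt (Just 14) "known-images" "LAST-USED"' produces the flags
          --   --known-images LAST-USED-DAYS-AGO
          --     ("Remove known images last used N days ago (default 14)")
          --   --no-known-images ("Do not remove known images")
          -- and falls back to the default of 14 days when neither flag is given.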
-- | Parser for arguments to `stack dot`
dotOptsParser :: Parser DotOpts
dotOptsParser = DotOpts
<$> includeExternal
<*> includeBase
<*> depthLimit
<*> fmap (maybe Set.empty Set.fromList . fmap splitNames) prunedPkgs
where includeExternal = boolFlags False
"external"
"inclusion of external dependencies"
idm
includeBase = boolFlags True
"include-base"
"inclusion of dependencies on base"
idm
depthLimit =
optional (option auto
(long "depth" <>
metavar "DEPTH" <>
help ("Limit the depth of dependency resolution " <>
"(Default: No limit)")))
prunedPkgs = optional (strOption
(long "prune" <>
metavar "PACKAGES" <>
help ("Prune each package name " <>
"from the comma separated list " <>
"of package names PACKAGES")))
splitNames :: String -> [String]
splitNames = map (takeWhile (not . isSpace) . dropWhile isSpace) . splitOn ","
ghciOptsParser :: Parser GhciOpts
ghciOptsParser = GhciOpts
<$> switch (long "no-build" <> help "Don't build before launching GHCi")
<*> fmap concat (many (argsOption (long "ghci-options" <>
metavar "OPTION" <>
help "Additional options passed to GHCi")))
<*> optional
(strOption (long "with-ghc" <>
metavar "GHC" <>
help "Use this GHC to run GHCi"))
<*> (not <$> boolFlags True "load" "load modules on start-up" idm)
<*> packagesParser
<*> optional
(textOption
(long "main-is" <>
metavar "TARGET" <>
help "Specify which target should contain the main \
\module to load, such as for an executable for \
\test suite or benchmark."))
<*> switch (long "load-local-deps" <> help "Load all local dependencies of your targets")
<*> switch (long "skip-intermediate-deps" <> help "Skip loading intermediate target dependencies")
<*> boolFlags True "package-hiding" "package hiding" idm
<*> buildOptsParser Build
-- | Parser for exec command
execOptsParser :: Maybe SpecialExecCmd -> Parser ExecOpts
execOptsParser mcmd =
ExecOpts
<$> maybe eoCmdParser pure mcmd
<*> eoArgsParser
<*> execOptsExtraParser
where
eoCmdParser = ExecCmd <$> strArgument (metavar "CMD")
eoArgsParser = many (strArgument (metavar "-- ARGS (e.g. stack ghc -- X.hs -o x)"))
evalOptsParser :: String -- ^ metavar
-> Parser EvalOpts
evalOptsParser meta =
EvalOpts
<$> eoArgsParser
<*> execOptsExtraParser
where
eoArgsParser :: Parser String
eoArgsParser = strArgument (metavar meta)
-- | Parser for extra options to exec command
execOptsExtraParser :: Parser ExecOptsExtra
execOptsExtraParser = eoPlainParser <|>
ExecOptsEmbellished
<$> eoEnvSettingsParser
<*> eoPackagesParser
where
eoEnvSettingsParser :: Parser EnvSettings
eoEnvSettingsParser = EnvSettings
<$> pure True
<*> boolFlags True
"ghc-package-path"
"setting the GHC_PACKAGE_PATH variable for the subprocess"
idm
<*> boolFlags True
"stack-exe"
"setting the STACK_EXE environment variable to the path for the stack executable"
idm
<*> pure False
eoPackagesParser :: Parser [String]
eoPackagesParser = many (strOption (long "package" <> help "Additional packages that must be installed"))
eoPlainParser :: Parser ExecOptsExtra
eoPlainParser = flag' ExecOptsPlain
(long "plain" <>
help "Use an unmodified environment (only useful with Docker)")
-- | Parser for global command-line options.
globalOptsParser :: GlobalOptsContext -> Maybe LogLevel -> Parser GlobalOptsMonoid
globalOptsParser kind defLogLevel =
GlobalOptsMonoid <$>
optionalFirst (strOption (long Docker.reExecArgName <> hidden <> internal)) <*>
optionalFirst (option auto (long dockerEntrypointArgName <> hidden <> internal)) <*>
(First <$> logLevelOptsParser hide0 defLogLevel) <*>
configOptsParser kind <*>
optionalFirst (abstractResolverOptsParser hide0) <*>
optionalFirst (compilerOptsParser hide0) <*>
firstBoolFlags
"terminal"
"overriding terminal detection in the case of running in a false terminal"
hide <*>
optionalFirst
(strOption
(long "stack-yaml" <>
metavar "STACK-YAML" <>
help ("Override project stack.yaml file " <>
"(overrides any STACK_YAML environment variable)") <>
hide))
where
hide = hideMods hide0
hide0 = kind /= OuterGlobalOpts
-- | Create GlobalOpts from GlobalOptsMonoid.
globalOptsFromMonoid :: Bool -> GlobalOptsMonoid -> GlobalOpts
globalOptsFromMonoid defaultTerminal GlobalOptsMonoid{..} = GlobalOpts
{ globalReExecVersion = getFirst globalMonoidReExecVersion
, globalDockerEntrypoint = getFirst globalMonoidDockerEntrypoint
, globalLogLevel = fromFirst defaultLogLevel globalMonoidLogLevel
, globalConfigMonoid = globalMonoidConfigMonoid
, globalResolver = getFirst globalMonoidResolver
, globalCompiler = getFirst globalMonoidCompiler
, globalTerminal = fromFirst defaultTerminal globalMonoidTerminal
, globalStackYaml = getFirst globalMonoidStackYaml }
initOptsParser :: Parser InitOpts
initOptsParser =
InitOpts <$> searchDirs
<*> solver <*> omitPackages
<*> overwrite <*> fmap not ignoreSubDirs
where
searchDirs =
many (textArgument
(metavar "DIRS" <>
help "Directories to include, default is current directory."))
ignoreSubDirs = switch (long "ignore-subdirs" <>
help "Do not search for .cabal files in sub directories")
overwrite = switch (long "force" <>
help "Force overwriting an existing stack.yaml")
omitPackages = switch (long "omit-packages" <>
help "Exclude conflicting or incompatible user packages")
solver = switch (long "solver" <>
help "Use a dependency solver to determine extra dependencies")
-- | Parser for a logging level.
logLevelOptsParser :: Bool -> Maybe LogLevel -> Parser (Maybe LogLevel)
logLevelOptsParser hide defLogLevel =
fmap (Just . parse)
(strOption (long "verbosity" <>
metavar "VERBOSITY" <>
help "Verbosity: silent, error, warn, info, debug" <>
hideMods hide)) <|>
flag' (Just verboseLevel)
(short 'v' <> long "verbose" <>
help ("Enable verbose mode: verbosity level \"" <> showLevel verboseLevel <> "\"") <>
hideMods hide) <|>
flag' (Just silentLevel)
(long "silent" <>
help ("Enable silent mode: verbosity level \"" <> showLevel silentLevel <> "\"") <>
hideMods hide) <|>
pure defLogLevel
where verboseLevel = LevelDebug
silentLevel = LevelOther "silent"
showLevel l =
case l of
LevelDebug -> "debug"
LevelInfo -> "info"
LevelWarn -> "warn"
LevelError -> "error"
LevelOther x -> T.unpack x
parse s =
case s of
"debug" -> LevelDebug
"info" -> LevelInfo
"warn" -> LevelWarn
"error" -> LevelError
_ -> LevelOther (T.pack s)
-- | Parser for the resolver
abstractResolverOptsParser :: Bool -> Parser AbstractResolver
abstractResolverOptsParser hide =
option readAbstractResolver
(long "resolver" <>
metavar "RESOLVER" <>
help "Override resolver in project file" <>
hideMods hide)
readAbstractResolver :: ReadM AbstractResolver
readAbstractResolver = do
s <- readerAsk
case s of
"global" -> return ARGlobal
"nightly" -> return ARLatestNightly
"lts" -> return ARLatestLTS
'l':'t':'s':'-':x | Right (x', "") <- decimal $ T.pack x ->
return $ ARLatestLTSMajor x'
_ ->
case parseResolverText $ T.pack s of
Left e -> readerError $ show e
Right x -> return $ ARResolver x
compilerOptsParser :: Bool -> Parser CompilerVersion
compilerOptsParser hide =
option readCompilerVersion
(long "compiler" <>
metavar "COMPILER" <>
help "Use the specified compiler" <>
hideMods hide)
readCompilerVersion :: ReadM CompilerVersion
readCompilerVersion = do
s <- readerAsk
case parseCompilerVersion (T.pack s) of
Nothing -> readerError $ "Failed to parse compiler: " ++ s
Just x -> return x
-- | GHC variant parser
ghcVariantParser :: Bool -> Parser GHCVariant
ghcVariantParser hide =
option
readGHCVariant
(long "ghc-variant" <> metavar "VARIANT" <>
help
"Specialized GHC variant, e.g. integersimple (implies --no-system-ghc)" <>
hideMods hide
)
where
readGHCVariant = do
s <- readerAsk
case parseGHCVariant s of
Left e -> readerError (show e)
Right v -> return v
-- | Parser for @solverCmd@
solverOptsParser :: Parser Bool
solverOptsParser = boolFlags False
"update-config"
"Automatically update stack.yaml with the solver's recommendations"
idm
-- | Parser for haddock arguments.
haddockOptsParser :: Bool -> Parser HaddockOptsMonoid
haddockOptsParser hide0 =
HaddockOptsMonoid <$> fmap (fromMaybe [])
(optional
(argsOption
(long "haddock-arguments" <>
metavar "HADDOCK_ARGS" <>
help "Arguments passed to the haddock program" <>
hide)))
where hide = hideMods hide0
-- | Parser for test arguments.
-- FIXME hide args
testOptsParser :: Bool -> Parser TestOptsMonoid
testOptsParser hide0 =
TestOptsMonoid
<$> firstBoolFlags
"rerun-tests"
"running already successful tests"
hide
<*> fmap
(fromMaybe [])
(optional
(argsOption
(long "test-arguments" <>
metavar "TEST_ARGS" <>
help "Arguments passed in to the test suite program" <>
hide)))
<*> optionalFirst
(switch
(long "coverage" <>
help "Generate a code coverage report" <>
hide))
<*> optionalFirst
(switch
(long "no-run-tests" <>
help "Disable running of tests. (Tests will still be built.)" <>
hide))
where hide = hideMods hide0
-- | Parser for @stack new@.
newOptsParser :: Parser (NewOpts,InitOpts)
newOptsParser = (,) <$> newOpts <*> initOptsParser
where
newOpts =
NewOpts <$>
packageNameArgument
(metavar "PACKAGE_NAME" <> help "A valid package name.") <*>
switch
(long "bare" <>
help "Do not create a subdirectory for the project") <*>
optional (templateNameArgument
(metavar "TEMPLATE_NAME" <>
help "Name of a template or a local template in a file or a URL.\
\ For example: foo or foo.hsfiles or ~/foo or\
\ https://example.com/foo.hsfiles")) <*>
fmap
M.fromList
(many
(templateParamArgument
(short 'p' <> long "param" <> metavar "KEY:VALUE" <>
help
"Parameter for the template in the format key:value")))
-- | Parser for @stack hpc report@.
hpcReportOptsParser :: Parser HpcReportOpts
hpcReportOptsParser = HpcReportOpts
<$> many (textArgument $ metavar "TARGET_OR_TIX")
<*> switch (long "all" <> help "Use results from all packages and components")
<*> optional (strOption (long "destdir" <> help "Output directy for HTML report"))
pvpBoundsOption :: Parser PvpBounds
pvpBoundsOption =
option
readPvpBounds
(long "pvp-bounds" <> metavar "PVP-BOUNDS" <>
help
"How PVP version bounds should be added to .cabal file: none, lower, upper, both")
where
readPvpBounds = do
s <- readerAsk
case parsePvpBounds $ T.pack s of
Left e ->
readerError e
Right v ->
return v
configCmdSetParser :: Parser ConfigCmdSet
configCmdSetParser =
fromM
(do field <-
oneM
(strArgument
(metavar "FIELD VALUE"))
oneM (fieldToValParser field))
where
fieldToValParser :: String -> Parser ConfigCmdSet
fieldToValParser s =
case s of
"resolver" ->
ConfigCmdSetResolver <$>
argument
readAbstractResolver
idm
_ ->
error "parse stack config set field: only set resolver is implemented"
-- | If argument is True, hides the option from usage and help
hideMods :: Bool -> Mod f a
hideMods hide = if hide then internal <> hidden else idm
| Heather/stack | src/Stack/Options.hs | bsd-3-clause | 37,958 | 0 | 33 | 14,400 | 6,621 | 3,293 | 3,328 | 884 | 9 |
{-#LANGUAGE OverloadedStrings #-}
{-#LANGUAGE QuasiQuotes #-}
{-#LANGUAGE NoImplicitPrelude #-}
module Web.Sprinkles.ApplicationTest
where
import Web.Sprinkles.Prelude
import Web.Sprinkles.Project (Project (..), loadProject)
import Web.Sprinkles.ServerConfig (ServerConfig (..))
import Web.Sprinkles.Serve (appFromProject)
import Web.Sprinkles.Logger (Logger (..), LogMessage (..), tChanLogger)
import System.Directory
import System.FilePath
import System.IO.Temp
import Data.Default (def)
import Test.Tasty
import Test.Tasty.HUnit
import Network.Wai.Test
import Text.Heredoc
applicationTests :: TestTree
applicationTests = testGroup "Application"
[ testCase "Serve blank page" testServeBlankPage
]
testServeBlankPage = do
let projectFiles =
[ ( "project.yml"
, [str|rules:
| - pattern: '/'
| template: 'index.html'
|])
, ( "templates/index.html"
, "All is well."
)
]
withFakeProject projectFiles . runProjectSession $ do
response <- request (setPath defaultRequest "/")
assertStatus 200 response
assertContentType "text/html" response
assertBody "All is well." response
runProjectSession :: Session a -> Project -> IO a
runProjectSession action project =
runSession action (appFromProject project)
withFakeProject :: [(FilePath, ByteString)] -> (Project -> IO ()) -> IO ()
withFakeProject files inner = do
withSystemTempDirectory "sprinkles-fake-project-" $ \projectRoot ->
bracket (acquire projectRoot) release go
where
acquire :: FilePath -> IO FilePath
acquire projectRoot = do
setCurrentDirectory projectRoot
forM files $ \(filename, contents) -> do
let dirname = takeDirectory filename
createDirectoryIfMissing True dirname
writeFile filename contents
return projectRoot
release :: FilePath -> IO ()
release projectRoot = return ()
go :: FilePath -> IO ()
go dir = do
let sconfig = def { scRootDir = "." }
logChan <- newTChanIO
project <- setFakeLogger logChan <$> loadProject sconfig
inner project
setFakeLogger :: TChan LogMessage -> Project -> Project
setFakeLogger logChan project =
project { projectLogger = tChanLogger logChan }
| tdammers/templar | test/Web/Sprinkles/ApplicationTest.hs | bsd-3-clause | 2,467 | 0 | 16 | 679 | 581 | 305 | 276 | 57 | 1 |
-- |
-- Module : Data.Primitive.Internal.Compat
-- Copyright : (c) Roman Leshchinskiy 2011-2012
-- License : BSD-style
--
-- Maintainer : Roman Leshchinskiy <[email protected]>
-- Portability : non-portable
--
-- Compatibility functions
--
module Data.Primitive.Internal.Compat (mkNoRepType) where
#if MIN_VERSION_base(4,2,0)
import Data.Data (mkNoRepType)
#else
import Data.Data (mkNorepType)
mkNoRepType = mkNorepType
#endif
| rleshchinskiy/primitive | Data/Primitive/Internal/Compat.hs | bsd-3-clause | 445 | 0 | 5 | 67 | 36 | 28 | 8 | 3 | 1 |
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Data/Text/Lazy/Read.hs" #-}
{-# LANGUAGE OverloadedStrings, CPP #-}
{-# LANGUAGE Safe #-}
-- |
-- Module : Data.Text.Lazy.Read
-- Copyright : (c) 2010, 2011 Bryan O'Sullivan
--
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : GHC
--
-- Functions used frequently when reading textual data.
module Data.Text.Lazy.Read
(
Reader
, decimal
, hexadecimal
, signed
, rational
, double
) where
import Control.Monad (liftM)
import Data.Char (isDigit, isHexDigit)
import Data.Int (Int8, Int16, Int32, Int64)
import Data.Ratio ((%))
import Data.Text.Internal.Read
import Data.Text.Lazy as T
import Data.Word (Word, Word8, Word16, Word32, Word64)
-- | Read some text. If the read succeeds, return its value and the
-- remaining text, otherwise an error message.
type Reader a = IReader Text a
type Parser = IParser Text
-- | Read a decimal integer. The input must begin with at least one
-- decimal digit, and is consumed until a non-digit or end of string
-- is reached.
--
-- This function does not handle leading sign characters. If you need
-- to handle signed input, use @'signed' 'decimal'@.
--
-- /Note/: For fixed-width integer types, this function does not
-- attempt to detect overflow, so a sufficiently long input may give
-- incorrect results. If you are worried about overflow, use
-- 'Integer' for your result type.
decimal :: Integral a => Reader a
{-# SPECIALIZE decimal :: Reader Int #-}
{-# SPECIALIZE decimal :: Reader Int8 #-}
{-# SPECIALIZE decimal :: Reader Int16 #-}
{-# SPECIALIZE decimal :: Reader Int32 #-}
{-# SPECIALIZE decimal :: Reader Int64 #-}
{-# SPECIALIZE decimal :: Reader Integer #-}
{-# SPECIALIZE decimal :: Reader Data.Word.Word #-}
{-# SPECIALIZE decimal :: Reader Word8 #-}
{-# SPECIALIZE decimal :: Reader Word16 #-}
{-# SPECIALIZE decimal :: Reader Word32 #-}
{-# SPECIALIZE decimal :: Reader Word64 #-}
decimal txt
| T.null h = Left "input does not start with a digit"
| otherwise = Right (T.foldl' go 0 h, t)
where (h,t) = T.span isDigit txt
go n d = (n * 10 + fromIntegral (digitToInt d))
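-- Illustrative examples (not in the original module; they follow the
-- documented behaviour of 'decimal' above):
--
-- >decimal "123abc" == Right (123, "abc")
-- >decimal "abc" == Left "input does not start with a digit"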
-- | Read a hexadecimal integer, consisting of an optional leading
-- @\"0x\"@ followed by at least one hexadecimal digit. Input is
-- consumed until a non-hex-digit or end of string is reached.
-- This function is case insensitive.
--
-- This function does not handle leading sign characters. If you need
-- to handle signed input, use @'signed' 'hexadecimal'@.
--
-- /Note/: For fixed-width integer types, this function does not
-- attempt to detect overflow, so a sufficiently long input may give
-- incorrect results. If you are worried about overflow, use
-- 'Integer' for your result type.
hexadecimal :: Integral a => Reader a
{-# SPECIALIZE hexadecimal :: Reader Int #-}
{-# SPECIALIZE hexadecimal :: Reader Integer #-}
hexadecimal txt
| h == "0x" || h == "0X" = hex t
| otherwise = hex txt
where (h,t) = T.splitAt 2 txt
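-- Illustrative examples (not in the original module):
--
-- >hexadecimal "0x1A" == Right (26, "")
-- >hexadecimal "ffz" == Right (255, "z")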
hex :: Integral a => Reader a
{-# SPECIALIZE hexadecimal :: Reader Int #-}
{-# SPECIALIZE hexadecimal :: Reader Int8 #-}
{-# SPECIALIZE hexadecimal :: Reader Int16 #-}
{-# SPECIALIZE hexadecimal :: Reader Int32 #-}
{-# SPECIALIZE hexadecimal :: Reader Int64 #-}
{-# SPECIALIZE hexadecimal :: Reader Integer #-}
{-# SPECIALIZE hexadecimal :: Reader Word #-}
{-# SPECIALIZE hexadecimal :: Reader Word8 #-}
{-# SPECIALIZE hexadecimal :: Reader Word16 #-}
{-# SPECIALIZE hexadecimal :: Reader Word32 #-}
{-# SPECIALIZE hexadecimal :: Reader Word64 #-}
hex txt
| T.null h = Left "input does not start with a hexadecimal digit"
| otherwise = Right (T.foldl' go 0 h, t)
where (h,t) = T.span isHexDigit txt
go n d = (n * 16 + fromIntegral (hexDigitToInt d))
-- | Read an optional leading sign character (@\'-\'@ or @\'+\'@) and
-- apply it to the result of applying the given reader.
signed :: Num a => Reader a -> Reader a
{-# INLINE signed #-}
signed f = runP (signa (P f))
-- | Read a rational number.
--
-- This function accepts an optional leading sign character, followed
-- by at least one decimal digit. The syntax similar to that accepted
-- by the 'read' function, with the exception that a trailing @\'.\'@
-- or @\'e\'@ /not/ followed by a number is not consumed.
--
-- Examples:
--
-- >rational "3" == Right (3.0, "")
-- >rational "3.1" == Right (3.1, "")
-- >rational "3e4" == Right (30000.0, "")
-- >rational "3.1e4" == Right (31000.0, "")
-- >rational ".3" == Left "input does not start with a digit"
-- >rational "e3" == Left "input does not start with a digit"
--
-- Examples of differences from 'read':
--
-- >rational "3.foo" == Right (3.0, ".foo")
-- >rational "3e" == Right (3.0, "e")
rational :: Fractional a => Reader a
{-# SPECIALIZE rational :: Reader Double #-}
rational = floaty $ \real frac fracDenom -> fromRational $
real % 1 + frac % fracDenom
-- | Read a rational number.
--
-- The syntax accepted by this function is the same as for 'rational'.
--
-- /Note/: This function is almost ten times faster than 'rational',
-- but is slightly less accurate.
--
-- The 'Double' type supports about 16 decimal places of accuracy.
-- For 94.2% of numbers, this function and 'rational' give identical
-- results, but for the remaining 5.8%, this function loses precision
-- around the 15th decimal place. For 0.001% of numbers, this
-- function will lose precision at the 13th or 14th decimal place.
double :: Reader Double
double = floaty $ \real frac fracDenom ->
fromIntegral real +
fromIntegral frac / fromIntegral fracDenom
signa :: Num a => Parser a -> Parser a
{-# SPECIALIZE signa :: Parser Int -> Parser Int #-}
{-# SPECIALIZE signa :: Parser Int8 -> Parser Int8 #-}
{-# SPECIALIZE signa :: Parser Int16 -> Parser Int16 #-}
{-# SPECIALIZE signa :: Parser Int32 -> Parser Int32 #-}
{-# SPECIALIZE signa :: Parser Int64 -> Parser Int64 #-}
{-# SPECIALIZE signa :: Parser Integer -> Parser Integer #-}
signa p = do
sign <- perhaps '+' $ char (\c -> c == '-' || c == '+')
if sign == '+' then p else negate `liftM` p
char :: (Char -> Bool) -> Parser Char
char p = P $ \t -> case T.uncons t of
Just (c,t') | p c -> Right (c,t')
_ -> Left "character does not match"
floaty :: Fractional a => (Integer -> Integer -> Integer -> a) -> Reader a
{-# INLINE floaty #-}
floaty f = runP $ do
sign <- perhaps '+' $ char (\c -> c == '-' || c == '+')
real <- P decimal
T fraction fracDigits <- perhaps (T 0 0) $ do
_ <- char (=='.')
digits <- P $ \t -> Right (fromIntegral . T.length $ T.takeWhile isDigit t, t)
n <- P decimal
return $ T n digits
let e c = c == 'e' || c == 'E'
power <- perhaps 0 (char e >> signa (P decimal) :: Parser Int)
let n = if fracDigits == 0
then if power == 0
then fromIntegral real
else fromIntegral real * (10 ^^ power)
else if power == 0
then f real fraction (10 ^ fracDigits)
else f real fraction (10 ^ fracDigits) * (10 ^^ power)
return $! if sign == '+'
then n
else -n
| phischu/fragnix | tests/packages/scotty/Data.Text.Lazy.Read.hs | bsd-3-clause | 7,321 | 0 | 19 | 1,700 | 1,246 | 682 | 564 | 109 | 5 |
--
-- The Computer Language Benchmarks Game
-- http://benchmarksgame.alioth.debian.org/
--
-- Contributed by Don Stewart
-- Parallelized by Louis Wasserman
{-#LANGUAGE BangPatterns #-}
import System.Environment
import Control.Monad
import System.Mem
import Data.Bits
import Text.Printf
import GHC.Conc (par) -- 'par' is used below but no import provided it in this listing; Control.Parallel exports it too
--
-- an artificially strict tree.
--
-- normally you would ensure the branches are lazy, but this benchmark
-- requires strict allocation.
--
data Tree = Nil | Node !Int !Tree !Tree
minN = 4
io s n t = printf "%s of depth %d\t check: %d\n" s n t
main = do
n <- getArgs >>= readIO . head
let maxN = max (minN + 2) n
stretchN = maxN + 1
-- stretch memory tree
let c = {-# SCC "stretch" #-} check (make 0 stretchN)
io "stretch tree" stretchN c
-- allocate a long lived tree
let !long = make 0 maxN
-- allocate, walk, and deallocate many bottom-up binary trees
let vs = depth minN maxN
mapM_ (\((m,d,i)) -> io (show m ++ "\t trees") d i) vs
    -- confirm that the long-lived binary tree still exists
io "long lived tree" maxN (check long)
-- generate many trees
depth :: Int -> Int -> [(Int,Int,Int)]
depth d m
| d <= m = let
s = sumT d n 0
rest = depth (d+2) m
in s `par` ((2*n,d,s) : rest)
| otherwise = []
where n = bit (m - d + minN)
-- allocate and check lots of trees
sumT :: Int -> Int -> Int -> Int
sumT d 0 t = t
sumT d i t = a `par` b `par` sumT d (i-1) ans
where a = check (make i d)
b = check (make (-i) d)
ans = a + b + t
check = check' True 0
-- traverse the tree, counting up the nodes
check' :: Bool -> Int -> Tree -> Int
check' !b !z Nil = z
check' b z (Node i l r) = check' (not b) (check' b (if b then z+i else z-i) l) r
-- build a tree
make :: Int -> Int -> Tree
make i 0 = Node i Nil Nil
make i d = Node i (make (i2-1) d2) (make i2 d2)
  where i2 = 2*i; d2 = d-1
| agocorona/transient | tests/Test2.hs | mit | 1,904 | 2 | 13 | 515 | 720 | 378 | 342 | 47 | 2 |
#!/usr/bin/env runhaskell
import Data.Char (isDigit)
import Data.List (intercalate)
import Data.Monoid ((<>))
import Data.Version (showVersion)
import Distribution.PackageDescription
import Distribution.Verbosity
import Distribution.Simple
import Distribution.Simple.Setup (BuildFlags(..), ReplFlags(..), TestFlags(..), fromFlag)
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.BuildPaths (autogenModulesDir)
import Distribution.Simple.Utils (createDirectoryIfMissingVerbose, rewriteFile, rawSystemStdout)
main :: IO ()
main =
let hooks = simpleUserHooks
in defaultMainWithHooks hooks {
preConf = \args flags -> do
createDirectoryIfMissingVerbose silent True "gen"
(preConf hooks) args flags
, sDistHook = \pd mlbi uh flags -> do
genBuildInfo silent pd
(sDistHook hooks) pd mlbi uh flags
, buildHook = \pd lbi uh flags -> do
genBuildInfo (fromFlag $ buildVerbosity flags) pd
(buildHook hooks) pd lbi uh flags
, replHook = \pd lbi uh flags args -> do
genBuildInfo (fromFlag $ replVerbosity flags) pd
(replHook hooks) pd lbi uh flags args
, testHook = \args pd lbi uh flags -> do
genBuildInfo (fromFlag $ testVerbosity flags) pd
(testHook hooks) args pd lbi uh flags
}
genBuildInfo :: Verbosity -> PackageDescription -> IO ()
genBuildInfo verbosity pkg = do
createDirectoryIfMissingVerbose verbosity True "gen"
let (PackageName pname) = pkgName . package $ pkg
version = pkgVersion . package $ pkg
name = "BuildInfo_" ++ (map (\c -> if c == '-' then '_' else c) pname)
targetHs = "gen/" ++ name ++ ".hs"
targetText = "gen/version.txt"
t <- timestamp verbosity
gv <- gitVersion verbosity
let v = showVersion version
let buildVersion = intercalate "-" [v, t, gv]
rewriteFile targetHs $ unlines [
"module " ++ name ++ " where"
, "import Prelude"
, "data RuntimeBuildInfo = RuntimeBuildInfo { buildVersion :: String, timestamp :: String, gitVersion :: String }"
, "buildInfo :: RuntimeBuildInfo"
, "buildInfo = RuntimeBuildInfo \"" ++ v ++ "\" \"" ++ t ++ "\" \"" ++ gv ++ "\""
, "buildInfoVersion :: String"
, "buildInfoVersion = \"" ++ buildVersion ++ "\""
]
rewriteFile targetText buildVersion
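  -- Illustration (not in the original Setup.hs): for a hypothetical package
  -- "my-app" at version 1.2.3, timestamp 20160102030405 and git revision
  -- abc1234, this writes gen/BuildInfo_my_app.hs defining
  --   buildInfoVersion = "1.2.3-20160102030405-abc1234"
  -- and gen/version.txt containing the same string.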
gitVersion :: Verbosity -> IO String
gitVersion verbosity = do
ver <- rawSystemStdout verbosity "git" ["log", "--pretty=format:%h", "-n", "1"]
notModified <- ((>) 1 . length) `fmap` rawSystemStdout verbosity "git" ["status", "--porcelain"]
return $ ver ++ if notModified then "" else "-M"
timestamp :: Verbosity -> IO String
timestamp verbosity =
rawSystemStdout verbosity "date" ["+%Y%m%d%H%M%S"] >>= \s ->
case splitAt 14 s of
(d, n : []) ->
if (length d == 14 && filter isDigit d == d)
then return d
else fail $ "date has failed to produce the correct format [" <> s <> "]."
_ ->
fail $ "date has failed to produce a date long enough [" <> s <> "]."
| charleso/bantam | framework/Setup.hs | unlicense | 3,111 | 0 | 16 | 761 | 903 | 471 | 432 | 66 | 3 |
{- misc utility functions
-
- Copyright 2010-2011 Joey Hess <[email protected]>
-
- License: BSD-2-clause
-}
{-# LANGUAGE CPP #-}
module Utility.Misc where
import System.IO
import Control.Monad
import Foreign
import Data.Char
import Data.List
import Control.Applicative
import System.Exit
#ifndef mingw32_HOST_OS
import System.Posix.Process (getAnyProcessStatus)
import Utility.Exception
#endif
import Utility.FileSystemEncoding
import Utility.Monad
{- A version of hgetContents that is not lazy. Ensures file is
- all read before it gets closed. -}
hGetContentsStrict :: Handle -> IO String
hGetContentsStrict = hGetContents >=> \s -> length s `seq` return s
{- A version of readFile that is not lazy. -}
readFileStrict :: FilePath -> IO String
readFileStrict = readFile >=> \s -> length s `seq` return s
{- Reads a file strictly, and using the FileSystemEncoding, so it will
- never crash on a badly encoded file. -}
readFileStrictAnyEncoding :: FilePath -> IO String
readFileStrictAnyEncoding f = withFile f ReadMode $ \h -> do
fileEncoding h
hClose h `after` hGetContentsStrict h
{- Writes a file, using the FileSystemEncoding so it will never crash
- on a badly encoded content string. -}
writeFileAnyEncoding :: FilePath -> String -> IO ()
writeFileAnyEncoding f content = withFile f WriteMode $ \h -> do
fileEncoding h
hPutStr h content
{- Like break, but the item matching the condition is not included
- in the second result list.
-
- separate (== ':') "foo:bar" = ("foo", "bar")
- separate (== ':') "foobar" = ("foobar", "")
-}
separate :: (a -> Bool) -> [a] -> ([a], [a])
separate c l = unbreak $ break c l
where
unbreak r@(a, b)
| null b = r
| otherwise = (a, tail b)
{- Breaks out the first line. -}
firstLine :: String -> String
firstLine = takeWhile (/= '\n')
{- Splits a list into segments that are delimited by items matching
- a predicate. (The delimiters are not included in the segments.)
- Segments may be empty. -}
segment :: (a -> Bool) -> [a] -> [[a]]
segment p l = map reverse $ go [] [] l
where
go c r [] = reverse $ c:r
go c r (i:is)
| p i = go [] (c:r) is
| otherwise = go (i:c) r is
prop_segment_regressionTest :: Bool
prop_segment_regressionTest = all id
-- Even an empty list is a segment.
[ segment (== "--") [] == [[]]
	-- There are two segments in this list, even though the first is empty.
, segment (== "--") ["--", "foo", "bar"] == [[],["foo","bar"]]
]
{- Includes the delimiters as segments of their own. -}
segmentDelim :: (a -> Bool) -> [a] -> [[a]]
segmentDelim p l = map reverse $ go [] [] l
where
go c r [] = reverse $ c:r
go c r (i:is)
| p i = go [] ([i]:c:r) is
| otherwise = go (i:c) r is
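{- For example (not in the original file; follows from the definition above):
 -
 - segmentDelim (== '/') "/usr/local" == ["", "/", "usr", "/", "local"]
 -}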
{- Replaces multiple values in a string.
-
- Takes care to skip over just-replaced values, so that they are not
- mangled. For example, massReplace [("foo", "new foo")] does not
- replace the "new foo" with "new new foo".
-}
massReplace :: [(String, String)] -> String -> String
massReplace vs = go [] vs
where
go acc _ [] = concat $ reverse acc
go acc [] (c:cs) = go ([c]:acc) vs cs
go acc ((val, replacement):rest) s
| val `isPrefixOf` s =
go (replacement:acc) vs (drop (length val) s)
| otherwise = go acc rest s
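{- For example (not in the original file):
 -
 - massReplace [("foo", "new foo")] "foo bar foo" == "new foo bar new foo"
 -}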
{- Wrapper around hGetBufSome that returns a String.
-
- The null string is returned on eof, otherwise returns whatever
- data is currently available to read from the handle, or waits for
- data to be written to it if none is currently available.
-
- Note on encodings: The normal encoding of the Handle is ignored;
- each byte is converted to a Char. Not unicode clean!
-}
hGetSomeString :: Handle -> Int -> IO String
hGetSomeString h sz = do
fp <- mallocForeignPtrBytes sz
len <- withForeignPtr fp $ \buf -> hGetBufSome h buf sz
map (chr . fromIntegral) <$> withForeignPtr fp (peekbytes len)
where
peekbytes :: Int -> Ptr Word8 -> IO [Word8]
peekbytes len buf = mapM (peekElemOff buf) [0..pred len]
{- Reaps any zombie git processes.
-
- Warning: Not thread safe. Anything that was expecting to wait
- on a process and get back an exit status is going to be confused
- if this reap gets there first. -}
reapZombies :: IO ()
#ifndef mingw32_HOST_OS
reapZombies = do
-- throws an exception when there are no child processes
catchDefaultIO Nothing (getAnyProcessStatus False True)
>>= maybe (return ()) (const reapZombies)
#else
reapZombies = return ()
#endif
exitBool :: Bool -> IO a
exitBool False = exitFailure
exitBool True = exitSuccess
| avengerpenguin/propellor | src/Utility/Misc.hs | bsd-2-clause | 4,502 | 6 | 12 | 895 | 1,184 | 618 | 566 | 70 | 3 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-2012
Note [Unarisation]
~~~~~~~~~~~~~~~~~~
The idea of this pass is to translate away *all* unboxed-tuple binders. So for example:
f (x :: (# Int, Bool #)) = f x + f (# 1, True #)
==>
f (x1 :: Int) (x2 :: Bool) = f x1 x2 + f 1 True
It is important that we do this at the STG level and NOT at the core level
because it would be very hard to make this pass Core-type-preserving.
STG fed to the code generators *must* be unarised because the code generators do
not support unboxed tuple binders natively.
Note [Unarisation and arity]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Because of unarisation, the arity that will be recorded in the generated info table
for an Id may be larger than the idArity. Instead we record what we call the RepArity,
which is the Arity taking into account any expanded arguments, and corresponds to
the number of (possibly-void) *registers* arguments will arrive in.
-}
{-# LANGUAGE CPP #-}
module UnariseStg (unarise) where
#include "HsVersions.h"
import CoreSyn
import StgSyn
import VarEnv
import UniqSupply
import Id
import MkId (realWorldPrimId)
import Type
import TysWiredIn
import DataCon
import VarSet
import OccName
import Name
import Util
import Outputable
import BasicTypes
-- | A mapping from unboxed-tuple binders to the Ids they were expanded to.
--
-- INVARIANT: Ids in the range don't have unboxed tuple types.
--
-- Those in-scope variables without unboxed-tuple types are not present in
-- the domain of the mapping at all.
type UnariseEnv = VarEnv [Id]
ubxTupleId0 :: Id
ubxTupleId0 = dataConWorkId (tupleCon UnboxedTuple 0)
unarise :: UniqSupply -> [StgBinding] -> [StgBinding]
unarise us binds = zipWith (\us -> unariseBinding us init_env) (listSplitUniqSupply us) binds
where -- See Note [Nullary unboxed tuple] in Type.lhs
init_env = unitVarEnv ubxTupleId0 [realWorldPrimId]
unariseBinding :: UniqSupply -> UnariseEnv -> StgBinding -> StgBinding
unariseBinding us rho bind = case bind of
StgNonRec x rhs -> StgNonRec x (unariseRhs us rho rhs)
StgRec xrhss -> StgRec $ zipWith (\us (x, rhs) -> (x, unariseRhs us rho rhs))
(listSplitUniqSupply us) xrhss
unariseRhs :: UniqSupply -> UnariseEnv -> StgRhs -> StgRhs
unariseRhs us rho rhs = case rhs of
StgRhsClosure ccs b_info fvs update_flag srt args expr
-> StgRhsClosure ccs b_info (unariseIds rho fvs) update_flag
(unariseSRT rho srt) args' (unariseExpr us' rho' expr)
where (us', rho', args') = unariseIdBinders us rho args
StgRhsCon ccs con args
-> StgRhsCon ccs con (unariseArgs rho args)
------------------------
unariseExpr :: UniqSupply -> UnariseEnv -> StgExpr -> StgExpr
unariseExpr _ rho (StgApp f args)
| null args
, UbxTupleRep tys <- repType (idType f)
= -- Particularly important where (##) is concerned
-- See Note [Nullary unboxed tuple]
StgConApp (tupleCon UnboxedTuple (length tys))
(map StgVarArg (unariseId rho f))
| otherwise
= StgApp f (unariseArgs rho args)
unariseExpr _ _ (StgLit l)
= StgLit l
unariseExpr _ rho (StgConApp dc args)
| isUnboxedTupleCon dc = StgConApp (tupleCon UnboxedTuple (length args')) args'
| otherwise = StgConApp dc args'
where
args' = unariseArgs rho args
unariseExpr _ rho (StgOpApp op args ty)
= StgOpApp op (unariseArgs rho args) ty
unariseExpr us rho (StgLam xs e)
= StgLam xs' (unariseExpr us' rho' e)
where
(us', rho', xs') = unariseIdBinders us rho xs
unariseExpr us rho (StgCase e case_lives alts_lives bndr srt alt_ty alts)
= StgCase (unariseExpr us1 rho e) (unariseLives rho case_lives)
(unariseLives rho alts_lives) bndr (unariseSRT rho srt)
alt_ty' alts'
where
(us1, us2) = splitUniqSupply us
(alt_ty', alts') = unariseAlts us2 rho alt_ty bndr (repType (idType bndr)) alts
unariseExpr us rho (StgLet bind e)
= StgLet (unariseBinding us1 rho bind) (unariseExpr us2 rho e)
where
(us1, us2) = splitUniqSupply us
unariseExpr us rho (StgLetNoEscape live_in_let live_in_bind bind e)
= StgLetNoEscape (unariseLives rho live_in_let) (unariseLives rho live_in_bind)
(unariseBinding us1 rho bind) (unariseExpr us2 rho e)
where
(us1, us2) = splitUniqSupply us
unariseExpr us rho (StgTick tick e)
= StgTick tick (unariseExpr us rho e)
------------------------
unariseAlts :: UniqSupply -> UnariseEnv -> AltType -> Id -> RepType -> [StgAlt] -> (AltType, [StgAlt])
unariseAlts us rho alt_ty _ (UnaryRep _) alts
= (alt_ty, zipWith (\us alt -> unariseAlt us rho alt) (listSplitUniqSupply us) alts)
unariseAlts us rho _ bndr (UbxTupleRep tys) ((DEFAULT, [], [], e) : _)
= (UbxTupAlt n, [(DataAlt (tupleCon UnboxedTuple n), ys, uses, unariseExpr us2' rho' e)])
where
(us2', rho', ys) = unariseIdBinder us rho bndr
uses = replicate (length ys) (not (isDeadBinder bndr))
n = length tys
unariseAlts us rho _ bndr (UbxTupleRep _) [(DataAlt _, ys, uses, e)]
= (UbxTupAlt n, [(DataAlt (tupleCon UnboxedTuple n), ys', uses', unariseExpr us2' rho'' e)])
where
(us2', rho', ys', uses') = unariseUsedIdBinders us rho ys uses
rho'' = extendVarEnv rho' bndr ys'
n = length ys'
unariseAlts _ _ _ _ (UbxTupleRep _) alts
= pprPanic "unariseExpr: strange unboxed tuple alts" (ppr alts)
--------------------------
unariseAlt :: UniqSupply -> UnariseEnv -> StgAlt -> StgAlt
unariseAlt us rho (con, xs, uses, e)
= (con, xs', uses', unariseExpr us' rho' e)
where
(us', rho', xs', uses') = unariseUsedIdBinders us rho xs uses
------------------------
unariseSRT :: UnariseEnv -> SRT -> SRT
unariseSRT _ NoSRT = NoSRT
unariseSRT rho (SRTEntries ids) = SRTEntries (concatMapVarSet (unariseId rho) ids)
unariseSRT _ (SRT {}) = panic "unariseSRT"
unariseLives :: UnariseEnv -> StgLiveVars -> StgLiveVars
unariseLives rho ids = concatMapVarSet (unariseId rho) ids
unariseArgs :: UnariseEnv -> [StgArg] -> [StgArg]
unariseArgs rho = concatMap (unariseArg rho)
unariseArg :: UnariseEnv -> StgArg -> [StgArg]
unariseArg rho (StgVarArg x) = map StgVarArg (unariseId rho x)
unariseArg _ (StgLitArg l) = [StgLitArg l]
unariseIds :: UnariseEnv -> [Id] -> [Id]
unariseIds rho = concatMap (unariseId rho)
unariseId :: UnariseEnv -> Id -> [Id]
unariseId rho x
| Just ys <- lookupVarEnv rho x
= ASSERT2( case repType (idType x) of UbxTupleRep _ -> True; _ -> x == ubxTupleId0
, text "unariseId: not unboxed tuple" <+> ppr x )
ys
| otherwise
= ASSERT2( case repType (idType x) of UbxTupleRep _ -> False; _ -> True
, text "unariseId: was unboxed tuple" <+> ppr x )
[x]
unariseUsedIdBinders :: UniqSupply -> UnariseEnv -> [Id] -> [Bool]
-> (UniqSupply, UnariseEnv, [Id], [Bool])
unariseUsedIdBinders us rho xs uses
= case mapAccumL2 do_one us rho (zipEqual "unariseUsedIdBinders" xs uses) of
(us', rho', xs_usess) -> uncurry ((,,,) us' rho') (unzip (concat xs_usess))
where
do_one us rho (x, use) = third3 (map (flip (,) use)) (unariseIdBinder us rho x)
unariseIdBinders :: UniqSupply -> UnariseEnv -> [Id] -> (UniqSupply, UnariseEnv, [Id])
unariseIdBinders us rho xs = third3 concat $ mapAccumL2 unariseIdBinder us rho xs
unariseIdBinder :: UniqSupply -> UnariseEnv -> Id -> (UniqSupply, UnariseEnv, [Id])
unariseIdBinder us rho x = case repType (idType x) of
UnaryRep _ -> (us, rho, [x])
UbxTupleRep tys -> let (us0, us1) = splitUniqSupply us
ys = unboxedTupleBindersFrom us0 x tys
rho' = extendVarEnv rho x ys
in (us1, rho', ys)
unboxedTupleBindersFrom :: UniqSupply -> Id -> [UnaryType] -> [Id]
unboxedTupleBindersFrom us x tys = zipWith (mkSysLocal fs) (uniqsFromSupply us) tys
where fs = occNameFS (getOccName x)
concatMapVarSet :: (Var -> [Var]) -> VarSet -> VarSet
concatMapVarSet f xs = mkVarSet [x' | x <- varSetElems xs, x' <- f x]
| forked-upstream-packages-for-ghcjs/ghc | compiler/simplStg/UnariseStg.hs | bsd-3-clause | 8,000 | 0 | 13 | 1,695 | 2,493 | 1,295 | 1,198 | 132 | 3 |
-----------------------------------------------------------------------------
--
-- Pretty-printing TyThings
--
-- (c) The GHC Team 2005
--
-----------------------------------------------------------------------------
{-# LANGUAGE CPP #-}
module PprTyThing (
pprTyThing,
pprTyThingInContext,
pprTyThingLoc,
pprTyThingInContextLoc,
pprTyThingHdr,
pprTypeForUser,
pprFamInst
) where
#include "HsVersions.h"
import GhcPrelude
import Type ( TyThing(..) )
import IfaceSyn ( ShowSub(..), ShowHowMuch(..), AltPpr(..)
, showToHeader, pprIfaceDecl )
import CoAxiom ( coAxiomTyCon )
import HscTypes( tyThingParent_maybe )
import MkIface ( tyThingToIfaceDecl )
import Type ( tidyOpenType )
import FamInstEnv( FamInst(..), FamFlavor(..) )
import Type( Type, pprTypeApp, pprSigmaType )
import Name
import VarEnv( emptyTidyEnv )
import Outputable
-- -----------------------------------------------------------------------------
-- Pretty-printing entities that we get from the GHC API
{- Note [Pretty-printing TyThings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We pretty-print a TyThing by converting it to an IfaceDecl,
and pretty-printing that (see ppr_ty_thing below).
Here is why:
* When pretty-printing (a type, say), the idiomatic solution is not to
"rename type variables on the fly", but rather to "tidy" the type
(which gives each variable a distinct print-name), and then
pretty-print it (without renaming). Separate the two
concerns. Functions like tidyType do this.
* Alas, for type constructors, TyCon, tidying does not work well,
because a TyCon includes DataCons which include Types, which mention
TyCons. And tidying can't tidy a mutually recursive data structure
graph, only trees.
* One alternative would be to ensure that TyCons get type variables
with distinct print-names. That's ok for type variables but less
easy for kind variables. Processing data type declarations is
already so complicated that I don't think it's sensible to add the
extra requirement that it generates only "pretty" types and kinds.
* One place the non-pretty names can show up is in GHCi. But another
is in interface files. Look at MkIface.tyThingToIfaceDecl which
converts a TyThing (i.e. TyCon, Class etc) to an IfaceDecl. And it
already does tidying as part of that conversion! Why? Because
interface files contains fast-strings, not uniques, so the names
must at least be distinct.
So if we convert to IfaceDecl, we get a nice tidy IfaceDecl, and can
print that. Of course, that means that pretty-printing IfaceDecls
must be careful to display nice user-friendly results, but that's ok.
See #7730, #8776 for details -}
--------------------
-- | Pretty-prints a 'FamInst' (type/data family instance) with its defining location.
pprFamInst :: FamInst -> SDoc
-- * For data instances we go via pprTyThing of the representational TyCon,
-- because there is already much cleverness associated with printing
-- data type declarations that I don't want to duplicate
-- * For type instances we print directly here; there is no TyCon
-- to give to pprTyThing
--
-- FamInstEnv.pprFamInst does a more quick-and-dirty job for internal purposes
pprFamInst (FamInst { fi_flavor = DataFamilyInst rep_tc })
= pprTyThingInContextLoc (ATyCon rep_tc)
pprFamInst (FamInst { fi_flavor = SynFamilyInst, fi_axiom = axiom
, fi_tys = lhs_tys, fi_rhs = rhs })
= showWithLoc (pprDefinedAt (getName axiom)) $
hang (text "type instance" <+> pprTypeApp (coAxiomTyCon axiom) lhs_tys)
2 (equals <+> ppr rhs)
----------------------------
-- | Pretty-prints a 'TyThing' with its defining location.
pprTyThingLoc :: TyThing -> SDoc
pprTyThingLoc tyThing
= showWithLoc (pprDefinedAt (getName tyThing))
(pprTyThing showToHeader tyThing)
-- | Pretty-prints the 'TyThing' header. For functions and data constructors
-- the function is equivalent to 'pprTyThing' but for type constructors
-- and classes it prints only the header part of the declaration.
pprTyThingHdr :: TyThing -> SDoc
pprTyThingHdr = pprTyThing showToHeader
-- | Pretty-prints a 'TyThing' in context: that is, if the entity
-- is a data constructor, record selector, or class method, then
-- the entity's parent declaration is pretty-printed with irrelevant
-- parts omitted.
pprTyThingInContext :: ShowSub -> TyThing -> SDoc
pprTyThingInContext show_sub thing
= go [] thing
where
go ss thing
= case tyThingParent_maybe thing of
Just parent ->
go (getOccName thing : ss) parent
Nothing ->
pprTyThing
(show_sub { ss_how_much = ShowSome ss (AltPpr Nothing) })
thing
-- | Like 'pprTyThingInContext', but adds the defining location.
pprTyThingInContextLoc :: TyThing -> SDoc
pprTyThingInContextLoc tyThing
= showWithLoc (pprDefinedAt (getName tyThing))
(pprTyThingInContext showToHeader tyThing)
-- | Pretty-prints a 'TyThing'.
pprTyThing :: ShowSub -> TyThing -> SDoc
-- We pretty-print 'TyThing' via 'IfaceDecl'
-- See Note [Pretty-printing TyThings]
pprTyThing ss ty_thing
= pprIfaceDecl ss' (tyThingToIfaceDecl ty_thing)
where
ss' = case ss_how_much ss of
ShowHeader (AltPpr Nothing) -> ss { ss_how_much = ShowHeader ppr' }
ShowSome xs (AltPpr Nothing) -> ss { ss_how_much = ShowSome xs ppr' }
_ -> ss
ppr' = AltPpr $ ppr_bndr $ getName ty_thing
ppr_bndr :: Name -> Maybe (OccName -> SDoc)
ppr_bndr name
| isBuiltInSyntax name
= Nothing
| otherwise
= case nameModule_maybe name of
Just mod -> Just $ \occ -> getPprStyle $ \sty ->
pprModulePrefix sty mod occ <> ppr occ
Nothing -> WARN( True, ppr name ) Nothing
-- Nothing is unexpected here; TyThings have External names
pprTypeForUser :: Type -> SDoc
-- The type is tidied
pprTypeForUser ty
= pprSigmaType tidy_ty
where
(_, tidy_ty) = tidyOpenType emptyTidyEnv ty
-- Often the types/kinds we print in ghci are fully generalised
-- and have no free variables, but it turns out that we sometimes
-- print un-generalised kinds (eg when doing :k T), so it's
-- better to use tidyOpenType here
showWithLoc :: SDoc -> SDoc -> SDoc
showWithLoc loc doc
= hang doc 2 (char '\t' <> comment <+> loc)
-- The tab tries to make them line up a bit
where
comment = text "--"
| ezyang/ghc | compiler/main/PprTyThing.hs | bsd-3-clause | 6,519 | 0 | 16 | 1,382 | 869 | 480 | 389 | 76 | 4 |
module WhileM where
{-@ LIQUID "--no-termination" @-}
{-@ LIQUID "--short-names" @-}
import RIO
{-@
whileM :: forall < p :: World -> Prop
, qc :: World -> Bool -> World -> Prop
, qe :: World -> () -> World -> Prop
, q :: World -> () -> World -> Prop>.
{x::(), s1::World<p>, b::{v:Bool | Prop v}, s2::World<qc s1 b> |- World<qe s2 x> <: World<p>}
{b::{v:Bool | Prop v}, x2::(), s1::World<p>, s3::World |- World<q s3 x2> <: World<q s1 x2> }
{b::{v:Bool | not (Prop v)}, x2::(), s1::World<p> |- World<qc s1 b> <: World<q s1 x2> }
RIO <p, qc> Bool
-> RIO <{\v -> true}, qe> ()
-> RIO <p, q> ()
@-}
whileM :: RIO Bool -> RIO () -> RIO ()
whileM (RIO cond) (RIO e)
= RIO $ \s1 -> case cond s1 of {(y, s2) ->
if y
then case e s2 of {(y2, s3) -> runState (whileM (RIO cond) (RIO e)) s3}
else ((), s2)
} | ssaavedra/liquidhaskell | benchmarks/icfp15/todo/WhileM.hs | bsd-3-clause | 933 | 0 | 18 | 303 | 150 | 82 | 68 | 8 | 2 |
-- These two declarations get their derived instances
-- in two different ways
module ShouldCompile where
newtype Bar = Bar Int deriving Eq
data Baz = Baz Bar deriving Eq
| urbanslug/ghc | testsuite/tests/typecheck/should_compile/tc143.hs | bsd-3-clause | 176 | 0 | 6 | 35 | 29 | 19 | 10 | 3 | 0 |
{-# OPTIONS_GHC -O -ddump-rules #-}
-- Trac #2486
--
-- The thing to look for here is that specialisations for fib and tak
-- at both Int and Double are indeed generated; hence -ddump-rules
module Main where
import System.Environment
import Numeric
main = do
n <- getArgs >>= readIO . head
let m = n-1
a = 27 + fromIntegral n
putStr $
line "Ack" [3,n] (ack 3 n) show ++
line "Fib" [a] (fib a :: Double) (\n -> showFFloat (Just 1) n []) ++
line "Tak" [3*m,2*m,m] (tak (3*m) (2*m) m :: Int) show ++
line "Fib" [3] (fib 3 :: Int) show ++
line "Tak" [3,2,1] (tak 3 2 1 :: Double) show
where
line pre a r f = pre ++ "(" ++ csv f a "" ++ "): " ++ f r ++ "\n"
csv f [a] s = s ++ f a
csv f (a:b) s = s ++ f a ++ "," ++ csv f b s
ack :: Int -> Int -> Int
ack 0 n = n+1
ack m 0 = ack (m-1) 1
ack m n = ack (m-1) (ack m (n-1))
fib :: (Num a, Ord a) => a -> a
fib n = if n >= 2 then fib (n-1) + fib (n-2) else 1
tak :: (Num a, Ord a) => a -> a -> a -> a
tak x y z = if y < x then tak (tak (x-1) y z) (tak (y-1) z x) (tak (z-1) x y) else z
| wxwxwwxxx/ghc | testsuite/tests/simplCore/should_run/T2486.hs | bsd-3-clause | 1,211 | 0 | 16 | 440 | 634 | 331 | 303 | 25 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE BangPatterns #-}
module Main where
import Control.Category
import Criterion.Main
import Data.MyPrelude
import Data.Utils
import Data.Void
import Numeric.Neural
import Prelude hiding (id, (.))
main :: IO ()
main = defaultMain
[ bgroup "white"
[ bench "10/200" $ whnf (w 10) 200
, bench "10/2000" $ whnf (w 10) 2000
, bench "10/20000" $ whnf (w 10) 20000
, bench "100/200" $ whnf (w 100) 200
, bench "100/2000" $ whnf (w 100) 2000
, bench "100/20000" $ whnf (w 100) 20000
, bench "1000/200" $ whnf (w 1000) 200
, bench "1000/2000" $ whnf (w 1000) 2000
, bench "1000/20000" $ whnf (w 1000) 20000
]
, env setupEnv $ \ ~(m, xss) -> bgroup "linear"
[ l m xss 1 5
, l m xss 5 5
, l m xss 10 5
]
]
w :: Int -> Int -> Double
w sampleCount testCount = flip evalRand (mkStdGen 123456) $ do
stats <- mkStats'
samples <- replicateM sampleCount $ mkSample stats
let m = whiten model' samples
xss <- replicateM testCount $ mkSample stats
return $ sum [model m xs | xs <- xss]
where
mkStats' :: MonadRandom m => m (Vector Width (Double, Double))
mkStats' = sequenceA (pure $ (,) <$> getRandomR (-100, 100) <*> getRandomR (0.1, 20))
mkSample :: MonadRandom m => Vector Width (Double, Double) -> m (Vector Width Double)
mkSample = mapM $ uncurry boxMuller'
model' :: Model (Vector Width) Identity Void (Vector Width Double) Double
model' = Model (cArr $ Diff $ Identity . sum) absurd id runIdentity
type Width = 10
l :: M -> [Vector Width' Double] -> Int -> Int -> Benchmark
l m xss batchSize steps = bench (printf "%d/%d" batchSize steps) $ whnf l' steps where
l' :: Int -> Double
l' steps' =
let m' = loop steps' m
xs = pure 0
in modelError m' [(xs, xs)]
loop :: Int -> M -> M
loop 0 m' = m'
loop !n m' =
let m'' = m' `deepseq` snd $ descent m' 0.01 [(xs, xs) | xs <- take batchSize xss]
in loop (pred n) m''
setupEnv :: IO (M, [Vector Width' Double])
setupEnv = return $ flip evalRand (mkStdGen 987654) $ do
let e xs = Diff $ Identity . sqDiff (fromDouble <$> xs)
m <- modelR $ mkStdModel linearLayer e id id
xss <- replicateM 100 $ let r = getRandomR (-5, 5) in sequence $ pure r
return (m, xss)
type M = StdModel (Vector Width') (Vector Width') (Vector Width' Double) (Vector Width' Double)
type Width' = 100
| brunjlar/neural | benchmark/benchmark.hs | mit | 2,561 | 0 | 16 | 755 | 1,056 | 530 | 526 | 60 | 2 |
{-|
Module : Network.Flow.V9.Fields
Description : Field Decoding
Copyright : (c) Jan Dvořák
License : MIT
Maintainer : [email protected]
Stability : unstable
Portability : non-portable (ghc)
-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}
module Network.Flow.V9.Fields (decodeField)
where
import BasePrelude hiding (empty, union, lookup)
import Data.MAC (MAC, toMAC)
import Data.IP (IPv4, IPv6, toIPv4, toIPv6b)
import Data.Text.Encoding (decodeUtf8, decodeUtf8')
import Data.Serialize.Get
import Data.Serialize.IEEE754
import Data.HashMap.Lazy (HashMap, singleton, fromList, lookup, empty)
import Data.ByteString (ByteString)
import Data.Text (Text)
import Data.Aeson
import qualified Data.ByteString.Base64 as Base64
import qualified Data.ByteString as BS
decodeField :: Word16 -> ByteString -> Object
decodeField fid bs = case lookup fid decoders of
Just dec -> dec bs
Nothing -> decodeOther fid bs
decodeField' :: (DecodeAs a) => Text -> (a -> Value) -> ByteString -> Object
decodeField' name jsonify bs = case decodeAs bs of
Just val -> singleton name $ jsonify val
Nothing -> empty
decodeOther :: Word16 -> ByteString -> Object
decodeOther fid bs = singleton name value
where name = fromString $ show fid
value = toJSON $ decodeUtf8 $ Base64.encode bs
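-- Illustrative usage (not part of the original module; the example values are
-- made up): field 4 is ProtocolIdentifier and decodes as a Word8, while
-- unknown field IDs fall back to a base64-encoded value keyed by the numeric
-- ID (see 'decodeOther').
_decodeFieldExample :: Bool
_decodeFieldExample =
  decodeField 4 (BS.pack [17]) == singleton "ProtocolIdentifier" (toJSON (17 :: Word8))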
class DecodeAs a where
decodeAs :: ByteString -> Maybe a
instance DecodeAs Bytes where
decodeAs = Just . Bytes
instance DecodeAs Bool where
decodeAs bs = case (roll bs :: Word) of
1 -> Just True
2 -> Just False
_ -> Nothing
instance DecodeAs Word where
decodeAs = Just . roll
instance DecodeAs Word8 where
decodeAs = Just . roll
instance DecodeAs Word16 where
decodeAs = Just . roll
instance DecodeAs Word32 where
decodeAs = Just . roll
instance DecodeAs Word64 where
decodeAs = Just . roll
instance DecodeAs Int64 where
decodeAs bs =
case runGet getInt64be bs of
Left _e -> Nothing
Right v -> Just v
instance DecodeAs Text where
decodeAs bs =
case decodeUtf8' bs of
Left _e -> Nothing
Right v -> Just v
instance DecodeAs Double where
decodeAs bs =
case runGet getFloat64be bs of
Left _e -> Nothing
Right v -> Just v
instance DecodeAs Addr4 where
decodeAs bs =
case BS.length bs of
4 -> Just $ Addr4 $ toIPv4 $ map fromIntegral $ BS.unpack bs
_ -> Nothing
instance DecodeAs Addr6 where
decodeAs bs =
case BS.length bs of
16 -> Just $ Addr6 $ toIPv6b $ map fromIntegral $ BS.unpack bs
_x -> Nothing
instance DecodeAs MAC where
decodeAs bs =
case BS.length bs of
6 -> Just $ toMAC $ BS.unpack bs
_ -> Nothing
newtype Bytes = Bytes ByteString
instance ToJSON Bytes where
toJSON (Bytes bs) = toJSON $ decodeUtf8 $ Base64.encode bs
newtype Addr6 = Addr6 IPv6
instance ToJSON Addr6 where
toJSON (Addr6 ipv6) = toJSON $ show ipv6
newtype Addr4 = Addr4 IPv4
instance ToJSON Addr4 where
toJSON (Addr4 ipv4) = toJSON $ show ipv4
roll :: (Integral a, Bits a) => ByteString -> a
roll = foldl' (\c n -> c `shiftL` 8 + n) 0 . map fromIntegral . BS.unpack
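-- Illustrative examples (not part of the original module): 'roll' folds the
-- bytes big-endian, so
--
-- >roll (BS.pack [0x01, 0x00]) == (256 :: Word)
-- >roll (BS.pack [0xAB]) == (171 :: Word8)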
decoders :: HashMap Word16 (ByteString -> Object)
decoders =
fromList
[ (1, decodeField' "OctetDeltaCount" (toJSON @Word64))
, (2, decodeField' "PacketDeltaCount" (toJSON @Word64))
, (3, decodeField' "DeltaFlowCount" (toJSON @Word64))
, (4, decodeField' "ProtocolIdentifier" (toJSON @Word8))
, (5, decodeField' "IpClassOfService" (toJSON @Word8))
, (6, decodeField' "TcpControlBits" (toJSON @Word16))
, (7, decodeField' "SourceTransportPort" (toJSON @Word16))
, (8, decodeField' "SourceIPv4Address" (toJSON @Addr4))
, (9, decodeField' "SourceIPv4PrefixLength" (toJSON @Word8))
, (10, decodeField' "IngressInterface" (toJSON @Word32))
, (11, decodeField' "DestinationTransportPort" (toJSON @Word16))
, (12, decodeField' "DestinationIPv4Address" (toJSON @Addr4))
, (13, decodeField' "DestinationIPv4PrefixLength" (toJSON @Word8))
, (14, decodeField' "EgressInterface" (toJSON @Word32))
, (15, decodeField' "IpNextHopIPv4Address" (toJSON @Addr4))
, (16, decodeField' "BgpSourceAsNumber" (toJSON @Word32))
, (17, decodeField' "BgpDestinationAsNumber" (toJSON @Word32))
, (18, decodeField' "BgpNextHopIPv4Address" (toJSON @Addr4))
, (19, decodeField' "PostMCastPacketDeltaCount" (toJSON @Word64))
, (20, decodeField' "PostMCastOctetDeltaCount" (toJSON @Word64))
, (21, decodeField' "FlowEndSysUpTime" (toJSON @Word32))
, (22, decodeField' "FlowStartSysUpTime" (toJSON @Word32))
, (23, decodeField' "PostOctetDeltaCount" (toJSON @Word64))
, (24, decodeField' "PostPacketDeltaCount" (toJSON @Word64))
, (25, decodeField' "MinimumIpTotalLength" (toJSON @Word64))
, (26, decodeField' "MaximumIpTotalLength" (toJSON @Word64))
, (27, decodeField' "SourceIPv6Address" (toJSON @Addr6))
, (28, decodeField' "DestinationIPv6Address" (toJSON @Addr6))
, (29, decodeField' "SourceIPv6PrefixLength" (toJSON @Word8))
, (30, decodeField' "DestinationIPv6PrefixLength" (toJSON @Word8))
, (31, decodeField' "FlowLabelIPv6" (toJSON @Word32))
, (32, decodeField' "IcmpTypeCodeIPv4" (toJSON @Word16))
, (33, decodeField' "IgmpType" (toJSON @Word8))
, (34, decodeField' "SamplingInterval" (toJSON @Word32))
, (35, decodeField' "SamplingAlgorithm" (toJSON @Word8))
, (36, decodeField' "FlowActiveTimeout" (toJSON @Word16))
, (37, decodeField' "FlowIdleTimeout" (toJSON @Word16))
, (38, decodeField' "EngineType" (toJSON @Word8))
, (39, decodeField' "EngineId" (toJSON @Word8))
, (40, decodeField' "ExportedOctetTotalCount" (toJSON @Word64))
, (41, decodeField' "ExportedMessageTotalCount" (toJSON @Word64))
, (42, decodeField' "ExportedFlowRecordTotalCount" (toJSON @Word64))
, (43, decodeField' "Ipv4RouterSc" (toJSON @Addr4))
, (44, decodeField' "SourceIPv4Prefix" (toJSON @Addr4))
, (45, decodeField' "DestinationIPv4Prefix" (toJSON @Addr4))
, (46, decodeField' "MplsTopLabelType" (toJSON @Word8))
, (47, decodeField' "MplsTopLabelIPv4Address" (toJSON @Addr4))
, (48, decodeField' "SamplerId" (toJSON @Word8))
, (49, decodeField' "SamplerMode" (toJSON @Word8))
, (50, decodeField' "SamplerRandomInterval" (toJSON @Word32))
, (51, decodeField' "ClassId" (toJSON @Word8))
, (52, decodeField' "MinimumTTL" (toJSON @Word8))
, (53, decodeField' "MaximumTTL" (toJSON @Word8))
, (54, decodeField' "FragmentIdentification" (toJSON @Word32))
, (55, decodeField' "PostIpClassOfService" (toJSON @Word8))
, (56, decodeField' "SourceMacAddress" (toJSON @MAC))
, (57, decodeField' "PostDestinationMacAddress" (toJSON @MAC))
, (58, decodeField' "VlanId" (toJSON @Word16))
, (59, decodeField' "PostVlanId" (toJSON @Word16))
, (60, decodeField' "IpVersion" (toJSON @Word8))
, (61, decodeField' "FlowDirection" (toJSON @Word8))
, (62, decodeField' "IpNextHopIPv6Address" (toJSON @Addr6))
, (63, decodeField' "BgpNextHopIPv6Address" (toJSON @Addr6))
, (64, decodeField' "Ipv6ExtensionHeaders" (toJSON @Word32))
, (70, decodeField' "MplsTopLabelStackSection" (toJSON @Bytes))
, (71, decodeField' "MplsLabelStackSection2" (toJSON @Bytes))
, (72, decodeField' "MplsLabelStackSection3" (toJSON @Bytes))
, (73, decodeField' "MplsLabelStackSection4" (toJSON @Bytes))
, (74, decodeField' "MplsLabelStackSection5" (toJSON @Bytes))
, (75, decodeField' "MplsLabelStackSection6" (toJSON @Bytes))
, (76, decodeField' "MplsLabelStackSection7" (toJSON @Bytes))
, (77, decodeField' "MplsLabelStackSection8" (toJSON @Bytes))
, (78, decodeField' "MplsLabelStackSection9" (toJSON @Bytes))
, (79, decodeField' "MplsLabelStackSection10" (toJSON @Bytes))
, (80, decodeField' "DestinationMacAddress" (toJSON @MAC))
, (81, decodeField' "PostSourceMacAddress" (toJSON @MAC))
, (82, decodeField' "InterfaceName" (toJSON @Text))
, (83, decodeField' "InterfaceDescription" (toJSON @Text))
, (84, decodeField' "SamplerName" (toJSON @Text))
, (85, decodeField' "OctetTotalCount" (toJSON @Word64))
, (86, decodeField' "PacketTotalCount" (toJSON @Word64))
, (87, decodeField' "FlagsAndSamplerId" (toJSON @Word32))
, (88, decodeField' "FragmentOffset" (toJSON @Word16))
, (89, decodeField' "ForwardingStatus" (toJSON @Word32))
, (90, decodeField' "MplsVpnRouteDistinguisher" (toJSON @Bytes))
, (91, decodeField' "MplsTopLabelPrefixLength" (toJSON @Word8))
, (92, decodeField' "SrcTrafficIndex" (toJSON @Word32))
, (93, decodeField' "DstTrafficIndex" (toJSON @Word32))
, (94, decodeField' "ApplicationDescription" (toJSON @Text))
, (95, decodeField' "ApplicationId" (toJSON @Bytes))
, (96, decodeField' "ApplicationName" (toJSON @Text))
, (98, decodeField' "PostIpDiffServCodePoint" (toJSON @Word8))
, (99, decodeField' "MulticastReplicationFactor" (toJSON @Word32))
, (100, decodeField' "ClassName" (toJSON @Text))
, (101, decodeField' "ClassificationEngineId" (toJSON @Word8))
, (102, decodeField' "Layer2packetSectionOffset" (toJSON @Word16))
, (103, decodeField' "Layer2packetSectionSize" (toJSON @Word16))
, (104, decodeField' "Layer2packetSectionData" (toJSON @Bytes))
, (128, decodeField' "BgpNextAdjacentAsNumber" (toJSON @Word32))
, (129, decodeField' "BgpPrevAdjacentAsNumber" (toJSON @Word32))
, (130, decodeField' "ExporterIPv4Address" (toJSON @Addr4))
, (131, decodeField' "ExporterIPv6Address" (toJSON @Addr6))
, (132, decodeField' "DroppedOctetDeltaCount" (toJSON @Word64))
, (133, decodeField' "DroppedPacketDeltaCount" (toJSON @Word64))
, (134, decodeField' "DroppedOctetTotalCount" (toJSON @Word64))
, (135, decodeField' "DroppedPacketTotalCount" (toJSON @Word64))
, (136, decodeField' "FlowEndReason" (toJSON @Word8))
, (137, decodeField' "CommonPropertiesId" (toJSON @Word64))
, (138, decodeField' "ObservationPointId" (toJSON @Word64))
, (139, decodeField' "IcmpTypeCodeIPv6" (toJSON @Word16))
, (140, decodeField' "MplsTopLabelIPv6Address" (toJSON @Addr6))
, (141, decodeField' "LineCardId" (toJSON @Word32))
, (142, decodeField' "PortId" (toJSON @Word32))
, (143, decodeField' "MeteringProcessId" (toJSON @Word32))
, (144, decodeField' "ExportingProcessId" (toJSON @Word32))
, (145, decodeField' "TemplateId" (toJSON @Word16))
, (146, decodeField' "WlanChannelId" (toJSON @Word8))
, (147, decodeField' "WlanSSID" (toJSON @Text))
, (148, decodeField' "FlowId" (toJSON @Word64))
, (149, decodeField' "ObservationDomainId" (toJSON @Word32))
, (150, decodeField' "FlowStartSeconds" (toJSON @Word))
, (151, decodeField' "FlowEndSeconds" (toJSON @Word))
, (152, decodeField' "FlowStartMilliseconds" (toJSON @Word))
, (153, decodeField' "FlowEndMilliseconds" (toJSON @Word))
, (154, decodeField' "FlowStartMicroseconds" (toJSON @Word))
, (155, decodeField' "FlowEndMicroseconds" (toJSON @Word))
, (156, decodeField' "FlowStartNanoseconds" (toJSON @Word))
, (157, decodeField' "FlowEndNanoseconds" (toJSON @Word))
, (158, decodeField' "FlowStartDeltaMicroseconds" (toJSON @Word32))
, (159, decodeField' "FlowEndDeltaMicroseconds" (toJSON @Word32))
, (160, decodeField' "SystemInitTimeMilliseconds" (toJSON @Word))
, (161, decodeField' "FlowDurationMilliseconds" (toJSON @Word32))
, (162, decodeField' "FlowDurationMicroseconds" (toJSON @Word32))
, (163, decodeField' "ObservedFlowTotalCount" (toJSON @Word64))
, (164, decodeField' "IgnoredPacketTotalCount" (toJSON @Word64))
, (165, decodeField' "IgnoredOctetTotalCount" (toJSON @Word64))
, (166, decodeField' "NotSentFlowTotalCount" (toJSON @Word64))
, (167, decodeField' "NotSentPacketTotalCount" (toJSON @Word64))
, (168, decodeField' "NotSentOctetTotalCount" (toJSON @Word64))
, (169, decodeField' "DestinationIPv6Prefix" (toJSON @Addr6))
, (170, decodeField' "SourceIPv6Prefix" (toJSON @Addr6))
, (171, decodeField' "PostOctetTotalCount" (toJSON @Word64))
, (172, decodeField' "PostPacketTotalCount" (toJSON @Word64))
, (173, decodeField' "FlowKeyIndicator" (toJSON @Word64))
, (174, decodeField' "PostMCastPacketTotalCount" (toJSON @Word64))
, (175, decodeField' "PostMCastOctetTotalCount" (toJSON @Word64))
, (176, decodeField' "IcmpTypeIPv4" (toJSON @Word8))
, (177, decodeField' "IcmpCodeIPv4" (toJSON @Word8))
, (178, decodeField' "IcmpTypeIPv6" (toJSON @Word8))
, (179, decodeField' "IcmpCodeIPv6" (toJSON @Word8))
, (180, decodeField' "UdpSourcePort" (toJSON @Word16))
, (181, decodeField' "UdpDestinationPort" (toJSON @Word16))
, (182, decodeField' "TcpSourcePort" (toJSON @Word16))
, (183, decodeField' "TcpDestinationPort" (toJSON @Word16))
, (184, decodeField' "TcpSequenceNumber" (toJSON @Word32))
, (185, decodeField' "TcpAcknowledgementNumber" (toJSON @Word32))
, (186, decodeField' "TcpWindowSize" (toJSON @Word16))
, (187, decodeField' "TcpUrgentPointer" (toJSON @Word16))
, (188, decodeField' "TcpHeaderLength" (toJSON @Word8))
, (189, decodeField' "IpHeaderLength" (toJSON @Word8))
, (190, decodeField' "TotalLengthIPv4" (toJSON @Word16))
, (191, decodeField' "PayloadLengthIPv6" (toJSON @Word16))
, (192, decodeField' "IpTTL" (toJSON @Word8))
, (193, decodeField' "NextHeaderIPv6" (toJSON @Word8))
, (194, decodeField' "MplsPayloadLength" (toJSON @Word32))
, (195, decodeField' "IpDiffServCodePoint" (toJSON @Word8))
, (196, decodeField' "IpPrecedence" (toJSON @Word8))
, (197, decodeField' "FragmentFlags" (toJSON @Word8))
, (198, decodeField' "OctetDeltaSumOfSquares" (toJSON @Word64))
, (199, decodeField' "OctetTotalSumOfSquares" (toJSON @Word64))
, (200, decodeField' "MplsTopLabelTTL" (toJSON @Word8))
, (201, decodeField' "MplsLabelStackLength" (toJSON @Word32))
, (202, decodeField' "MplsLabelStackDepth" (toJSON @Word32))
, (203, decodeField' "MplsTopLabelExp" (toJSON @Word8))
, (204, decodeField' "IpPayloadLength" (toJSON @Word32))
, (205, decodeField' "UdpMessageLength" (toJSON @Word16))
, (206, decodeField' "IsMulticast" (toJSON @Word8))
, (207, decodeField' "Ipv4IHL" (toJSON @Word8))
, (208, decodeField' "Ipv4Options" (toJSON @Word32))
, (209, decodeField' "TcpOptions" (toJSON @Word64))
, (210, decodeField' "PaddingOctets" (toJSON @Bytes))
, (211, decodeField' "CollectorIPv4Address" (toJSON @Addr4))
, (212, decodeField' "CollectorIPv6Address" (toJSON @Addr6))
, (213, decodeField' "ExportInterface" (toJSON @Word32))
, (214, decodeField' "ExportProtocolVersion" (toJSON @Word8))
, (215, decodeField' "ExportTransportProtocol" (toJSON @Word8))
, (216, decodeField' "CollectorTransportPort" (toJSON @Word16))
, (217, decodeField' "ExporterTransportPort" (toJSON @Word16))
, (218, decodeField' "TcpSynTotalCount" (toJSON @Word64))
, (219, decodeField' "TcpFinTotalCount" (toJSON @Word64))
, (220, decodeField' "TcpRstTotalCount" (toJSON @Word64))
, (221, decodeField' "TcpPshTotalCount" (toJSON @Word64))
, (222, decodeField' "TcpAckTotalCount" (toJSON @Word64))
, (223, decodeField' "TcpUrgTotalCount" (toJSON @Word64))
, (224, decodeField' "IpTotalLength" (toJSON @Word64))
, (225, decodeField' "PostNATSourceIPv4Address" (toJSON @Addr4))
, (226, decodeField' "PostNATDestinationIPv4Address" (toJSON @Addr4))
, (227, decodeField' "PostNAPTSourceTransportPort" (toJSON @Word16))
, (228, decodeField' "PostNAPTDestinationTransportPort" (toJSON @Word16))
, (229, decodeField' "NatOriginatingAddressRealm" (toJSON @Word8))
, (230, decodeField' "NatEvent" (toJSON @Word8))
, (231, decodeField' "InitiatorOctets" (toJSON @Word64))
, (232, decodeField' "ResponderOctets" (toJSON @Word64))
, (233, decodeField' "FirewallEvent" (toJSON @Word8))
, (234, decodeField' "IngressVRFID" (toJSON @Word32))
, (235, decodeField' "EgressVRFID" (toJSON @Word32))
, (236, decodeField' "VRFname" (toJSON @Text))
, (237, decodeField' "PostMplsTopLabelExp" (toJSON @Word8))
, (238, decodeField' "TcpWindowScale" (toJSON @Word16))
, (239, decodeField' "BiflowDirection" (toJSON @Word8))
, (240, decodeField' "EthernetHeaderLength" (toJSON @Word8))
, (241, decodeField' "EthernetPayloadLength" (toJSON @Word16))
, (242, decodeField' "EthernetTotalLength" (toJSON @Word16))
, (243, decodeField' "Dot1qVlanId" (toJSON @Word16))
, (244, decodeField' "Dot1qPriority" (toJSON @Word8))
, (245, decodeField' "Dot1qCustomerVlanId" (toJSON @Word16))
, (246, decodeField' "Dot1qCustomerPriority" (toJSON @Word8))
, (247, decodeField' "MetroEvcId" (toJSON @Text))
, (248, decodeField' "MetroEvcType" (toJSON @Word8))
, (249, decodeField' "PseudoWireId" (toJSON @Word32))
, (250, decodeField' "PseudoWireType" (toJSON @Word16))
, (251, decodeField' "PseudoWireControlWord" (toJSON @Word32))
, (252, decodeField' "IngressPhysicalInterface" (toJSON @Word32))
, (253, decodeField' "EgressPhysicalInterface" (toJSON @Word32))
, (254, decodeField' "PostDot1qVlanId" (toJSON @Word16))
, (255, decodeField' "PostDot1qCustomerVlanId" (toJSON @Word16))
, (256, decodeField' "EthernetType" (toJSON @Word16))
, (257, decodeField' "PostIpPrecedence" (toJSON @Word8))
, (258, decodeField' "CollectionTimeMilliseconds" (toJSON @Word))
, (259, decodeField' "ExportSctpStreamId" (toJSON @Word16))
, (260, decodeField' "MaxExportSeconds" (toJSON @Word))
, (261, decodeField' "MaxFlowEndSeconds" (toJSON @Word))
, (262, decodeField' "MessageMD5Checksum" (toJSON @Bytes))
, (263, decodeField' "MessageScope" (toJSON @Word8))
, (264, decodeField' "MinExportSeconds" (toJSON @Word))
, (265, decodeField' "MinFlowStartSeconds" (toJSON @Word))
, (266, decodeField' "OpaqueOctets" (toJSON @Bytes))
, (267, decodeField' "SessionScope" (toJSON @Word8))
, (268, decodeField' "MaxFlowEndMicroseconds" (toJSON @Word))
, (269, decodeField' "MaxFlowEndMilliseconds" (toJSON @Word))
, (270, decodeField' "MaxFlowEndNanoseconds" (toJSON @Word))
, (271, decodeField' "MinFlowStartMicroseconds" (toJSON @Word))
, (272, decodeField' "MinFlowStartMilliseconds" (toJSON @Word))
, (273, decodeField' "MinFlowStartNanoseconds" (toJSON @Word))
, (274, decodeField' "CollectorCertificate" (toJSON @Bytes))
, (275, decodeField' "ExporterCertificate" (toJSON @Bytes))
, (276, decodeField' "DataRecordsReliability" (toJSON @Bool))
, (277, decodeField' "ObservationPointType" (toJSON @Word8))
, (278, decodeField' "NewConnectionDeltaCount" (toJSON @Word32))
, (279, decodeField' "ConnectionSumDurationSeconds" (toJSON @Word64))
, (280, decodeField' "ConnectionTransactionId" (toJSON @Word64))
, (281, decodeField' "PostNATSourceIPv6Address" (toJSON @Addr6))
, (282, decodeField' "PostNATDestinationIPv6Address" (toJSON @Addr6))
, (283, decodeField' "NatPoolId" (toJSON @Word32))
, (284, decodeField' "NatPoolName" (toJSON @Text))
, (285, decodeField' "AnonymizationFlags" (toJSON @Word16))
, (286, decodeField' "AnonymizationTechnique" (toJSON @Word16))
, (287, decodeField' "InformationElementIndex" (toJSON @Word16))
, (288, decodeField' "P2pTechnology" (toJSON @Text))
, (289, decodeField' "TunnelTechnology" (toJSON @Text))
, (290, decodeField' "EncryptedTechnology" (toJSON @Text))
, (294, decodeField' "BgpValidityState" (toJSON @Word8))
, (295, decodeField' "IPSecSPI" (toJSON @Word32))
, (296, decodeField' "GreKey" (toJSON @Word32))
, (297, decodeField' "NatType" (toJSON @Word8))
, (298, decodeField' "InitiatorPackets" (toJSON @Word64))
, (299, decodeField' "ResponderPackets" (toJSON @Word64))
, (300, decodeField' "ObservationDomainName" (toJSON @Text))
, (301, decodeField' "SelectionSequenceId" (toJSON @Word64))
, (302, decodeField' "SelectorId" (toJSON @Word64))
, (303, decodeField' "InformationElementId" (toJSON @Word16))
, (304, decodeField' "SelectorAlgorithm" (toJSON @Word16))
, (305, decodeField' "SamplingPacketInterval" (toJSON @Word32))
, (306, decodeField' "SamplingPacketSpace" (toJSON @Word32))
, (307, decodeField' "SamplingTimeInterval" (toJSON @Word32))
, (308, decodeField' "SamplingTimeSpace" (toJSON @Word32))
, (309, decodeField' "SamplingSize" (toJSON @Word32))
, (310, decodeField' "SamplingPopulation" (toJSON @Word32))
, (311, decodeField' "SamplingProbability" (toJSON @Double))
, (312, decodeField' "DataLinkFrameSize" (toJSON @Word16))
, (313, decodeField' "IpHeaderPacketSection" (toJSON @Bytes))
, (314, decodeField' "IpPayloadPacketSection" (toJSON @Bytes))
, (315, decodeField' "DataLinkFrameSection" (toJSON @Bytes))
, (316, decodeField' "MplsLabelStackSection" (toJSON @Bytes))
, (317, decodeField' "MplsPayloadPacketSection" (toJSON @Bytes))
, (318, decodeField' "SelectorIdTotalPktsObserved" (toJSON @Word64))
, (319, decodeField' "SelectorIdTotalPktsSelected" (toJSON @Word64))
, (320, decodeField' "AbsoluteError" (toJSON @Double))
, (321, decodeField' "RelativeError" (toJSON @Double))
, (322, decodeField' "ObservationTimeSeconds" (toJSON @Word))
, (323, decodeField' "ObservationTimeMilliseconds" (toJSON @Word))
, (324, decodeField' "ObservationTimeMicroseconds" (toJSON @Word))
, (325, decodeField' "ObservationTimeNanoseconds" (toJSON @Word))
, (326, decodeField' "DigestHashValue" (toJSON @Word64))
, (327, decodeField' "HashIPPayloadOffset" (toJSON @Word64))
, (328, decodeField' "HashIPPayloadSize" (toJSON @Word64))
, (329, decodeField' "HashOutputRangeMin" (toJSON @Word64))
, (330, decodeField' "HashOutputRangeMax" (toJSON @Word64))
, (331, decodeField' "HashSelectedRangeMin" (toJSON @Word64))
, (332, decodeField' "HashSelectedRangeMax" (toJSON @Word64))
, (333, decodeField' "HashDigestOutput" (toJSON @Bool))
, (334, decodeField' "HashInitialiserValue" (toJSON @Word64))
, (335, decodeField' "SelectorName" (toJSON @Text))
, (336, decodeField' "UpperCILimit" (toJSON @Double))
, (337, decodeField' "LowerCILimit" (toJSON @Double))
, (338, decodeField' "ConfidenceLevel" (toJSON @Double))
, (339, decodeField' "InformationElementDataType" (toJSON @Word8))
, (340, decodeField' "InformationElementDescription" (toJSON @Text))
, (341, decodeField' "InformationElementName" (toJSON @Text))
, (342, decodeField' "InformationElementRangeBegin" (toJSON @Word64))
, (343, decodeField' "InformationElementRangeEnd" (toJSON @Word64))
, (344, decodeField' "InformationElementSemantics" (toJSON @Word8))
, (345, decodeField' "InformationElementUnits" (toJSON @Word16))
, (346, decodeField' "PrivateEnterpriseNumber" (toJSON @Word32))
, (347, decodeField' "VirtualStationInterfaceId" (toJSON @Bytes))
, (348, decodeField' "VirtualStationInterfaceName" (toJSON @Text))
, (349, decodeField' "VirtualStationUUID" (toJSON @Bytes))
, (350, decodeField' "VirtualStationName" (toJSON @Text))
, (351, decodeField' "Layer2SegmentId" (toJSON @Word64))
, (352, decodeField' "Layer2OctetDeltaCount" (toJSON @Word64))
, (353, decodeField' "Layer2OctetTotalCount" (toJSON @Word64))
, (354, decodeField' "IngressUnicastPacketTotalCount" (toJSON @Word64))
, (355, decodeField' "IngressMulticastPacketTotalCount" (toJSON @Word64))
, (356, decodeField' "IngressBroadcastPacketTotalCount" (toJSON @Word64))
, (357, decodeField' "EgressUnicastPacketTotalCount" (toJSON @Word64))
, (358, decodeField' "EgressBroadcastPacketTotalCount" (toJSON @Word64))
, (359, decodeField' "MonitoringIntervalStartMilliSeconds" (toJSON @Word))
, (360, decodeField' "MonitoringIntervalEndMilliSeconds" (toJSON @Word))
, (361, decodeField' "PortRangeStart" (toJSON @Word16))
, (362, decodeField' "PortRangeEnd" (toJSON @Word16))
, (363, decodeField' "PortRangeStepSize" (toJSON @Word16))
, (364, decodeField' "PortRangeNumPorts" (toJSON @Word16))
, (365, decodeField' "StaMacAddress" (toJSON @MAC))
, (366, decodeField' "StaIPv4Address" (toJSON @Addr4))
, (367, decodeField' "WtpMacAddress" (toJSON @MAC))
, (368, decodeField' "IngressInterfaceType" (toJSON @Word32))
, (369, decodeField' "EgressInterfaceType" (toJSON @Word32))
, (370, decodeField' "RtpSequenceNumber" (toJSON @Word16))
, (371, decodeField' "UserName" (toJSON @Text))
, (372, decodeField' "ApplicationCategoryName" (toJSON @Text))
, (373, decodeField' "ApplicationSubCategoryName" (toJSON @Text))
, (374, decodeField' "ApplicationGroupName" (toJSON @Text))
, (375, decodeField' "OriginalFlowsPresent" (toJSON @Word64))
, (376, decodeField' "OriginalFlowsInitiated" (toJSON @Word64))
, (377, decodeField' "OriginalFlowsCompleted" (toJSON @Word64))
, (378, decodeField' "DistinctCountOfSourceIPAddress" (toJSON @Word64))
, (379, decodeField' "DistinctCountOfDestinationIPAddress" (toJSON @Word64))
, (380, decodeField' "DistinctCountOfSourceIPv4Address" (toJSON @Word32))
, (381, decodeField' "DistinctCountOfDestinationIPv4Address" (toJSON @Word32))
, (382, decodeField' "DistinctCountOfSourceIPv6Address" (toJSON @Word64))
, (383, decodeField' "DistinctCountOfDestinationIPv6Address" (toJSON @Word64))
, (384, decodeField' "ValueDistributionMethod" (toJSON @Word8))
, (385, decodeField' "Rfc3550JitterMilliseconds" (toJSON @Word32))
, (386, decodeField' "Rfc3550JitterMicroseconds" (toJSON @Word32))
, (387, decodeField' "Rfc3550JitterNanoseconds" (toJSON @Word32))
, (388, decodeField' "Dot1qDEI" (toJSON @Bool))
, (389, decodeField' "Dot1qCustomerDEI" (toJSON @Bool))
, (390, decodeField' "FlowSelectorAlgorithm" (toJSON @Word16))
, (391, decodeField' "FlowSelectedOctetDeltaCount" (toJSON @Word64))
, (392, decodeField' "FlowSelectedPacketDeltaCount" (toJSON @Word64))
, (393, decodeField' "FlowSelectedFlowDeltaCount" (toJSON @Word64))
, (394, decodeField' "SelectorIDTotalFlowsObserved" (toJSON @Word64))
, (395, decodeField' "SelectorIDTotalFlowsSelected" (toJSON @Word64))
, (396, decodeField' "SamplingFlowInterval" (toJSON @Word64))
, (397, decodeField' "SamplingFlowSpacing" (toJSON @Word64))
, (398, decodeField' "FlowSamplingTimeInterval" (toJSON @Word64))
, (399, decodeField' "FlowSamplingTimeSpacing" (toJSON @Word64))
, (400, decodeField' "HashFlowDomain" (toJSON @Word16))
, (401, decodeField' "TransportOctetDeltaCount" (toJSON @Word64))
, (402, decodeField' "TransportPacketDeltaCount" (toJSON @Word64))
, (403, decodeField' "OriginalExporterIPv4Address" (toJSON @Addr4))
, (404, decodeField' "OriginalExporterIPv6Address" (toJSON @Addr6))
, (405, decodeField' "OriginalObservationDomainId" (toJSON @Word32))
, (406, decodeField' "IntermediateProcessId" (toJSON @Word32))
, (407, decodeField' "IgnoredDataRecordTotalCount" (toJSON @Word64))
, (408, decodeField' "DataLinkFrameType" (toJSON @Word16))
, (409, decodeField' "SectionOffset" (toJSON @Word16))
, (410, decodeField' "SectionExportedOctets" (toJSON @Word16))
, (411, decodeField' "Dot1qServiceInstanceTag" (toJSON @Bytes))
, (412, decodeField' "Dot1qServiceInstanceId" (toJSON @Word32))
, (413, decodeField' "Dot1qServiceInstancePriority" (toJSON @Word8))
, (414, decodeField' "Dot1qCustomerSourceMacAddress" (toJSON @MAC))
, (415, decodeField' "Dot1qCustomerDestinationMacAddress" (toJSON @MAC))
, (417, decodeField' "PostLayer2OctetDeltaCount" (toJSON @Word64))
, (418, decodeField' "PostMCastLayer2OctetDeltaCount" (toJSON @Word64))
, (420, decodeField' "PostLayer2OctetTotalCount" (toJSON @Word64))
, (421, decodeField' "PostMCastLayer2OctetTotalCount" (toJSON @Word64))
, (422, decodeField' "MinimumLayer2TotalLength" (toJSON @Word64))
, (423, decodeField' "MaximumLayer2TotalLength" (toJSON @Word64))
, (424, decodeField' "DroppedLayer2OctetDeltaCount" (toJSON @Word64))
, (425, decodeField' "DroppedLayer2OctetTotalCount" (toJSON @Word64))
, (426, decodeField' "IgnoredLayer2OctetTotalCount" (toJSON @Word64))
, (427, decodeField' "NotSentLayer2OctetTotalCount" (toJSON @Word64))
, (428, decodeField' "Layer2OctetDeltaSumOfSquares" (toJSON @Word64))
, (429, decodeField' "Layer2OctetTotalSumOfSquares" (toJSON @Word64))
, (430, decodeField' "Layer2FrameDeltaCount" (toJSON @Word64))
, (431, decodeField' "Layer2FrameTotalCount" (toJSON @Word64))
, (432, decodeField' "PseudoWireDestinationIPv4Address" (toJSON @Addr4))
, (433, decodeField' "IgnoredLayer2FrameTotalCount" (toJSON @Word64))
, (434, decodeField' "MibObjectValueInteger" (toJSON @Int64))
, (435, decodeField' "MibObjectValueOctetString" (toJSON @Bytes))
, (436, decodeField' "MibObjectValueOID" (toJSON @Bytes))
, (437, decodeField' "MibObjectValueBits" (toJSON @Bytes))
, (438, decodeField' "MibObjectValueIPAddress" (toJSON @Addr4))
, (439, decodeField' "MibObjectValueCounter" (toJSON @Word64))
, (440, decodeField' "MibObjectValueGauge" (toJSON @Word32))
, (441, decodeField' "MibObjectValueTimeTicks" (toJSON @Word32))
, (442, decodeField' "MibObjectValueUnsigned" (toJSON @Word64))
, (445, decodeField' "MibObjectIdentifier" (toJSON @Bytes))
, (446, decodeField' "MibSubIdentifier" (toJSON @Word32))
, (447, decodeField' "MibIndexIndicator" (toJSON @Word64))
, (448, decodeField' "MibCaptureTimeSemantics" (toJSON @Word8))
, (449, decodeField' "MibContextEngineID" (toJSON @Bytes))
, (450, decodeField' "MibContextName" (toJSON @Text))
, (451, decodeField' "MibObjectName" (toJSON @Text))
, (452, decodeField' "MibObjectDescription" (toJSON @Text))
, (453, decodeField' "MibObjectSyntax" (toJSON @Text))
, (454, decodeField' "MibModuleName" (toJSON @Text))
, (455, decodeField' "MobileIMSI" (toJSON @Text))
, (456, decodeField' "MobileMSISDN" (toJSON @Text))
, (457, decodeField' "HttpStatusCode" (toJSON @Word16))
]
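-- A consumer of this table would typically look a field ID up here and fall
-- back to 'decodeOther' when it is absent. Sketch only; the helper name and
-- the qualified 'HashMap.lookup' import are assumptions, not part of this
-- module:
--
-- > decodeAnyField :: Word16 -> ByteString -> Object
-- > decodeAnyField fid bs =
-- >   case HashMap.lookup fid decoders of
-- >     Just dec -> dec bs
-- >     Nothing  -> decodeOther fid bs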
-- vim:set ft=haskell sw=2 ts=2 et:
| techlib/netflow | lib/Network/Flow/V9/Fields.hs | mit | 31,635 | 0 | 12 | 6,130 | 10,304 | 5,594 | 4,710 | 513 | 2 |
-----------------------------------------------------------------------------
-- |
-- Module : Debug.NoTrace
-- Copyright : (c) Cindy Wang (CindyLinz) 2013
-- License : MIT
--
-- Maintainer : Cindy Wang (CindyLinz)
-- Stability : provisional
-- Portability : portable
--
-- This module introduces functions with types identical to those of the functions in the "Debug.Trace" module.
--
-- You might write some programs like this:
--
-- > import Debug.Trace
-- >
-- > fib 0 = 1
-- > fib 1 = 1
-- > fib n = ("fib " ++ show n) `trace` fib (n - 1) + fib (n - 2)
--
-- Once you have finished debugging, just change the line
--
-- > import Debug.Trace
--
-- into
--
-- > import Debug.NoTrace
--
-- Then all the tracing functions are silently removed.
-------------------------------------------------------------------------------
module Debug.NoTrace where
trace :: String -> a -> a
trace _ = id
traceId :: String -> String
traceId = id
traceShow :: Show a => a -> b -> b
traceShow _ = id
traceShowId :: Show a => a -> a
traceShowId = id
traceStack :: String -> a -> a
traceStack _ = id
traceIO :: String -> IO ()
traceIO _ = return ()
traceM :: Monad m => String -> m ()
traceM _ = return ()
traceShowM :: (Show a, Monad m) => a -> m ()
traceShowM _ = return ()
putTraceMsg :: String -> IO ()
putTraceMsg _ = return ()
traceEvent :: String -> a -> a
traceEvent _ = id
traceEventIO :: String -> IO ()
traceEventIO _ = return ()
traceMarker :: String -> a -> a
traceMarker _ = id
traceMarkerIO :: String -> IO ()
traceMarkerIO _ = return ()
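-- With these definitions imported in place of "Debug.Trace", an expression
-- such as
--
-- > ("fib " ++ show n) `trace` fib (n - 1) + fib (n - 2)
--
-- still compiles and evaluates to the same result, only without printing
-- anything, because 'trace' here discards its message and returns its second
-- argument unchanged.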
| CindyLinz/Haskell-NoTrace | src/Debug/NoTrace.hs | mit | 1,572 | 0 | 8 | 323 | 367 | 200 | 167 | 27 | 1 |