code (string, 5 to 1.03M chars) | repo_name (string, 5 to 90) | path (string, 4 to 158) | license (15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---
{-# LANGUAGE Rank2Types #-}
module TeX.Parser.Assignment
where
import Text.Parsec ((<|>), (<?>), modifyState, getState, anyToken)
import Control.Lens ((.~), (^.))
import TeX.Alias
import TeX.Category
import TeX.Count
import TeX.Def hiding (definition)
import TeX.Parser.MacroParser
import TeX.Parser.Parser
import TeX.Parser.Prim
import TeX.Parser.Util
import TeX.State
import TeX.Token
definition :: Expander -> TeXParser Def
definition = parseDef
prefix :: TeXParser Token
prefix = unimplemented
macroAssignment :: Expander -> TeXParser ()
macroAssignment expand =
(definition expand >>= doSet)
<|> (prefix >> macroAssignment expand)
where
doSet def@(Def name _ _) = modifyState (stateDefinition name .~ Just def)
setIntegerVariable :: Bool -> IntegerVariable -> Maybe Count -> TeXParser ()
setIntegerVariable _ (IntegerParameter _) _ = unimplemented
setIntegerVariable _ (CountDefToken _) _ = unimplemented
setIntegerVariable True (LiteralCount counter) value =
modifyState (globalStateCount (fromInteger counter) .~ value)
setIntegerVariable False (LiteralCount counter) value =
modifyState (stateCount (fromInteger counter) .~ value)
modifyIntegerVariable :: Bool -> IntegerVariable -> (Count -> Count) -> TeXParser ()
modifyIntegerVariable _ (IntegerParameter _) _ = unimplemented
modifyIntegerVariable _ (CountDefToken _) _ = unimplemented
modifyIntegerVariable global var@(LiteralCount counter) modify = do
currValue <- (^.) <$> getState <*> (return (stateCount (fromInteger counter)))
setIntegerVariable global var (currValue >>= (return . modify))
arithmetic :: Expander -> Bool -> TeXParser ()
arithmetic expand global =
advance <|> multiply <|> divide
where
optionalBy =
(expand (exactToken (CharToken 'b' Letter)) >>
expand (exactToken (CharToken 'y' Letter)) >>
return ()) <|> (return ()) <?> "optional by"
-- TODO(emily): make this work for dimen/glue/muglue
advance = do
_ <- expand (exactToken (ControlSequence "advance"))
variable <- integerVariable expand
optionalBy
value <- count expand
modifyIntegerVariable global variable (\x -> x + value)
multiply = do
_ <- expand (exactToken (ControlSequence "multiply"))
variable <- integerVariable expand
optionalBy
value <- count expand
modifyIntegerVariable global variable (\x -> x * value)
divide = do
_ <- expand (exactToken (ControlSequence "divide"))
variable <- integerVariable expand
optionalBy
value <- count expand
modifyIntegerVariable global variable (\x -> x `div` value)
integerVariableAssignment :: Expander -> Bool -> TeXParser ()
integerVariableAssignment expand global = do
variable <- integerVariable expand
equals expand
value <- count expand
setIntegerVariable global variable (Just value)
variableAssignment :: Expander -> Bool -> TeXParser ()
variableAssignment expand global =
integerVariableAssignment expand global
letAssignment :: Expander -> Bool -> TeXParser ()
letAssignment expand global = do
_ <- expand $ exactToken (ControlSequence "let")
cs <- controlSequence
equals expand
optionalSpace expand
token <- anyToken
case token of
ControlSequence "iftrue" -> letAlias cs AliasIfTrue
ControlSequence "iffalse" -> letAlias cs AliasIfFalse
ControlSequence setTo -> letMacro (extractControlSequence cs) setTo
_ -> unimplemented
where
myDefSetter name
| global = globalStateDefinition name
| otherwise = stateDefinition name
letMacro :: String -> String -> TeXParser ()
letMacro cs setTo = do
maybeDef <- (^.) <$> getState <*> (return $ stateDefinition setTo)
case maybeDef of
Just (Def _ pts rts) ->
modifyState (myDefSetter cs .~ Just (Def cs pts rts))
Nothing -> unimplemented
myAliasSetter name
| global = globalStateAlias name
| otherwise = stateAlias name
letAlias :: Token -> Alias -> TeXParser ()
letAlias tok setAlias =
modifyState (myAliasSetter tok .~ Just setAlias)
simpleAssignment :: Expander -> Bool -> TeXParser ()
simpleAssignment expand global =
variableAssignment expand global <|>
arithmetic expand global <|>
letAssignment expand global
nonMacroAssignment :: Expander -> Bool -> TeXParser ()
nonMacroAssignment expand global =
simpleAssignment expand global <|> (expand (exactToken (ControlSequence "global")) >> nonMacroAssignment expand True)
assignment :: Expander -> TeXParser ()
assignment expand = nonMacroAssignment expand False <|> macroAssignment expand
| xymostech/tex-parser | src/TeX/Parser/Assignment.hs | mit | 4,587 | 0 | 17 | 873 | 1,438 | 707 | 731 | 108 | 5 |
module Observable.Examples (
betaBinomial
, linearFit
, sinusoidal
, sinusoidalModel
, simulate
, ast
, condition
) where
import Observable
-- | A simple beta-binomial model for testing.
betaBinomial :: Int -> Double -> Double -> Observable Int
betaBinomial n a b = do
p <- observe "p" (beta a b)
observe "x" (binomial n p)
-- | An example Bayesian linear regression model.
linearFit :: Double -> Double -> [Double] -> Observable [Double]
linearFit c d xs = do
a <- observe "intercept" standard
b <- observe "slope" standard
v <- observe "variance" (gamma c d)
let model x = a + b * x
observe "ys" (isoGauss (fmap model xs) (sqrt v))
-- | An example Bayesian sinusoidal regression model.
sinusoidal :: [Double] -> Observable [Double]
sinusoidal xs = do
a <- observe "cosParam" (normal 0 10)
b <- observe "sinParam" (normal 0 10)
v <- observe "variance" (invGamma 1 2)
let model x = a*cos x + b*sin x
observe "ys" (isoGauss (fmap model xs) (sqrt v))
-- | The sinusoidal model prior, separated.
prior :: Observable (Double, Double, Double)
prior = do
a <- observe "cosParam" (normal 0 10)
b <- observe "sinParam" (normal 0 10)
v <- observe "variance" (invGamma 1 2)
return (a, b, v)
-- | The sinusoidal model likelihood, separated.
likelihood :: [Double] -> (Double, Double, Double) -> Observable [Double]
likelihood xs (a, b, v) = do
let model x = a*cos x + b*sin x
observe "ys" (isoGauss (fmap model xs) (sqrt v))
-- | An alternative spec for the sinusoidal model.
sinusoidalModel :: [Double] -> Observable [Double]
sinusoidalModel xs = prior >>= likelihood xs
| jtobin/observable-examples | Observable/Examples.hs | mit | 1,626 | 0 | 13 | 339 | 624 | 310 | 314 | 39 | 1 |
{-# LANGUAGE Arrows #-}
module Logic (logic) where
import Prelude hiding (id, (.))
import Control.Applicative (pure)
import Control.Category
import Control.Arrow
import Control.Coroutine
import Control.Coroutine.FRP
import Lambda.OpenGL (KeyEvent(..))
import Lambda.Vector
import ViewModel
logic :: Coroutine [KeyEvent] ViewModel
logic = pure $ ViewModel
{ ball = Ball
{ ballPos = Vec2 100 100
, ballRadius = 25
}
} | leonidas/lambda-frp | src/Logic.hs | mit | 477 | 0 | 10 | 117 | 126 | 78 | 48 | 16 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module Net.UDP_Client(
initialize,UDP_API,Interface(..),Packet(..),template,Port(..)
) where
import Net.Concurrent
import Control.Monad.State
import Control.Monad.Trans(lift)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.List((\\))
--import System.Random(randomRIO)
import Net.UDP
import qualified Net.IPv4 as IPv4
import qualified Net.Interface as Net
import Net.Utils(doReq)
import Net.Packet(InPacket,OutPacket)
import Net.Wire
import Monad.Util
type UDP_API m =
Net.TimedInterface m (IPv4.Addr,Packet InPacket) (IPv4.Addr,Packet OutPacket)
data Interface m
= Interface {
listen :: Port -> m (UDP_API m),
listenAny :: m (Port,UDP_API m),
unlisten :: Port -> m ()
}
data Req m
= Listen Port (UDP_API m->m ())
| ListenAny ((Port,UDP_API m)->m ())
| Unlisten Port
| FromNetwork (IPv4.Packet (Packet InPacket))
type Clients m = Map Port ((IPv4.Addr,Packet InPacket)->m ())
initialize putStrLn myIP iface =
do reqChan <- newChan
fork $ loop $ writeChan reqChan . FromNetwork =<< Net.rx iface
fork $ server debug myIP iface reqChan
let listen = doReq reqChan . Listen
listenAny = doReq reqChan ListenAny
unlisten = writeChan reqChan . Unlisten
return $ Interface listen listenAny unlisten
where
debug = putStrLn . ("UDP: "++)
-- I copied this type signature from ghci.
server :: (Eq (r ()), RefIO r m, ChannelIO c m, DelayIO m, ForkIO m)
=> ([Char] -> m ())
-> IPv4.Addr
-> Net.Interface m i (IPv4.Packet (Packet OutPacket))
-> c (Req m)
-> m a
server debug myIP iface reqChan =
flip evalStateT init $ loop (handle=<<readChan reqChan)
where
init = Map.empty
handle req =
case req of
Listen port reply -> listen port reply
ListenAny reply -> do port <- pickPort
let reply' iface = reply (port,iface)
listen port reply'
Unlisten port -> modify (Map.delete port)
FromNetwork ipPack ->
do let udpPacket = IPv4.content ipPack
src = IPv4.source ipPack
dst = IPv4.dest ipPack
sp = sourcePort udpPacket
port = destPort udpPacket
clients <- get
case Map.lookup port clients of
Just toClient -> lift $ toClient (src,udpPacket)
_ -> lift $ debug $ "Dropped packet from "++show (src,sp)
++" to "++show (IPv4.dest ipPack,port)
where
listen port reply =
do Net.TimedInterface{Net.rxT=rx,Net.txT=toClient} <- timedWire()
lift $ reply (Net.TimedInterface rx tx)
modify (\clients -> Map.insert port toClient clients)
pickPort = do inuse <- gets Map.keys
return $ head (map Port [32768..65535]\\inuse)
tx (destIP,updPacket) =
Net.tx iface (IPv4.template IPv4.UDP myIP destIP updPacket)
| nh2/network-house | Net/UDP_Client.hs | gpl-2.0 | 2,779 | 34 | 18 | 631 | 1,043 | 555 | 488 | 73 | 5 |
module Main where
main :: IO ()
main = do
print (2 * 3 + 5)
print (2 + 2 * 3 + 1)
print (3 ** 4 + 5 * 2 ** 5 + 1)
| llscm0202/BIGDATA2017 | ATIVIDADE1/exerciciosBasicos/ex1.hs | gpl-3.0 | 122 | 0 | 13 | 44 | 86 | 43 | 43 | 6 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE NamedFieldPuns #-}
module ConnectionM
( ConnStateConfig (..)
, ConnectionM
, ConnState (..)
, ConnReader (..)
, evalConnectionM
, initConnStateConfig
) where
import Data.ByteString.Lazy (ByteString)
import qualified Data.ByteString.Lazy as BS
import Data.Map.Strict (Map)
import Control.Concurrent.STM
import Control.Monad.Except (ExceptT, MonadError)
import qualified Control.Monad.Except as Except
import Control.Monad.RWS
import ProjectPrelude
import Streams
import Settings
import Frame (Frame)
import ErrorCodes
import ServerConfig
import Hpack
data ConnStateConfig mode = ConnStateConfig ConnState (ConnReader mode)
data ConnState = ConnState
{ stBuffer :: ByteString
, stMaxStreamId :: StreamId
, stStreams :: Map StreamId PerStreamData
, stExpectMoreHeaders :: Maybe (StreamId)
, stLocalDynTable :: DynamicTable
, stRemoteDynTable :: DynamicTable
}
data ConnReader mode = ConnReader
{ stSettings :: TVar ConnSettings
, stSendChan :: TChan Frame
, stEndStream :: TVar Bool
, stServerConfig :: ServerConfig mode
, stConnSendWindow :: TVar Int64
, stConnResvWindow :: TVar Int64
, stSocket :: ConnModeSocket mode
}
newtype ConnectionM mode a = ConnectionM (ExceptT ConnError (RWST (ConnReader mode) () ConnState IO) a)
deriving ( Functor
, Applicative
, Monad
, MonadError ConnError
, MonadState ConnState
, MonadReader (ConnReader mode)
, MonadIO
)
initConnStateConfig :: ConnModeSocket mode -> ServerConfig mode -> IO (ConnStateConfig mode)
initConnStateConfig sock config = do
reader <- initConnReader sock config
state <- initConnState
return $ ConnStateConfig state reader
initConnState :: IO ConnState
initConnState = do
streams <- initStreamsVar
return $ ConnState
{ stBuffer = BS.empty
, stMaxStreamId = StreamId 0
, stStreams = streams
, stExpectMoreHeaders = Nothing
, stLocalDynTable = emptyDynTable
, stRemoteDynTable = emptyDynTable
}
initConnReader :: ConnModeSocket mode -> ServerConfig mode -> IO (ConnReader mode)
initConnReader stSocket stServerConfig = do
stSettings <- newTVarIO initConnSettings
stSendChan <- newTChanIO
stEndStream <- newTVarIO False
stConnSendWindow <- newTVarIO initialWindowSize
stConnResvWindow <- newTVarIO initialWindowSize
return $ ConnReader
{ stSettings
, stSendChan
, stSocket
, stServerConfig
, stEndStream
, stConnSendWindow
, stConnResvWindow
}
evalConnectionM :: ConnectionM mode a -> ConnStateConfig mode -> IO (Either ConnError a)
evalConnectionM (ConnectionM conn) (ConnStateConfig state config) = do
(a, _) <- evalRWST (Except.runExceptT $ conn) config state
return a
| authchir/SoSe17-FFP-haskell-http2-server | src/ConnectionM.hs | gpl-3.0 | 3,760 | 0 | 11 | 1,537 | 704 | 387 | 317 | -1 | -1 |
module System.DevUtils.Base.Data.List (
split,
splitBy
) where
split :: (Eq a) => a -> [a] -> [[a]]
split d l = splitBy (/= d) l
splitBy :: (Eq a) => (a -> Bool) -> [a] -> [[a]]
splitBy _ [] = []
splitBy f l = h :
case t of
[] -> []
_ -> splitBy f (tail t)
where
(h, t) = (takeWhile f l, dropWhile f l)
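-- A couple of illustrative evaluations (example values only):
--
--   split ',' "a,b,c"          == ["a","b","c"]
--   splitBy (/= ' ') "ab cd e" == ["ab","cd","e"]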
| adarqui/DevUtils-Base | src/System/DevUtils/Base/Data/List.hs | gpl-3.0 | 315 | 0 | 11 | 82 | 194 | 107 | 87 | 12 | 2 |
module Jet.Hist
(
histData
, mkHist
) where
import Data.Map (Map, fromList)
import Interface.Histogram1D
basicCut :: String
basicCut = " AND nl >= 1 AND nb >= 3 AND nj >= 4 AND pTj1 > 90 AND met > 150"
histData :: Map String HistFill
histData = fromList [ ("m_bl_theta",
HistFill ("m_bl_theta > 0 AND m_bl_theta < 1000" ++ basicCut)
100 0 1000)
, ("mT",
HistFill ("mT > 0 AND mT < 1000" ++ basicCut)
50 0 1000)
, ("meff",
HistFill ("meff > 0 AND meff < 5000" ++ basicCut)
50 0 5000)
, ("met",
HistFill ("met > 0 AND met < 1000" ++ basicCut)
50 0 1000)
, ("nj",
HistFill ("nj > 0 AND nj < 20" ++ basicCut)
20 0 20)
]
| cbpark/GluinoStopPolarization | lib/Jet/Hist.hs | gpl-3.0 | 1,001 | 0 | 10 | 509 | 185 | 105 | 80 | 24 | 1 |
module Experiment.Planets.Types where
-- import Data.Word
import Linear.V2
import Linear.V3
import Linear.Vector
import Physics
import Render.Sprite
import Render.Surface
-- import Data.Colour
data Planet = Planet { planetName :: String
, planetRadius :: Double
, planetColor :: Color
, planetBody :: Body
} deriving (Show)
data PlanetList = PlanetList { planetListZoom :: Double
, planetListPlanets :: [Planet]
} deriving (Show)
instance HasSprite Planet where
toSprite (Planet _ r c (Body _ (V3 x y _))) =
Sprite (V2 x y) (Circle r Filled) c 1
instance HasSurface PlanetList where
toSurface (PlanetList z ps) =
Surface zero (transScale z) 1 (map (EntSprite . toSprite) ps)
| mstksg/netwire-experiments | src/Experiment/Planets/Types.hs | gpl-3.0 | 851 | 0 | 12 | 279 | 233 | 130 | 103 | 21 | 0 |
{-# LANGUAGE CPP, PackageImports #-}
#if __GLASGOW_HASKELL__ >= 701
{-# LANGUAGE Safe #-}
#endif
module System.IO.Error (
-- * I\/O errors
IOError, -- = IOException
userError, -- :: String -> IOError
mkIOError, -- :: IOErrorType -> String -> Maybe Handle
-- -> Maybe FilePath -> IOError
annotateIOError, -- :: IOError -> String -> Maybe Handle
-- -> Maybe FilePath -> IOError
-- ** Classifying I\/O errors
isAlreadyExistsError, -- :: IOError -> Bool
isDoesNotExistError,
isAlreadyInUseError,
isFullError,
isEOFError,
isIllegalOperation,
isPermissionError,
isUserError,
-- ** Attributes of I\/O errors
ioeGetErrorString, -- :: IOError -> String
ioeGetHandle, -- :: IOError -> Maybe Handle
ioeGetFileName, -- :: IOError -> Maybe FilePath
-- * Types of I\/O error
IOErrorType, -- abstract
alreadyExistsErrorType, -- :: IOErrorType
doesNotExistErrorType,
alreadyInUseErrorType,
fullErrorType,
eofErrorType,
illegalOperationErrorType,
permissionErrorType,
userErrorType,
-- * Throwing and catching I\/O errors
ioError, -- :: IOError -> IO a
catch, -- :: IO a -> (IOError -> IO a) -> IO a
try -- :: IO a -> IO (Either IOError a)
) where
import qualified "base" Control.Exception as Exception
import "base" System.IO.Error hiding (IOError)
import qualified "base" System.IO.Error as Base
import Prelude hiding (IOError,catch)
-- | Errors of type 'IOError' are used by the 'IO' monad. This is an
-- abstract type; the module "System.IO.Error" provides functions to
-- interrogate and construct values of type 'IOError'.
type IOError = Base.IOError
-- SDM: duplicated docs for catch and try, omitting the part about non-IO
-- exceptions.
-- | The 'catch' function establishes a handler that receives any 'IOError'
-- raised in the action protected by 'catch'. An 'IOError' is caught by
-- the most recent handler established by 'catch'. These handlers are
-- not selective: all 'IOError's are caught. Exception propagation
-- must be explicitly provided in a handler by re-raising any unwanted
-- exceptions. For example, in
--
-- > f = catch g (\e -> if IO.isEOFError e then return [] else ioError e)
--
-- the function @f@ returns @[]@ when an end-of-file exception
-- (cf. 'System.IO.Error.isEOFError') occurs in @g@; otherwise, the
-- exception is propagated to the next outer handler.
--
-- When an exception propagates outside the main program, the Haskell
-- system prints the associated 'IOError' value and exits the program.
--
catch :: IO a -> (IOError -> IO a) -> IO a
catch = Exception.catch
-- | The construct 'try' @comp@ exposes IO errors which occur within a
-- computation, and which are not fully handled.
--
try :: IO a -> IO (Either IOError a)
try = Exception.try
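-- A small illustrative use of 'try' together with 'isDoesNotExistError'
-- (the helper name and file path below are arbitrary, not part of this module's API):
--
-- > readOrEmpty :: FilePath -> IO String
-- > readOrEmpty path = do
-- >   r <- try (readFile path)
-- >   case r of
-- >     Left e | isDoesNotExistError e -> return ""
-- >            | otherwise             -> ioError e
-- >     Right s -> return s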
| jwiegley/ghc-release | libraries/haskell2010/System/IO/Error.hs | gpl-3.0 | 3,099 | 0 | 9 | 841 | 256 | 178 | 78 | 38 | 1 |
module Main where
import ADC.Lib
import ADC.Config
import ADC.DB
import ADC.Types.Types
import Data.Pool
import Control.Concurrent
import Control.Monad (forever)
import ADC.Options
main :: IO ()
main = do
opts <- options
cfg <- readCfg $ config opts
withResource (connPool cfg) initMigrations
loadLastModified cfg
forkIO $ forever $ updAucJson cfg
forkIO $ forever $ do
addReqToQ cfg (ReqRealms cfg)
threadDelay $ 120 * oneSecond
forever $ do
forkIO $ runJob cfg
threadDelay oneSecond | gore-v/AuctionParser | app/Main.hs | gpl-3.0 | 518 | 0 | 12 | 104 | 179 | 87 | 92 | 22 | 1 |
-------------------------------------------------------------------------------
-- Dictionaries implemented by using AVL Trees
--
-- Data Structures. Grado en Informática. UMA.
-- Pepe Gallardo, 2012
-------------------------------------------------------------------------------
module AVLDictionary
( Dictionary
, empty
, isEmpty
, size
, insert
, updateOrInsert
, valueOf
, isDefinedAt
, delete
, keys
, values
, keysValues
, foldKeys
, foldValues
, foldKeysValues
) where
import Data.Function(on)
import Data.List(intercalate)
import Data.Maybe(isJust)
--import Test.QuickCheck
import qualified AVL as T
data Rel a b = a :-> b
key :: Rel a b -> a
key (k :-> _) = k
value :: Rel a b -> b
value (_ :-> v) = v
withKey :: a -> Rel a b
withKey k = k :-> undefined
-- Relations are compared by using only their keys
instance (Eq a) => Eq (Rel a b) where
(==) = (==) `on` key
instance (Ord a) => Ord (Rel a b) where
compare = compare `on` key
newtype Dictionary a b = D (T.AVL (Rel a b))
empty :: Dictionary a b
empty = D T.empty
isEmpty :: Dictionary a b -> Bool
isEmpty (D avl) = T.isEmpty avl
size :: Dictionary a b -> Int
size (D avl) = T.size avl
insert :: (Ord a) => a -> b -> Dictionary a b -> Dictionary a b
insert k v (D avl) = D (T.insert (k :-> v) avl)
updateOrInsert :: (Ord a) => a -> (b -> b) -> b -> Dictionary a b -> Dictionary a b
updateOrInsert k f v (D avl) = D (T.updateOrInsert f' (k :-> v) avl)
where f' (k :-> v) = k :-> f v
valueOf :: (Ord a) => a -> Dictionary a b -> Maybe b
valueOf k (D avl) =
case T.search (withKey k) avl of
Nothing -> Nothing
Just (_ :-> v') -> Just v'
isDefinedAt :: (Ord a) => a -> Dictionary a b -> Bool
isDefinedAt k d = isJust (valueOf k d)
delete :: (Ord a) => a -> Dictionary a b -> Dictionary a b
delete k (D avl) = D (T.delete (withKey k) avl)
keys :: Dictionary a b -> [a]
keys (D avl) = map key (T.inOrder avl)
values :: Dictionary a b -> [b]
values (D avl) = map value (T.inOrder avl)
keysValues :: Dictionary a b -> [(a,b)]
keysValues (D avl) = map toTuple (T.inOrder avl)
where toTuple (k :-> v) = (k,v)
foldKeys :: (a -> c -> c) -> c -> Dictionary a b -> c
foldKeys f z (D avl) = T.foldInOrder (f . key) z avl
foldValues :: (b -> c -> c) -> c -> Dictionary a b -> c
foldValues f z (D avl) = T.foldInOrder (f . value) z avl
foldKeysValues :: (a -> b -> c -> c) -> c -> Dictionary a b -> c
foldKeysValues f z (D avl) = T.foldInOrder (\(k :-> v) -> f k v) z avl
instance (Show a, Show b) => Show (Dictionary a b) where
show (D avl) = "AVLDictionary(" ++ intercalate "," (aux (T.inOrder avl)) ++ ")"
where
aux [] = []
aux (x:->y : xys) = (show x++"->"++show y) : aux xys
instance (Eq a, Eq b) => Eq (Dictionary a b) where
d == d' = keysValues d == keysValues d'
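-- A minimal usage sketch (the keys and values below are arbitrary examples):
--
--   let d = insert "two" 2 (insert "one" 1 empty) :: Dictionary String Int
--   valueOf "one" d   == Just 1
--   valueOf "three" d == Nothing
--   keysValues d      == [("one",1),("two",2)]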
| danipozodg/dependency | AVLDictionary.hs | gpl-3.0 | 2,963 | 0 | 13 | 791 | 1,330 | 694 | 636 | 71 | 2 |
module Moonbase.Util.Widget.Chart where
import qualified Graphics.UI.Gtk as Gtk
import qualified Graphics.Rendering.Cairo as Cairo
import qualified System.Glib as Glib
import System.IO.Unsafe (unsafePerformIO)
import Control.Monad
import Control.Lens
import qualified Data.Vector as Vec
import qualified Data.Sequence as Seq
import qualified Data.Map as Map
import Moonbase.Core
import Moonbase.Theme
import Moonbase.Util.Gtk
-- DataTableValue --------------------------------------------------------------
data DataTableValue b = JustValue b
-- DataTableRow ----------------------------------------------------------------
data DataTableRow b = DataTableRow
{ tableColor :: Maybe Color
, tableRow :: Seq.Seq (DataTableValue b) }
newRow :: Maybe Color -> [DataTableValue b] -> DataTableRow b
newRow color values = DataTableRow color $ Seq.fromList values
everyValueM_ :: (Monad m) => (Int -> DataTableValue b -> m ()) -> DataTableRow b -> m ()
everyValueM_ f (DataTableRow _ values)= Seq.foldlWithIndex (const f) (return ()) values
-- DataTable -------------------------------------------------------------------
data (Show a) => DataTable a b = DataTable (Map.Map a (DataTableRow b))
dataFromList :: (Ord a, Show a) => [(a, [b])] -> DataTable a b
dataFromList = DataTable . Map.fromList . genRow . genValues
where
genValues :: [(a, [b])] -> [(a, [DataTableValue b])]
genValues x = traverse . _2 . traverse %~ JustValue $ x
genRow :: [(a, [DataTableValue b])] -> [(a, DataTableRow b)]
genRow s = traverse . _2 %~ newRow Nothing $ s
everyRowM_ :: (Show a, Monad m) => (a -> DataTableRow b -> m ()) -> DataTable a b -> m ()
everyRowM_ f (DataTable table)= Map.foldlWithKey (const f) (return ()) table
-- ChartConfig -----------------------------------------------------------------
data ChartConfig = ChartConfig
{ chartShowLabels :: Bool
, chartDrawFrame :: Bool
, chartMin :: Maybe Int
, chartMax :: Maybe Int }
defaultChartConfig :: ChartConfig
defaultChartConfig = ChartConfig
{ chartShowLabels = True
, chartDrawFrame = False
, chartMin = Nothing
, chartMax = Nothing }
-- ChartRenderer ---------------------------------------------------------------
class ChartRenderer a where
chartRenderWith :: a
-> (Int, Int)
-> ChartConfig
-> DataTable n v
-> Cairo.Render ()
-- Chart - Area ----------------------------------------------------------------
-- Chart - Line ----------------------------------------------------------------
-- Chart - Bar -----------------------------------------------------------------
-- Chart -----------------------------------------------------------------------
data Chart = Chart { chartArea :: Gtk.DrawingArea }
class Gtk.WidgetClass o => ChartClass o
toGraph :: Gtk.DrawingAreaClass o => o -> Chart
toGraph = Chart . Gtk.toDrawingArea
instance Glib.GObjectClass Chart where
toGObject = Glib.toGObject . chartArea
unsafeCastGObject = Chart . Glib.unsafeCastGObject
instance Gtk.WidgetClass Chart
maybeChartConfig :: Gtk.Attr Chart (Maybe ChartConfig)
maybeChartConfig = unsafePerformIO Glib.objectCreateAttribute
{-# NOINLINE maybeChartConfig #-}
maybeChartData :: Gtk.Attr Chart (Maybe (DataTable a b))
maybeChartData = unsafePerformIO Glib.objectCreateAttribute
{-# NOINLINE maybeChartData #-}
chartConfig :: Gtk.Attr Chart ChartConfig
chartConfig = Gtk.newAttr get' set'
where
get' chart = maybe fetchingFailed return =<< Gtk.get chart maybeChartConfig
set' chart config = do
Gtk.set chart [maybeChartConfig Gtk.:= Just config]
ioasync $ Gtk.widgetQueueDraw chart
fetchingFailed = error "Could not fetch chart configuration..."
chartData :: Gtk.Attr Chart (DataTable a b)
chartData = Gtk.newAttr get' set'
where
get' chart = maybe fetchingFailed return =<< Gtk.get chart maybeChartData
set' chart dat = do
Gtk.set chart [maybeChartData Gtk.:= Just dat]
ioasync $ Gtk.widgetQueueDraw chart
fetchingFailed = error "Could not fetch chart data..."
chartNew :: (ChartRenderer renderer) => renderer -> IO Chart
chartNew renderer = do
chart <- Chart <$> Gtk.drawingAreaNew
Gtk.set chart [maybeChartConfig Gtk.:= Just defaultChartConfig]
_ <- Gtk.on chart Gtk.draw $ do
dat <- liftIO $ Gtk.get chart chartData
config <- liftIO $ Gtk.get chart chartConfig
sizes <- liftIO $ getSize chart
void $ chartRenderWith renderer sizes config dat
return chart
where
getSize chart = do
area <- Gtk.widgetGetWindow chart
case area of
Nothing -> return (0,0)
Just win -> do
w <- Gtk.drawWindowGetWidth win
h <- Gtk.drawWindowGetHeight win
return (w, h)
chartNewWithData :: (ChartRenderer renderer) => renderer -> DataTable a b -> IO Chart
chartNewWithData = undefined
-- Testing ---------------------------------------------------------------------
example :: DataTable String Integer
example = dataFromList [("cpu0", [10,23,89,12,90]), ("cpu1", [0,11,29,100,100])]
| felixsch/moonbase-ng | src/Moonbase/Util/Widget/Chart.hs | lgpl-2.1 | 5,126 | 0 | 16 | 956 | 1,451 | 764 | 687 | -1 | -1 |
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Lib where
import Control.Monad.Trans.State
import Data.Char
import Data.Data
import Data.Generics.Aliases (extQ)
import Data.Maybe
import Test.Hspec
{-
-- https://chrisdone.com/posts/data-typeable/
Data.Typeable / Data.Data for generic programming : SYB
do not know what data types are given, but work on them as if you did
Requirements
data types must be instances of Typeable and Data via DeriveDataTypeable
-}
data X = X deriving (Data, Typeable)
{-
Use-case 1: Print the type of something
λ: :i typeOf
typeOf :: Typeable a => a -> TypeRep -- Defined in ‘Data.Typeable’
λ: :i typeRep
typeRep ::
forall k (proxy :: k -> *) (a :: k).
Typeable a =>
proxy a -> TypeRep
-- Defined in ‘Data.Typeable’
λ> :t typeOf 'a'
:: TypeRep
λ> typeOf 'a'
Char
-}
e1 = it "typeOf" $ showsTypeRep (typeOf 'a') "" `shouldBe` "Char"
{-
useful for
- debugging
- generic encoders
- any code that needs an identifier to be associated with some generic value
Use-case 2: Compare the types of two things
-}
e2 = it "typeOf 'a' == typeOf 'b'" $ typeOf 'a' == typeOf 'b' `shouldBe` True
e3 = it "typeOf 'a' == typeOf ()" $ typeOf 'a' == typeOf () `shouldBe` False
{-
Use-case 3: Reifying from generic to concrete
given a generic value, work with the value as the concrete type, not a polymorphic type.
e.g., printing function:
-}
-- Given an Char, return its string rep, else "unknown".
-- Uses 'cast' to convert from polymorphic to concrete value.
-- cast :: (Typeable a, Typeable b) => a -> Maybe b
char :: Typeable a => a -> String
char x = case cast x of
Just (x' :: Char) -> show x'
Nothing -> "unknown"
e4 = it "char 'a'" $ char 'a' `shouldBe` "'a'"
e5 = it "char 5" $ char (5::Int) `shouldBe` "unknown"
{-
------------------------------------------------------------------------------
Data.Data class
functions for traversing/folding over a type’s constructors and fields
Use-case 1: Get the data type (similar to Typeable.TypeRep) : dataTypeOf :: Data a => a -> DataType
-}
e6 = it "dataTypeOf" $ dataTypeRep (dataTypeOf 'a') `shouldBe` dataTypeRep (mkCharType "a")
-- dataTypeOf 'a' : DataType {tycon = "Prelude.Char", datarep = CharRep}
{-
Representations (so-called FooRep) tend to be references which can be reified into more concrete values.
Use-case 2: Inspecting a data type : get a list of constructors : dataTypeConstrs :: DataType -> [Constr]
-}
e7 = it "dataTypeConstrs" $ dataTypeConstrs (dataTypeOf (Nothing :: Maybe ())) `shouldBe`
[ mkConstr (dataTypeOf (Nothing:: Maybe ())) "Nothing" [ ] Prefix
, mkConstr (dataTypeOf (Just ())) "Just" ["a"] Prefix ]
-- constructor at a particular index
e8 = it "indexConstr" $ indexConstr (dataTypeOf (Nothing :: Maybe ())) 2 `shouldBe`
mkConstr (dataTypeOf (Just ())) "Just" ["a"] Prefix
-- is algebraic? (does it have constructors, rather than being a built-in type like Int/Float/etc.)?
e9 = it "isAlgType Just" $ isAlgType (dataTypeOf (Just 'a')) `shouldBe` True
e10 = it "isAlgType 'a'" $ isAlgType (dataTypeOf 'a') `shouldBe` False
{-
Use-case 3: Get the constructor of a value : toConstr :: a -> Constr
Which given any instance of Data will yield a constructor.
-}
e11 = it "toConstr Just" $ show (toConstr (Just 'a')) `shouldBe` "Just"
e12 = it "Constr == Constr" $ toConstr (Just 'a') == toConstr (Nothing :: Maybe Char) `shouldBe`
False
-- get the DataRep of a constructor:
e13 = it "constrType . toConstr" $ show (constrType (toConstr (Just 'a'))) `shouldBe`
"DataType {tycon = \"Maybe\", datarep = AlgRep [Nothing,Just]}"
-- Use-case 4: Get fields of a constructor : constrFields (for debugging/serialization)
data Y = Y { foo :: Int, bar :: Char } deriving (Data, Typeable)
e14 = it "constrFields" $ constrFields (toConstr (Y 0 'a')) `shouldBe` ["foo","bar"]
-- Use-case 5: Make a real value from its constructor : fromConstr :: Data a => Constr -> a
e15 = it "fromConstr" $ (fromConstr (toConstr (Nothing :: Maybe ())) :: Maybe ()) `shouldBe` Nothing
{-
for one arg constructors
fromConstrB :: forall a. Data a => (forall d. Data d => d) -> Constr -> a
Uses rank-N : fromConstrB determines type of d itself, by looking at Constr.
It’s not provided externally by the caller (as it would be if forall d. were at same level as a).
Think of it like scope:
let a = d in let d = … doesn’t make sense: the d is in a lower scope
so cannot write:
fromConstrB (5 :: Int) (toConstr (Just 1 :: Maybe Int)) :: Maybe Int
The Int cannot unify with the d because the quantification is one level lower.
It does not exist outside of the (forall d. Data d => d) (nor can it escape).
Instead, the result is kept generic via a type-class constraint:
λ> :t fromConstr (toConstr (1 :: Int))
fromConstr (toConstr (1 :: Int)) :: Data a => a
-}
e16 = it "fromConstrB" $ (fromConstrB (fromConstr (toConstr (1 :: Int)))
(toConstr (Just 1 :: Maybe Int))
:: Maybe Int) `shouldBe`
Just 1
{-
If > 1 fields:
fromConstrM :: forall m a. (Monad m, Data a)
=> (forall d. Data d => m d) -> Constr -> m a
monadic, so use a state monad to keep an index
-}
data Foo = Foo Int Char deriving (Data, Eq, Typeable, Show)
e17 = it "fromConstrM" $
(evalState
(fromConstrM
(do i <- get
modify (+1)
return
(case i of
0 -> fromConstr (toConstr (5::Int))
1 -> fromConstr (toConstr 'b')
n -> error (show n)))
(toConstr (Foo 4 'a')))
0 :: Foo)
`shouldBe` Foo 5 'b'
{-
Use-case 6: mapping over data structures generically
gmapT :: forall a. Data a
=> (forall b. Data b => b -> b) -> a -> a
like fromConstr* : rank-n type b refers to each type in constructor of type a.
-}
e1819f d = case cast d of -- use cast to reify generic d into concrete Char.
Nothing -> d
Just x -> fromJust (cast (if isUpper x then '!' else x)) -- cast concrete Char back to generic d
e18 = it "gmapT 1" $ gmapT e1819f (Foo 4 'a') `shouldBe` Foo 4 'a'
e19 = it "gmapT 2" $ gmapT e1819f (Foo 4 'A') `shouldBe` Foo 4 '!'
{-
like fromConstrM above, to operate on exact indices of the constructor rather than by type,
use gmapM and a state monad
Use-case 7: generating from data structures generically
walk over values of a data structure, collecting the result
can do via gmapM + state monad or a writer, or:
gmapQ :: forall a. Data a => (forall d. Data d => d -> u) -> a -> [u]
Trivial example:
-}
e20 = it "gmapQ" $ show (gmapQ toConstr (Foo 5 'a')) `shouldBe` "[5,'a']"
{-
examples:
in structured-haskell-mode
- walks over Haskell syntax tree
- collects source spans into list
in 'present package'
in Fay to encode types to JSON with a specific Fay-runtime-specific encoding.
Printer example
-}
gshows :: Data a => a -> ShowS
gshows = render `extQ` (shows :: String -> ShowS)
where
render t
| isTuple = showChar '('
. drop 1
. commaSlots
. showChar ')'
| isNull = showString "[]"
| isList = showChar '['
. drop 1
. listSlots
. showChar ']'
| otherwise = showChar '('
. constructor
. slots
. showChar ')'
where
constructor = showString . showConstr . toConstr $ t
slots = foldr (.) id . gmapQ ((showChar ' ' .) . gshows) $ t
commaSlots = foldr (.) id . gmapQ ((showChar ',' .) . gshows) $ t
listSlots = foldr (.) id . init . gmapQ ((showChar ',' .) . gshows) $ t
isTuple = all (==',') (filter (not . flip elem "()") (constructor ""))
isNull = not (any (not . flip elem "[]") (constructor ""))
isList = constructor "" == "(:)"
data Bar = Bar Char Int deriving (Data, Eq, Typeable) -- NO Show
e21 = it "gshows" $
gshows ([Just (2::Int)], 'c', Bar 'a' 5)
""
`shouldBe` "([(Just (2))],('c'),(Bar ('a') (5)))"
{-
gshows motivation: the GHC API does not have Show instances for many of its data types,
therefore hard to inspect in REPL.
Summary
- query
- cast
- walk or generate
See also: Data.Generics.Aliases
-}
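-- One possible way to run the examples above with hspec; this Spec is only a
-- sketch and assumes the usual hspec wiring (e.g. hspec-discover or a Main
-- calling 'hspec spec') elsewhere in the project.
spec :: Spec
spec = describe "Data.Typeable / Data.Data examples" $
  sequence_ [ e1, e2, e3, e4, e5, e6, e7, e8, e9, e10, e11
            , e12, e13, e14, e15, e16, e17, e18, e19, e20, e21 ]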
| haroldcarr/learn-haskell-coq-ml-etc | haskell/topic/generics/data-typeable/2014-04-chris-done-data-typeable/src/Lib.hs | unlicense | 8,575 | 0 | 22 | 2,211 | 1,618 | 851 | 767 | 89 | 3 |
module ProjectM36.Notifications where
import ProjectM36.Base
import ProjectM36.Error
import ProjectM36.RelationalExpression
import ProjectM36.StaticOptimizer
import qualified Data.Map as M
import Data.Either (isRight)
-- | Returns the notifications which should be triggered based on the transition from the first 'DatabaseContext' to the second 'DatabaseContext'.
notificationChanges :: Notifications -> TransactionGraph -> DatabaseContext -> DatabaseContext -> Notifications
notificationChanges nots graph context1 context2 = M.filter notificationFilter nots
where
notificationFilter (Notification chExpr _ _) = oldChangeEval /= newChangeEval && isRight oldChangeEval
where
oldChangeEval = evalChangeExpr chExpr (mkRelationalExprEnv context1 graph)
newChangeEval = evalChangeExpr chExpr (mkRelationalExprEnv context2 graph)
evalChangeExpr :: RelationalExpr -> RelationalExprEnv -> Either RelationalError Relation
evalChangeExpr chExpr env =
optimizeAndEvalRelationalExpr env chExpr
| agentm/project-m36 | src/lib/ProjectM36/Notifications.hs | unlicense | 1,072 | 0 | 11 | 192 | 189 | 100 | 89 | 15 | 1 |
{- Copyright 2014 David Farrell <[email protected]>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
{-# LANGUAGE LambdaCase #-}
module IRCD.Logic (doLogic) where
import Control.Monad.State
import Hoist
import IRCD.Types
import IRCD.Message
doLogic :: Client -> String -> StateT Env IO ()
doLogic client line = do
actions <- gets envHandlers >>= hoistState . mapM fh >>= return . concat
ts <- gets envTransformers
actions' <- mapM (ft ts ts) actions >>= return . concat
liftIO (print actions')
--mapM_ actionSpec as
--hoistState (mapM (ft ts ts) actions) >>= mapM_ actionSpec . concat
where msg = parseMessage line
fh (GenericHandler spec) = spec (ClientSrc client) msg
fh (CommandHandler cmd spec)
| cmd == command msg = spec (ClientSrc client) msg
| otherwise = return []
ft ts [] action = return [action]
ft ts (Transformer spec _ : xs) action = hoistState (spec action) >>= \case
(False, actions) -> mapM (ft ts ts) actions >>= return . concat
(True, actions) -> do
this <- ft ts xs action
mapM_ actionSpec this
rest <- mapM (ft ts ts) actions >>= return . concat
return (concat [this, rest])
-- An alternative, unfinished version of 'doLogic'; commented out because a
-- second top-level definition would not compile and the transformer fold
-- below was never completed.
--doLogic :: Client -> String -> StateT Env IO ()
--doLogic client line = do
--    actions <- gets envHandlers >>= hoistState . mapM fh >>= return . concat
--    ts <- gets envTransformers
--    map (transformAction ts) actions
--    where transformAction (Transformer spec _ : ts) action = hoistState (spec action)
| shockkolate/lambdircd | src/IRCD/Logic.hs | apache-2.0 | 2,083 | 1 | 19 | 516 | 522 | 255 | 267 | -1 | -1 |
module Combinatorics where
import Control.Lens
triplets :: [a] -> [(a,a,a)]
triplets (a : b : c : xs) = [(a,b,c)] ++ [(x,y,z) | x <- [a,b,c], (y,z) <- pairs xs] ++ [(x,y,z) | (x,y) <- pairs [a,b,c], z <- xs] ++ triplets xs
triplets _ = []
-- return the list of all unordered pairs
pairs :: [a] -> [(a,a)]
pairs (a : b : xs) = [(a,b)] ++ [(x,y) | x <- [a,b], y <- xs] ++ pairs xs
pairs _ = []
boxed :: Num a => a -> (a,a) -> [(a,a)]
boxed sz (x,y) = fmap (\pt -> pt & both *~ sz & _1 +~ x & _2 +~ y) box
box :: Num a => [(a,a)]
box = cartesian [-1,1] [-1,1]
cartesian :: [a] -> [b] -> [(a,b)]
cartesian as bs = [(a,b) | a <- as, b <- bs]
extremes :: Ord t => [t] -> [t]
extremes xs = [minimum xs, maximum xs]
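-- A few illustrative evaluations (example values only):
--
--   pairs [1,2,3]        == [(1,2),(1,3),(2,3)]
--   cartesian [1,2] "ab" == [(1,'a'),(1,'b'),(2,'a'),(2,'b')]
--   extremes [3,1,2]     == [1,3]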
| epeld/zatacka | old/Combinatorics.hs | apache-2.0 | 715 | 0 | 13 | 167 | 527 | 298 | 229 | 16 | 1 |
module Main where
import System.IO ( hSetBuffering
, BufferMode( LineBuffering )
, stdout
)
import Control.Concurrent
import Control.Concurrent.STM
import Control.Concurrent.STM.TChan
-----------------------------------------------------------------
-- An alternative "stateless" Ziria semantics
--
-- The main idea is to make a distinction, on the one hand, between
-- (a) the graph of a Ziria computation, which is just a static
-- description of the program's dataflow, together with some
-- control-flow constraints, and (b) the execution model, which is a
-- particular (dynamic) scheduling strategy for executing the nodes of
-- (a).
--
-- Below is a draft implementation of (a) in which all nodes in the
-- dataflow graph run concurrently. I think it's possible to view the
-- previous tick-process execution model as a particular dynamic
-- scheduling policy applied to this "soup-of-nodes". Note that,
-- while I use the term "dataflow graph", there is actually some
-- control flow hidden inside the implementation of `zbind', in the
-- form of a set of control-flow constraints: When we `zbind z1 f',
-- 'f' must block on the private control channel between `z1' and
-- itself before proceeding.
--
-- NOTE that the soup-of-nodes model has the same problems (wrt.
-- parallel pipe-in-bind, etc.) as the tick-process model. See the
-- comment near 'test_pipe_in_bind' below for more discussion. This
-- file might be a good place to explore possible solutions, but I
-- haven't yet done so.
--
-----------------------------------------------------------------
-- Used to register the threads that are forked as the dataflow graph
-- is unfolded
type Children = MVar [MVar ()]
newtype Zir a b v = Zir { unZir :: -- Special parameter:
Children -- A handle to a global list of forked threads
-- Main parameters:
-> TChan a -- Input channel
-> TChan b -- Output channel
-> TChan v -- Control channel
-- Producing an IO action
-> IO ()
}
ztake :: Zir a b a
ztake = Zir (\_ as _ ctl -> copy_one as ctl)
zemit :: b -> Zir a b ()
zemit b = Zir (\_ _ bs ctl -> atomically $ writeTChan bs b >> writeTChan ctl ())
{-
'zbind z1 (\v -> z2)' is modeled as the dataflow graph that looks
like:
|-------|
/----| z1 |----\
/ |_______| \
a ----/ | \-----> b
\ | v /
\ |---v---| /
\____| z2 |____/
|_______|
where 'a' and 'b' are streams of input/output values and 'v' is the
(private) control channel between 'z1' and 'z2'. 'z2' blocks until the
control channel 'v' is nonempty.
-}
zbind :: Zir a b v1 -> (v1 -> Zir a b v2) -> Zir a b v2
zbind z1 f = Zir go
where go children as bs ctl
= do { -- A private control channel between 'z1' and 'f'
z1_done <- newTChanIO
; unZir z1 children as bs z1_done
-- Fork a new thread to block until z1 sends a control value...
; forkChild children $ unZir (block_on z1_done f) children as bs ctl
-- No need to join here, we're just unfolding the graph...
; return ()
}
zpipe :: Zir a b v -> Zir b c v -> Zir a c v
zpipe z1 z2 = Zir go
where go children as cs ctl
= do { bs <- newTChanIO
; unZir z1 children as bs ctl
; unZir z2 children bs cs ctl
; return ()
}
zrepeat :: Zir a b v -> Zir a b ()
zrepeat z = zbind z (\_ -> zrepeat z)
zmap f = zrepeat $ zbind ztake (\v -> zemit (f v))
test_zir :: Show b => Zir a b v -> [a] -> IO ()
test_zir z as
= do { in_chan <- newTChanIO
; out_chan <- newTChanIO
; ctl_chan <- newTChanIO
; children <- newMVar []
; write_to in_chan as
; forkChild children $ forever $ print_one out_chan
; unZir z children in_chan out_chan ctl_chan
; waitForChildren children
}
where write_to ch [] = return ()
write_to ch (v : vs)
= do { atomically $ writeTChan ch v
; write_to ch vs
}
zinc = zmap (+ 1)
zsum = zrepeat $ zbind ztake (\v1 -> zbind ztake (\v2 -> zemit (v1 + v2)))
-- Tests may not terminate; zmap, etc., produces nodes that zrepeat.
test_bind = test_zir zinc [1..10]
test_pipe = test_zir ((zpipe zinc zinc) `zpipe` (zpipe zinc zinc)) [1..10]
test_zsum = test_zir zsum [1..10]
main =
do { hSetBuffering stdout LineBuffering
; test_zir ((zinc `zpipe` zinc) `zpipe` zsum `zpipe` (zinc `zpipe` zinc)) [1..10]
}
-- The following program may produce result '4 5' (though I haven't
-- yet observed it) due to a pipe-bind race condition (I think the
-- same one that makes an appearance in parpipe-in-bind in the
-- tick-process execution model):
-- When (zemit 0) sends () on the private control channel sitting
-- between (zemit 0 `zpipe` zemit 5) and (zemit 4), it does not
-- synchronize with (zemit 5), which therefore continues to execute,
-- possibly writing 5 to the output channel after (zemit 4) has
-- written 4. My feeling is that pipe-in-bind is a general problem,
-- even with the tick-process model, and probably orthogonal to the
-- particular choice of execution model.
--
-- It's possible to prevent the race condition from occurring by
-- introducing extra synchronizations (before (zemit 5) does anything
-- observable, e.g., writing 5 to the output channel, make it check
-- that (zemit 0) hasn't yet signalled). However, the extra
-- synchronizations probably incur a performance penalty.
--
-- ALSO: none of what's done in this file deals with the related
-- mismatched-inputs problem, in which one component reads more from
-- the input stream than it should have, causing a later-bound
-- component to skip some of its inputs.
test_pipe_in_bind
= test_zir (zbind (zemit 0 `zpipe` zemit 5) (\_ -> zemit 4)) [1..5]
-----------------------------------------------------------------
--
-- Auxiliary functions for programming concurrency
--
-----------------------------------------------------------------
copy_one :: TChan a -> TChan a -> IO ()
copy_one ch_in ch_out
= do { a <- atomically $ readTChan ch_in
; atomically $ writeTChan ch_out a
}
forever :: IO a -> IO a
forever a = a >> forever a
block_on :: TChan v1 -> (v1 -> Zir a b v2) -> Zir a b v2
block_on ch f = Zir go
where go children as bs ctl
= do { v <- atomically $ readTChan ch
; unZir (f v) children as bs ctl
}
print_one ch
= do { mv <- atomically $ tryReadTChan ch
; case mv of
Nothing -> return ()
Just v -> putStrLn (show v)
}
forkChild :: MVar [MVar ()] -> IO () -> IO ThreadId
forkChild children f
= do { mvar <- newEmptyMVar
; cs <- takeMVar children
; putMVar children (mvar : cs)
; forkFinally f (\_ -> putMVar mvar ())
}
waitForChildren :: MVar [MVar ()] -> IO ()
waitForChildren children
= do { cs <- takeMVar children
; case cs of
[] -> return ()
(c : cs') ->
do { putMVar children cs'
; takeMVar c
; waitForChildren children
}
}
| dimitriv/ziria-sem | Haskell/ZirPar.hs | apache-2.0 | 7,526 | 2 | 14 | 2,219 | 1,495 | 787 | 708 | 91 | 2 |
{-# LANGUAGE RecordWildCards #-}
module NLP.LTAG.Tree
(
-- * Tree
Tree (..)
, showTree
, showTree'
, toWord
-- * Path
, Path
, follow
-- * Substitution
, subst
-- * Adjoining
, AuxTree (..)
, adjoin
-- * Derivation
, Deriv
, Trans
, derive
-- * Traversal
, walk
) where
import Control.Applicative ((<$>))
import Control.Arrow (first)
import Control.Monad (foldM)
-- | A tree with values of type 'a' kept in the interior nodes,
-- and values of type 'b' kept in the leaf nodes.
data Tree a b
= INode -- ^ Interior node
{ labelI :: a
, subTrees :: [Tree a b] }
| FNode -- ^ Frontier node
{ labelF :: b }
deriving (Show, Eq, Ord)
-- | List of frontier values.
toWord :: Tree a b -> [b]
toWord t = case t of
INode{..} -> concatMap toWord subTrees
FNode{..} -> [labelF]
-- | Show a tree given the showing functions for label values.
showTree :: (a -> String) -> (b -> String) -> Tree a b -> String
showTree f g = unlines . go
where
go t = case t of
INode{..} -> ("INode " ++ f labelI)
: map (" " ++) (concatMap go subTrees)
FNode{..} -> ["FNode " ++ g labelF]
-- | Like `showTree`, but using the default `Show` instances
-- to present label values.
showTree' :: (Show a, Show b) => Tree a b -> String
showTree' = showTree show show
-- | Replace the tree on the given position.
replaceChild :: Tree a b -> Int -> Tree a b -> Tree a b
replaceChild t@INode{..} k t' = t { subTrees = replace subTrees k t' }
replaceChild _ _ _ = error "replaceChild: frontier node"
---------------------------------------------------------------------
-- Path
---------------------------------------------------------------------
-- | A path can be used to extract a particular tree node.
type Path = [Int]
-- | Follow the path to a particular tree node.
follow :: Path -> Tree a b -> Maybe (Tree a b)
follow = flip $ foldM step
-- | Follow one step of the `Path`.
step :: Tree a b -> Int -> Maybe (Tree a b)
step (FNode _) _ = Nothing
step (INode _ xs) k = xs !? k
---------------------------------------------------------------------
-- Substitution
---------------------------------------------------------------------
-- | Perform substitution on a tree.
subst
:: Path -- ^ Place of the substitution
-> Tree a b -- ^ Tree to be substituted
-> Tree a b -- ^ Original tree
-> Maybe (Tree a b) -- ^ Resulting tree
subst (k:ks) st t = do
replaceChild t k <$> (step t k >>= subst ks st)
subst [] st _ = Just st
---------------------------------------------------------------------
-- Adjoining
---------------------------------------------------------------------
-- | An auxiliary tree.
data AuxTree a b = AuxTree
{ auxTree :: Tree a b
, auxFoot :: Path }
deriving (Show, Eq, Ord)
-- | Perform adjoining operation on a tree.
adjoin
:: Path -- ^ Where to adjoin
-> AuxTree a b -- ^ Tree to be adjoined
-> Tree a b -- ^ Tree with the node to be modified
-> Maybe (Tree a b) -- ^ Resulting tree
adjoin (k:ks) aux t = do
replaceChild t k <$> (step t k >>= adjoin ks aux)
adjoin [] AuxTree{..} t = do
subst auxFoot t auxTree
---------------------------------------------------------------------
-- Derivation
---------------------------------------------------------------------
-- | A derived tree is constructed by applying a sequence of
-- transforming (substitution or adjoining) rules on particular
-- positions of a tree. The `Deriv` sequence represents a
-- derivation process. One could also construct a derivation
-- tree, which to some extent abstracts over the particular order
-- of derivations (when it doesn't matter).
type Deriv a b = [(Path, Trans a b)]
-- | Transformation of a tree.
type Trans a b = Either (Tree a b) (AuxTree a b)
-- | Derive a tree.
derive :: Deriv a b -> Tree a b -> Maybe (Tree a b)
derive =
flip $ foldM m
where
m t (pos, op) = case op of
Left x -> subst pos x t
Right x -> adjoin pos x t
---------------------------------------------------------------------
-- Traversal
---------------------------------------------------------------------
-- | Return all tree paths with corresponding subtrees.
walk :: Tree a b -> [(Path, Tree a b)]
walk =
map (first reverse) . go []
where
go acc n@INode{..} = (acc, n) : concat
[ go (k:acc) t
| (k, t) <- zip [0..] subTrees ]
go acc n@FNode{..} = [(acc, n)]
---------------------------------------------------------------------
-- Misc
---------------------------------------------------------------------
-- | Maybe a k-th element of a list.
(!?) :: [a] -> Int -> Maybe a
(x:xs) !? k
| k > 0 = xs !? (k-1)
| otherwise = Just x
[] !? _ = Nothing
-- | Replace the k-th element of a list. If the given position is outside
-- of the list domain, the returned list will be unchanged. It the given
-- index is negative, the first element will be replaced.
replace :: [a] -> Int -> a -> [a]
replace (x:xs) k y
| k > 0 = x : replace xs (k - 1) y
| otherwise = y : xs
replace [] _ _ = []
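-- A minimal usage sketch with string labels (the labels are arbitrary
-- illustrations, not part of the library):
--
--   let t = INode "S" [FNode "a", INode "X" []]
--   subst [1] (FNode "b") t            ==  Just (INode "S" [FNode "a", FNode "b"])
--   toWord <$> subst [1] (FNode "b") t ==  Just ["a","b"]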
| kawu/ltag | src/NLP/LTAG/Tree.hs | bsd-2-clause | 5,210 | 0 | 13 | 1,248 | 1,382 | 749 | 633 | 93 | 2 |
{-# LANGUAGE FlexibleInstances, RankNTypes #-}
{-# OPTIONS_GHC -Wall #-}
{- |
Module : SystemFI
Description : Abstract syntax and pretty printer for SystemFI.
Copyright : (c) 2014—2015 The F2J Project Developers (given in AUTHORS.txt)
License : BSD3
Maintainer : Zhiyuan Shi <[email protected]>, Haoyuan Zhang <[email protected]>
Stability : experimental
Portability : portable
-}
module SystemFI
( Type(..)
, Expr(..)
, FExp(..)
, Constructor(..)
, Alt(..)
, DataBind(..)
--, TypeContext
--, ValueContext
--, Index
--, alphaEq
, mapTVar
--, mapVar
, fsubstTT
, fsubstTE
, fsubstEE
, joinType
, prettyType
, prettyExpr
) where
import qualified Src
import JavaUtils
import PrettyUtils
import Text.PrettyPrint.ANSI.Leijen
import qualified Language.Java.Pretty (prettyPrint)
import Data.List (intersperse)
import qualified Data.Map as Map
import qualified Data.Set as Set
data Type t
= TVar Src.ReaderId t -- a
| JClass ClassName -- C
| Fun (Type t) (Type t) -- t1 -> t2
| Forall Src.ReaderId (t -> Type t) -- forall a. t
| Product [Type t] -- (t1, ..., tn)
| Unit
| And (Type t) (Type t) -- t1 & t2
| RecordType (Src.Label, Type t)
| Datatype Src.ReaderId [Type t] [Src.ReaderId]
| ListOf (Type t)
-- Warning: If you ever add a case to this, you *must* also define the
-- binary relations on your new case. Namely, add cases for your data
-- constructor in `alphaEq' (below) and `coerce' (in Simplify.hs). Consult
-- George if you're not sure.
data Expr t e
= Var Src.ReaderId e
| Lit Src.Lit
-- Binders we have: λ, fix, letrec, and Λ
| Lam Src.ReaderId (Type t) (e -> Expr t e)
| Fix Src.ReaderId Src.ReaderId
(e -> e -> Expr t e)
(Type t) -- t1
(Type t) -- t
-- fix x (x1 : t1) : t. e Syntax in the tal-toplas paper
-- fix (x : t1 -> t). \x1. e Alternative syntax, which is arguably clear
-- <name>: Fix funcName paraName func paraType returnType
| Let Src.ReaderId (Expr t e) (e -> Expr t e)
| LetRec [Src.ReaderId] -- Names
[Type t] -- Signatures
([e] -> [Expr t e]) -- Bindings
([e] -> Expr t e) -- Body
| BLam Src.ReaderId (t -> Expr t e)
| App (Expr t e) (Expr t e)
| TApp (Expr t e) (Type t)
| If (Expr t e) (Expr t e) (Expr t e)
| PrimOp (Expr t e) Src.Operator (Expr t e)
-- SystemF extension from:
-- https://www.cs.princeton.edu/~dpw/papers/tal-toplas.pdf
-- (no int restriction)
| Tuple [Expr t e] -- Tuple introduction
| Proj Int (Expr t e) -- Tuple elimination
-- Java
| JNew ClassName [Expr t e]
| JMethod (Src.JCallee (Expr t e)) MethodName [Expr t e] ClassName
| JField (Src.JCallee (Expr t e)) FieldName ClassName
| PolyList [Expr t e] (Type t)
| JProxyCall (Expr t e) (Type t)
| Seq [Expr t e]
| Merge (Expr t e) (Expr t e) -- e1 ,, e2
| RecordCon (Src.Label, Expr t e)
| RecordProj (Expr t e) Src.Label
| RecordUpdate (Expr t e) (Src.Label, Expr t e)
| Data Src.RecFlag [DataBind t] (Expr t e)
| Constr (Constructor t) [Expr t e]
| Case (Expr t e) [Alt t e]
| Premise (Expr t e) (Expr t e)
newtype FExp = HideF { revealF :: forall t e. Expr t e }
data Alt t e = ConstrAlt (Constructor t) [Src.ReaderId] ([e] -> Expr t e)
-- | Default (Expr t e)
data DataBind t = DataBind Src.ReaderId [Src.ReaderId] ([t] -> [Constructor t])
data Constructor t = Constructor {constrName :: Src.ReaderId, constrParams :: [Type t]}
-- newtype Typ = HideTyp { revealTyp :: forall t. Type t } -- type of closed types
-- newtype Exp = HideExp { revealExp :: forall t e. Expr t e }
type TypeContext t = Set.Set t
type ValueContext t e = Map.Map e (Type t)
type Index = Int
alphaEq :: Int -> Type Index -> Type Index -> Bool
alphaEq _ (TVar _ a) (TVar _ b) = a == b
alphaEq _ (JClass c) (JClass d) = c == d
alphaEq i (Fun s1 s2) (Fun t1 t2) = alphaEq i s1 t1 && alphaEq i s2 t2
alphaEq i (Forall _ f) (Forall _ g) = alphaEq (succ i) (f i) (g i)
alphaEq i (Product ss) (Product ts) = length ss == length ts && uncurry (alphaEq i) `all` zip ss ts
alphaEq _ Unit Unit = True
alphaEq i (And s1 s2) (And t1 t2) = alphaEq i s1 t1 && alphaEq i s2 t2
alphaEq i (ListOf t1) (ListOf t2) = alphaEq i t1 t2
alphaEq _ _ _ = False
mapTVar :: (Src.ReaderId -> t -> Type t) -> Type t -> Type t
mapTVar g (TVar n a) = g n a
mapTVar _ (JClass c) = JClass c
mapTVar g (Fun t1 t2) = Fun (mapTVar g t1) (mapTVar g t2)
mapTVar g (Forall n f) = Forall n (mapTVar g . f)
mapTVar g (Product ts) = Product (map (mapTVar g) ts)
mapTVar _ Unit = Unit
mapTVar g (ListOf t) = ListOf (mapTVar g t)
mapTVar g (And t1 t2) = And (mapTVar g t1) (mapTVar g t2)
mapTVar g (RecordType (l,t)) = RecordType (l, mapTVar g t)
mapTVar g (Datatype n ts ns) = Datatype n (map (mapTVar g) ts) ns
mapVar :: (Src.ReaderId -> e -> Expr t e) -> (Type t -> Type t) -> Expr t e -> Expr t e
mapVar g _ (Var n a) = g n a
mapVar _ _ (Lit n) = Lit n
mapVar g h (Lam n t f) = Lam n (h t) (mapVar g h . f)
mapVar g h (BLam n f) = BLam n (mapVar g h . f)
mapVar g h (Fix n1 n2 f t1 t) = Fix n1 n2 (\x x1 -> mapVar g h (f x x1)) (h t1) (h t)
mapVar g h (Let n b e) = Let n (mapVar g h b) (mapVar g h . e)
mapVar g h (LetRec ns ts bs e) = LetRec ns (map h ts) (map (mapVar g h) . bs) (mapVar g h . e)
mapVar g h (Data rec databinds e) = Data rec (map mapDatabind databinds) (mapVar g h e)
where mapDatabind (DataBind name params ctrs) = DataBind name params (map mapCtr . ctrs)
mapCtr (Constructor n ts) = Constructor n (map h ts)
mapVar g h (Constr (Constructor n ts) es) = Constr c' (map (mapVar g h) es)
where c' = Constructor n (map h ts)
mapVar g h (Case e alts) = Case (mapVar g h e) (map mapAlt alts)
where mapAlt (ConstrAlt (Constructor n ts) ns f) = ConstrAlt (Constructor n (map h ts)) ns ((mapVar g h) . f)
mapVar g h (App f e) = App (mapVar g h f) (mapVar g h e)
mapVar g h (TApp f t) = TApp (mapVar g h f) (h t)
mapVar g h (If p b1 b2) = If (mapVar g h p) (mapVar g h b1) (mapVar g h b2)
mapVar g h (PrimOp e1 op e2) = PrimOp (mapVar g h e1) op (mapVar g h e2)
mapVar g h (Tuple es) = Tuple (map (mapVar g h) es)
mapVar g h (Proj i e) = Proj i (mapVar g h e)
mapVar g h (JNew c args) = JNew c (map (mapVar g h) args)
mapVar g h (JMethod callee m args c) = JMethod (fmap (mapVar g h) callee) m (map (mapVar g h) args) c
mapVar g h (JField callee f c) = JField (fmap (mapVar g h) callee) f c
mapVar g h (PolyList es t) = PolyList (map (mapVar g h) es) (h t)
mapVar g h (JProxyCall jmethod t) = JProxyCall (mapVar g h jmethod) (h t)
mapVar g h (Seq es) = Seq (map (mapVar g h) es)
mapVar g h (Merge e1 e2) = Merge (mapVar g h e1) (mapVar g h e2)
mapVar g h (RecordCon (l, e)) = RecordCon (l, mapVar g h e)
mapVar g h (RecordProj e l) = RecordProj (mapVar g h e) l
mapVar g h (RecordUpdate e (l1,e1)) = RecordUpdate (mapVar g h e) (l1, mapVar g h e1)
fsubstTT :: Eq a => a -> Type a -> Type a -> Type a
fsubstTT x r = mapTVar (\n a -> if a == x then r else TVar n a)
fsubstTE :: Eq t => t -> Type t -> Expr t e -> Expr t e
fsubstTE x r = mapVar Var (fsubstTT x r)
fsubstEE :: Eq a => a -> Expr t a -> Expr t a -> Expr t a
fsubstEE x r = mapVar (\n a -> if a == x then r else Var n a) id
joinType :: Type (Type t) -> Type t
joinType (TVar n a) = a
joinType (JClass c) = JClass c
joinType (Fun t1 t2) = Fun (joinType t1) (joinType t2)
joinType (Forall n g) = Forall n (joinType . g . TVar "_") -- Right?
joinType (Product ts) = Product (map joinType ts)
joinType Unit = Unit
joinType (And t1 t2) = And (joinType t1) (joinType t2)
joinType (RecordType (l,t)) = RecordType (l, joinType t)
joinType (Datatype n ts ns) = Datatype n (map joinType ts) ns
joinType (ListOf t) = ListOf (joinType t)
-- instance Show (Type Index) where
-- show = show . pretty
-- instance Pretty (Type Index) where
-- pretty = prettyType
prettyType :: Type Index -> Doc
prettyType = prettyType' basePrec 0
prettyType' :: Prec -> Index -> Type Index -> Doc
prettyType' _ _ (TVar n a) = text n
prettyType' p i (Datatype n ts _) = hsep $ text n : map (prettyType' p i) ts
prettyType' p i (Fun t1 t2) =
parensIf p 2
(prettyType' (2,PrecPlus) i t1 <+> arrow <+> prettyType' (2,PrecMinus) i t2)
prettyType' p i (Forall n f) =
parensIf p 1
(forall <+> text n <> dot <+>
prettyType' (1,PrecMinus) (succ i) (f i))
prettyType' _ i (Product ts) = parens $ hcat (intersperse comma (map (prettyType' basePrec i) ts))
prettyType' _ _ Unit = text "Unit"
prettyType' _ _ (JClass "java.lang.Integer") = text "Int"
prettyType' _ _ (JClass "java.lang.String") = text "String"
prettyType' _ _ (JClass "java.lang.Boolean") = text "Bool"
prettyType' _ _ (JClass "java.lang.Character") = text "Char"
prettyType' _ _ (JClass c) = text c
prettyType' p i (ListOf t) = text "List" <+> prettyType' p i t
prettyType' p i (And t1 t2) =
parensIf p 2
(prettyType' (2,PrecMinus) i t1 <+>
ampersand <+>
prettyType' (2,PrecPlus) i t2)
prettyType' _ i (RecordType (l,t)) = lbrace <+> text l <+> colon <+> prettyType' basePrec i t <+> rbrace
-- instance Show (Expr Index Index) where
-- show = show . pretty
-- instance Pretty (Expr Index Index) where
-- pretty = prettyExpr
prettyExpr :: Expr Index Index -> Doc
prettyExpr = prettyExpr' basePrec (0, 0)
prettyExpr' :: Prec -> (Index, Index) -> Expr Index Index -> Doc
prettyExpr' _ _ (Var n _) = text n
prettyExpr' p (i,j) (Lam n t f)
= parensIf p 2 $ group $ hang 2 $
lambda <+> parens (text n <+> colon <+> prettyType' basePrec i t) <> dot <$>
prettyExpr' (2,PrecMinus) (i, j + 1) (f j)
prettyExpr' p (i,j) (App e1 e2)
= parensIf p 4 $
group $ hang 2 $ prettyExpr' (4,PrecMinus) (i,j) e1 <$> prettyExpr' (4,PrecPlus) (i,j) e2
prettyExpr' p (i,j) (BLam n f) =
parensIf p 2
(biglambda <+> text n <> dot <+>
prettyExpr' (2,PrecMinus) (succ i, j) (f i))
prettyExpr' p (i,j) (TApp e t) =
parensIf p 4
(group $ hang 2 $ prettyExpr' (4,PrecMinus) (i,j) e <$> prettyType' (4,PrecPlus) i t)
prettyExpr' _ _ (Lit (Src.Int n)) = integer n
prettyExpr' _ _ (Lit (Src.String s)) = dquotes (string s)
prettyExpr' _ _ (Lit (Src.Bool b)) = bool b
prettyExpr' _ _ (Lit (Src.Char c)) = char c
prettyExpr' _ _ (Lit Src.UnitLit) = unit
prettyExpr' p (i,j) (If e1 e2 e3)
= parensIf p prec
(hang 3 (text "if" <+> prettyExpr' (prec,PrecMinus) (i,j) e1 <+>
text "then" <+> prettyExpr' (prec,PrecMinus) (i,j) e2 <+>
text "else" <+> prettyExpr' (prec,PrecMinus) (i,j) e3))
where prec = 3
prettyExpr' p (i,j) (PrimOp e1 op e2)
= parens (prettyExpr' p (i,j) e1 <+> pretty_op <+> prettyExpr' p (i,j) e2)
where
pretty_op = text (Language.Java.Pretty.prettyPrint java_op)
java_op = case op of
Src.Arith op' -> op'
Src.Compare op' -> op'
Src.Logic op' -> op'
prettyExpr' _ (i,j) (Tuple es) = tupled (map (prettyExpr' basePrec (i,j)) es)
prettyExpr' p i (Proj n e) =
parensIf p 5
(prettyExpr' (5,PrecMinus) i e <> dot <> char '_' <> int n)
prettyExpr' _ (i,j) (JNew c args) =
parens (text "new" <+> text c <> tupled (map (prettyExpr' basePrec (i,j)) args))
prettyExpr' _ i (JMethod name m args _) = methodStr name <> dot <> text m <> tupled (map (prettyExpr' basePrec i) args)
where
methodStr (Src.Static x) = text x
methodStr (Src.NonStatic x) = prettyExpr' (6,PrecMinus) i x
prettyExpr' _ i (JField name f _) = fieldStr name <> dot <> text f
where
fieldStr (Src.Static x) = text x
fieldStr (Src.NonStatic x) = prettyExpr' (6,PrecMinus) i x
prettyExpr' p (i,j) (Seq es) = semiBraces (map (prettyExpr' p (i,j)) es)
prettyExpr' p i (PolyList es t) = brackets . hcat . intersperse comma . map (prettyExpr' p i) $ es
prettyExpr' p (i,j) (Data recflag databinds e) =
text "data" <+> (pretty recflag) <+> (align .vsep) (map prettyDatabind databinds) <$> prettyExpr' p (i,j) e
where prettyCtr i' (Constructor ctrName ctrParams) = (text ctrName) <+> (hsep. map (prettyType' p i') $ ctrParams)
prettyDatabind (DataBind n tvars cons) = hsep (map text $ n:tvars) <+> align
(equals <+> intersperseBar (map (prettyCtr (length tvars + i)) $ cons [i..length tvars +i-1]) <$$> semi)
prettyExpr' p i (JProxyCall jmethod t) = prettyExpr' p i jmethod
prettyExpr' p (i,j) (Fix n1 n2 f t1 t)
= parens $ group $ hang 2 $
text "fix" <+> text n1 <+>
parens (text n2 <+> colon <+> prettyType' p i t1) <+>
colon <+> prettyType' p i t <> dot <$>
prettyExpr' p (i, j + 2) (f j (j + 1))
prettyExpr' p (i,j) (Let n b e) =
parensIf p 2 (text "let" <+> text n <+> equals <+> prettyExpr' basePrec (i, j + 1) b <$> text "in" <$>
prettyExpr' basePrec (i, j + 1) (e j))
prettyExpr' p (i,j) (LetRec names sigs binds body)
= text "let" <+> text "rec" <$>
vcat (intersperse (text "and") (map (indent 2) pretty_binds)) <$>
text "in" <$>
pretty_body
where
n = length sigs
ids = [i..(i+n-1)]
pretty_ids = map text names
pretty_sigs = map (prettyType' p i) sigs
pretty_defs = map (prettyExpr' p (i, j + n)) (binds ids)
pretty_binds = zipWith3 (\pretty_id pretty_sig pretty_def ->
pretty_id <+> colon <+> pretty_sig <$> indent 2 (equals <+> pretty_def))
pretty_ids pretty_sigs pretty_defs
pretty_body = prettyExpr' p (i, j + n) (body ids)
prettyExpr' p (i,j) (Merge e1 e2) =
parens $ prettyExpr' p (i,j) e1 <+> dcomma <+> prettyExpr' p (i,j) e2
prettyExpr' _ (i,j) (RecordCon (l, e)) = lbrace <+> text l <+> equals <+> prettyExpr' basePrec (i,j) e <+> rbrace
prettyExpr' p (i,j) (RecordProj e l) = prettyExpr' p (i,j) e <> dot <> text l
prettyExpr' p (i,j) (RecordUpdate e (l, e1)) = prettyExpr' p (i,j) e <+> text "with" <+> prettyExpr' p (i,j) (RecordCon (l, e1))
prettyExpr' p (i,j) (Constr c es) = parens $ hsep $ text (constrName c) : map (prettyExpr' p (i,j)) es
prettyExpr' p (i,j) (Case e alts) =
hang 2 $ text "case" <+> prettyExpr' p (i,j) e <+> text "of" <$> text " " <+> Src.intersperseBar (map pretty_alt alts)
where pretty_alt (ConstrAlt c ns es) =
let n = length ns
ids = [j..j+n-1]
in hsep (text (constrName c) : map prettyVar ids) <+> arrow <+> prettyExpr' p (i, j+n) (es ids)
| wxzh/fcore | lib/SystemFI.hs | bsd-2-clause | 15,358 | 1 | 20 | 4,464 | 6,852 | 3,505 | 3,347 | 262 | 5 |
{-# LANGUAGE QuasiQuotes #-}
module Test0 () where
import LiquidHaskell
[lq| type Nat = { v:Int | 0 <= v } |]
[lq| ok :: Nat -> Nat |]
ok x = hi
where
hi :: [lq| Nat |]
hi = x
| spinda/liquidhaskell | benchmarks/gsoc15/pos/test2.hs | bsd-3-clause | 191 | 0 | 6 | 57 | 49 | 33 | 16 | 8 | 1 |
-- | Metamodel bridge to Snap permissions wrt roles from
-- 'Snap.Snaplet.Auth'.
module Snap.Snaplet.Redson.Permissions
( SuperUser(..)
-- * Commit checking
, checkWrite
, filterUnreadable
-- * Whole-model functions
, getModelPermissions
, stripModel
)
where
import Data.Lens.Common
import Data.List
import qualified Data.Map as M
import Snap.Core (Method(..))
import Snap.Snaplet.Auth
import Snap.Snaplet.Redson.Snapless.Metamodel
-- | User who has all permissions (used in security-disabled mode).
data SuperUser = SuperUser
-- | Either superuser or logged in user.
type User = Either SuperUser AuthUser
-- | Map between CRUD methods and form permission lenses.
methodMap :: [(Method, Lens Model Permissions)]
methodMap = [ (POST, canCreateM)
, (GET, canReadM)
, (PUT, canUpdateM)
, (DELETE, canDeleteM)
]
-- | Check if provided roles meet the permission requirements.
--
-- Always succeeds when Everyone is required, always fails when Nobody
-- is required; otherwise succeeds when the intersection is non-empty.
--
-- We assume that Role in Snap is defined as a newtype for ByteString
-- (which is what Metamodel uses for its roles).
intersectPermissions :: Permissions -- ^ Required permissions
-> [Role] -- ^ Provided roles
-> Bool
intersectPermissions required provided =
case required of
Everyone -> True
Nobody -> False
Roles rls -> not $ null $ intersect rls $
map (\(Role r) -> r) provided
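-- A few illustrative cases (hypothetical role names; the ByteString
-- literals assume OverloadedStrings):
--
--   intersectPermissions Everyone []                                    == True
--   intersectPermissions Nobody   [Role "admin"]                        == False
--   intersectPermissions (Roles ["admin"]) [Role "admin", Role "user"]  == True
--   intersectPermissions (Roles ["admin"]) [Role "user"]                == False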
-- | Get lists of metamodel fields which are readable and writable by
-- given user.
--
-- 'SuperUser' can read and write all fields.
--
-- TODO: Cache this.
getFieldPermissions :: User -> Model -> ([FieldName], [FieldName])
getFieldPermissions (Left SuperUser) model =
let
f = map name $ fields model
in
(f, f)
getFieldPermissions (Right user) model =
let
-- Get names of metamodel fields for which the given function
-- has non-null intersection with user roles
getFields getRoles =
map name $
filter (\field -> intersectPermissions
(getRoles field)
(userRoles user))
(fields model)
in
(union (getFields _canRead) (getFields _canWrite), getFields _canWrite)
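-- Note that because of the union above, every writable field is also
-- reported as readable; the second component is a subset of the first.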
-- | Get list of CRUD/HTTP methods accessible by user for model.
--
-- 'SuperUser' has all methods.
--
-- POST permission implies PUT.
--
-- TODO: Cache this.
getModelPermissions :: User -> Model -> [Method]
getModelPermissions (Left SuperUser) _ = [POST, GET, PUT, DELETE]
getModelPermissions (Right user) model =
let
askPermission perm = intersectPermissions
(model ^. perm)
(userRoles user)
rawPerms = map fst $
filter (\(_, p) -> askPermission p) methodMap
in
if (elem POST rawPerms)
then rawPerms ++ [PUT]
else rawPerms
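-- For illustration: a user whose roles satisfy only the model's create
-- permission ends up with [POST, PUT], since POST implies PUT above.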
-- | Check permissions to write the given set of model fields.
--
-- 'SuperUser' can always write to any set of fields. When there's no
-- model, always succeed.
checkWrite :: User -> (Maybe Model) -> Commit -> Bool
checkWrite (Left SuperUser) _ _ = True
checkWrite _ Nothing _ = True
checkWrite user@(Right _) (Just model) commit =
let
writables = snd $ getFieldPermissions user model
commitFields = M.keys commit
in
all (flip elem writables) commitFields
-- | Filter out commit fields which are not readable by user.
--
-- 'SuperUser' can always read all fields.
filterUnreadable :: User -> Maybe Model -> Commit -> Commit
filterUnreadable (Left SuperUser) _ commit = commit
filterUnreadable _ Nothing commit = commit
filterUnreadable user@(Right _) (Just model) commit =
let
readables = fst $ getFieldPermissions user model
in
M.filterWithKey (\k _ -> elem k readables) commit
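-- For illustration (hypothetical field names): if the user may read only
-- "name", a commit with keys "name" and "salary" is narrowed to just "name".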
-- | Filter out unreadable fields from model description, set
-- per-field "canEdit" to boolean depending on current user's
-- permissions, set whole-form C-R-U-D permissions to booleans in
-- similar fashion.
stripModel :: User -> Model -> Model
stripModel user model =
let
-- To set permission value to boolean depending on user roles
stripMapper :: Bool -> Permissions
stripMapper b = if b then Everyone else Nobody
(readables, writables) = getFieldPermissions user model
-- Only fields readable by current user
readableFields = filter
(\f -> elem (name f) readables)
(fields model)
-- Fields with boolean canWrite's
strippedFields = map (\f -> f{_canWrite = stripMapper $
elem (name f) writables})
readableFields
formPerms = getModelPermissions user model
-- List of lens setters to be applied to model
boolFormPerms = map (\(m, p) ->
p ^= (stripMapper $ elem m formPerms))
methodMap
in
foldl' (\m f -> f m) model{fields = strippedFields} boolFormPerms
| dzhus/snaplet-redson | src/Snap/Snaplet/Redson/Permissions.hs | bsd-3-clause | 5,310 | 0 | 17 | 1,582 | 1,068 | 591 | 477 | 86 | 3 |
{-# LANGUAGE CPP, ViewPatterns #-}
module TcFlatten(
FlattenMode(..),
flatten, flattenManyNom,
unflatten,
) where
#include "HsVersions.h"
import TcRnTypes
import TcType
import Type
import TcEvidence
import TyCon
import TyCoRep -- performs delicate algorithm on types
import Coercion
import Var
import VarEnv
import NameEnv
import Outputable
import TcSMonad as TcS
import DynFlags( DynFlags )
import Util
import Bag
import Pair
import Control.Monad
import MonadUtils ( zipWithAndUnzipM )
import GHC.Exts ( inline )
import Control.Arrow ( first )
{-
Note [The flattening story]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* A CFunEqCan is either of form
[G] <F xis> : F xis ~ fsk -- fsk is a FlatSkol
[W] x : F xis ~ fmv -- fmv is a unification variable,
-- but untouchable,
-- with MetaInfo = FlatMetaTv
where
x is the witness variable
fsk/fmv is a flatten skolem
xis are function-free
CFunEqCans are always [Wanted], or [Given], never [Derived]
fmv untouchable just means that in a CTyVarEq, say,
fmv ~ Int
we do NOT unify fmv.
* KEY INSIGHTS:
- A given flatten-skolem, fsk, is known a-priori to be equal to
F xis (the LHS), with <F xis> evidence
- A unification flatten-skolem, fmv, stands for the as-yet-unknown
type to which (F xis) will eventually reduce
* Inert set invariant: if F xis1 ~ fsk1, F xis2 ~ fsk2
then xis1 /= xis2
i.e. at most one CFunEqCan with a particular LHS
* Each canonical CFunEqCan x : F xis ~ fsk/fmv has its own
distinct evidence variable x and flatten-skolem fsk/fmv.
Why? We make a fresh fsk/fmv when the constraint is born;
and we never rewrite the RHS of a CFunEqCan.
* Function applications can occur in the RHS of a CTyEqCan. No reason
  not to allow this, and it reduces the amount of flattening that must occur.
* Flattening a type (F xis):
- If we are flattening in a Wanted/Derived constraint
then create new [W] x : F xis ~ fmv
else create new [G] x : F xis ~ fsk
with fresh evidence variable x and flatten-skolem fsk/fmv
- Add it to the work list
- Replace (F xis) with fsk/fmv in the type you are flattening
- You can also add the CFunEqCan to the "flat cache", which
simply keeps track of all the function applications you
have flattened.
- If (F xis) is in the cache already, just
use its fsk/fmv and evidence x, and emit nothing.
- No need to substitute in the flat-cache. It's not the end
of the world if we start with, say (F alpha ~ fmv1) and
      (F Int ~ fmv2) and then find alpha := Int.  That will
simply give rise to fmv1 := fmv2 via [Interacting rule] below
* Canonicalising a CFunEqCan [G/W] x : F xis ~ fsk/fmv
- Flatten xis (to substitute any tyvars; there are already no functions)
cos :: xis ~ flat_xis
- New wanted x2 :: F flat_xis ~ fsk/fmv
- Add new wanted to flat cache
- Discharge x = F cos ; x2
* Unification flatten-skolems, fmv, ONLY get unified when either
a) The CFunEqCan takes a step, using an axiom
b) During un-flattening
They are never unified in any other form of equality.
  For example [W] fmv ~ Int is stuck; we do not unify fmv with Int.
* We *never* substitute in the RHS (i.e. the fsk/fmv) of a CFunEqCan.
That would destroy the invariant about the shape of a CFunEqCan,
and it would risk wanted/wanted interactions. The only way we
learn information about fsk is when the CFunEqCan takes a step.
However we *do* substitute in the LHS of a CFunEqCan (else it
would never get to fire!)
* [Interacting rule]
(inert) [W] x1 : F tys ~ fmv1
(work item) [W] x2 : F tys ~ fmv2
Just solve one from the other:
x2 := x1
fmv2 := fmv1
This just unites the two fsks into one.
Always solve given from wanted if poss.
* For top-level reductions, see Note [Top-level reductions for type functions]
in TcInteract
Why given-fsks, alone, doesn't work
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Could we get away with only flatten meta-tyvars, with no flatten-skolems? No.
[W] w : alpha ~ [F alpha Int]
---> flatten
w = ...w'...
[W] w' : alpha ~ [fsk]
[G] <F alpha Int> : F alpha Int ~ fsk
--> unify (no occurs check)
alpha := [fsk]
But since fsk = F alpha Int, this is really an occurs check error. If
that is all we know about alpha, we will succeed in constraint
solving, producing a program with an infinite type.
Even if we did finally get (g : fsk ~ Bool) by solving (F alpha Int ~ fsk)
using axiom, zonking would not see it, so (x::alpha) sitting in the
tree will get zonked to an infinite type. (Zonking always only does
refl stuff.)
Why flatten-meta-vars, alone doesn't work
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Look at Simple13, with unification-fmvs only
[G] g : a ~ [F a]
---> Flatten given
g' = g;[x]
[G] g' : a ~ [fmv]
[W] x : F a ~ fmv
--> subst a in x
x = F g' ; x2
[W] x2 : F [fmv] ~ fmv
And now we have an evidence cycle between g' and x!
If we used a given instead (ie current story)
[G] g : a ~ [F a]
---> Flatten given
g' = g;[x]
[G] g' : a ~ [fsk]
[G] <F a> : F a ~ fsk
---> Substitute for a
[G] g' : a ~ [fsk]
[G] F (sym g'); <F a> : F [fsk] ~ fsk
Why is it right to treat fmv's differently to ordinary unification vars?
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
f :: forall a. a -> a -> Bool
g :: F Int -> F Int -> Bool
Consider
f (x:Int) (y:Bool)
This gives alpha~Int, alpha~Bool. There is an inconsistency,
but really only one error. SherLoc may tell you which location
is most likely, based on other occurrences of alpha.
Consider
g (x:Int) (y:Bool)
Here we get (F Int ~ Int, F Int ~ Bool), which flattens to
(fmv ~ Int, fmv ~ Bool)
But there are really TWO separate errors.
** We must not complain about Int~Bool. **
Moreover these two errors could arise in entirely unrelated parts of
the code. (In the alpha case, there must be *some* connection (eg
v:alpha in common envt).)
Note [Orientation of equalities with fmvs] and
Note [Unflattening can force the solver to iterate]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here is a bad dilemma concerning flatten meta-vars (fmvs).
This example comes from IndTypesPerfMerge, T10226, T10009.
From the ambiguity check for
f :: (F a ~ a) => a
we get:
[G] F a ~ a
[W] F alpha ~ alpha, alpha ~ a
From Givens we get
[G] F a ~ fsk, fsk ~ a
Now if we flatten we get
[W] alpha ~ fmv, F alpha ~ fmv, alpha ~ a
Now, processing the first one first, choosing alpha := fmv
[W] F fmv ~ fmv, fmv ~ a
And now we are stuck. We must either *unify* fmv := a, or
use the fmv ~ a to rewrite F fmv ~ fmv, so we can make it
meet up with the given F a ~ blah.
Old solution: always put fmvs on the left, so we get
[W] fmv ~ alpha, F alpha ~ fmv, alpha ~ a
BUT this works badly for Trac #10340:
get :: MonadState s m => m s
instance MonadState s (State s) where ...
foo :: State Any Any
foo = get
For 'foo' we instantiate 'get' at types mm ss
[W] MonadState ss mm, [W] mm ss ~ State Any Any
Flatten, and decompose
[W] MonadState ss mm, [W] Any ~ fmv, [W] mm ~ State fmv, [W] fmv ~ ss
Unify mm := State fmv:
[W] MonadState ss (State fmv), [W] Any ~ fmv, [W] fmv ~ ss
If we orient with (untouchable) fmv on the left we are now stuck:
alas, the instance does not match!! But if instead we orient with
(touchable) ss on the left, we unify ss:=fmv, to get
[W] MonadState fmv (State fmv), [W] Any ~ fmv
Now we can solve.
This is a real dilemma. CURRENT SOLUTION:
* Orient with touchable variables on the left. This is the
simple, uniform thing to do. So we would orient ss ~ fmv,
not the other way round.
* In the 'f' example, we get stuck with
F fmv ~ fmv, fmv ~ a
But during unflattening we will fail to dischargeFmv for the
    CFunEqCan F fmv ~ fmv, because fmv := F fmv would make an infinite
type. Instead we unify fmv:=a, AND record that we have done so.
If any such "non-CFunEqCan unifications" take place (in
unflatten_eq in TcFlatten.unflatten) iterate the entire process.
This is done by the 'go' loop in solveSimpleWanteds.
This story does not feel right but it's the best I can do; and the
iteration only happens in pretty obscure circumstances.
************************************************************************
* *
* Other notes (Oct 14)
      I have not revisited these, but I didn't want to discard them
* *
************************************************************************
Try: rewrite wanted with wanted only for fmvs (not all meta-tyvars)
But: fmv ~ alpha[0]
alpha[0] ~ fmv’
Now we don’t see that fmv ~ fmv’, which is a problem for injectivity detection.
Conclusion: rewrite wanteds with wanted for all untouchables.
skol ~ untch, must re-orient to untch ~ skol, so that we can use it to rewrite.
************************************************************************
* *
* Examples
Here is a long series of examples I had to work through
* *
************************************************************************
Simple20
~~~~~~~~
axiom F [a] = [F a]
[G] F [a] ~ a
-->
[G] fsk ~ a
[G] [F a] ~ fsk (nc)
-->
[G] F a ~ fsk2
[G] fsk ~ [fsk2]
[G] fsk ~ a
-->
[G] F a ~ fsk2
[G] a ~ [fsk2]
[G] fsk ~ a
-----------------------------------
----------------------------------------
indexed-types/should_compile/T44984
[W] H (F Bool) ~ H alpha
[W] alpha ~ F Bool
-->
F Bool ~ fmv0
H fmv0 ~ fmv1
H alpha ~ fmv2
fmv1 ~ fmv2
fmv0 ~ alpha
flatten
~~~~~~~
fmv0 := F Bool
fmv1 := H (F Bool)
fmv2 := H alpha
alpha := F Bool
plus
fmv1 ~ fmv2
But these two are equal under the above assumptions.
Solve by Refl.
--- under plan B, namely solve fmv1:=fmv2 eagerly ---
[W] H (F Bool) ~ H alpha
[W] alpha ~ F Bool
-->
F Bool ~ fmv0
H fmv0 ~ fmv1
H alpha ~ fmv2
fmv1 ~ fmv2
fmv0 ~ alpha
-->
F Bool ~ fmv0
H fmv0 ~ fmv1
H alpha ~ fmv2 fmv2 := fmv1
fmv0 ~ alpha
flatten
fmv0 := F Bool
fmv1 := H fmv0 = H (F Bool)
retain H alpha ~ fmv2
because fmv2 has been filled
alpha := F Bool
----------------------------
indexed-types/should_failt/T4179
after solving
[W] fmv_1 ~ fmv_2
[W] A3 (FCon x) ~ fmv_1 (CFunEqCan)
[W] A3 (x (aoa -> fmv_2)) ~ fmv_2 (CFunEqCan)
----------------------------------------
indexed-types/should_fail/T7729a
a) [W] BasePrimMonad (Rand m) ~ m1
b) [W] tt m1 ~ BasePrimMonad (Rand m)
---> process (b) first
  BasePrimMonad (Rand m) ~ fmv_atH
fmv_atH ~ tt m1
---> now process (a)
m1 ~ s_atH ~ tt m1 -- An obscure occurs check
----------------------------------------
typecheck/TcTypeNatSimple
Original constraint
[W] x + y ~ x + alpha (non-canonical)
==>
[W] x + y ~ fmv1 (CFunEqCan)
  [W] x + alpha ~ fmv2    (CFunEqCan)
[W] fmv1 ~ fmv2 (CTyEqCan)
(sigh)
----------------------------------------
indexed-types/should_fail/GADTwrong1
[G] Const a ~ ()
==> flatten
[G] fsk ~ ()
work item: Const a ~ fsk
==> fire top rule
[G] fsk ~ ()
work item fsk ~ ()
Surely the work item should rewrite to () ~ ()? Well, maybe not;
it's a very special case.  More generally, our givens look like
F a ~ Int, where (F a) is not reducible.
----------------------------------------
indexed_types/should_fail/T8227:
Why using a different can-rewrite rule in CFunEqCan heads
does not work.
Assuming NOT rewriting wanteds with wanteds
Inert: [W] fsk_aBh ~ fmv_aBk -> fmv_aBk
[W] fmv_aBk ~ fsk_aBh
[G] Scalar fsk_aBg ~ fsk_aBh
[G] V a ~ f_aBg
Worklist includes [W] Scalar fmv_aBi ~ fmv_aBk
  fmv_aBi, fmv_aBk are flatten unification variables
Work item: [W] V fsk_aBh ~ fmv_aBi
Note that the inert wanteds are cyclic, because we do not rewrite
wanteds with wanteds.
Then we go into a loop when we normalise the work-item, because we
use rewriteOrSame on the argument of V.
Conclusion: Don't make canRewrite context specific; instead use
[W] a ~ ty to rewrite a wanted iff 'a' is a unification variable.
----------------------------------------
Here is a somewhat similar case:
type family G a :: *
blah :: (G a ~ Bool, Eq (G a)) => a -> a
blah = error "urk"
foo x = blah x
For foo we get
[W] Eq (G a), G a ~ Bool
Flattening
[W] G a ~ fmv, Eq fmv, fmv ~ Bool
We can't simplify away the Eq Bool unless we substitute for fmv.
Maybe that doesn't matter: we would still be left with unsolved
G a ~ Bool.
--------------------------
Trac #9318 has a very simple program leading to
[W] F Int ~ Int
[W] F Int ~ Bool
We don't want to get "Error Int~Bool". But if fmv's can rewrite
wanteds, we will
[W] fmv ~ Int
[W] fmv ~ Bool
--->
[W] Int ~ Bool
************************************************************************
* *
* FlattenEnv & FlatM
* The flattening environment & monad
* *
************************************************************************
-}
type FlatWorkListRef = TcRef [Ct] -- See Note [The flattening work list]
data FlattenEnv
= FE { fe_mode :: FlattenMode
, fe_loc :: CtLoc -- See Note [Flattener CtLoc]
, fe_flavour :: CtFlavour
, fe_eq_rel :: EqRel -- See Note [Flattener EqRels]
, fe_work :: FlatWorkListRef } -- See Note [The flattening work list]
data FlattenMode -- Postcondition for all three: inert wrt the type substitution
= FM_FlattenAll -- Postcondition: function-free
| FM_SubstOnly -- See Note [Flattening under a forall]
-- | FM_Avoid TcTyVar Bool -- See Note [Lazy flattening]
-- -- Postcondition:
-- -- * tyvar is only mentioned in result under a rigid path
-- -- e.g. [a] is ok, but F a won't happen
-- -- * If flat_top is True, top level is not a function application
-- -- (but under type constructors is ok e.g. [F a])
mkFlattenEnv :: FlattenMode -> CtEvidence -> FlatWorkListRef -> FlattenEnv
mkFlattenEnv fm ctev ref = FE { fe_mode = fm
, fe_loc = ctEvLoc ctev
, fe_flavour = ctEvFlavour ctev
, fe_eq_rel = ctEvEqRel ctev
, fe_work = ref }
-- | The 'FlatM' monad is a wrapper around 'TcS' with the following
-- extra capabilities: (1) it offers access to a 'FlattenEnv';
-- and (2) it maintains the flattening worklist.
-- See Note [The flattening work list].
newtype FlatM a
= FlatM { runFlatM :: FlattenEnv -> TcS a }
instance Monad FlatM where
m >>= k = FlatM $ \env ->
do { a <- runFlatM m env
; runFlatM (k a) env }
instance Functor FlatM where
fmap = liftM
instance Applicative FlatM where
pure x = FlatM $ const (pure x)
(<*>) = ap
liftTcS :: TcS a -> FlatM a
liftTcS thing_inside
= FlatM $ const thing_inside
emitFlatWork :: Ct -> FlatM ()
-- See Note [The flattening work list]
emitFlatWork ct = FlatM $ \env -> updTcRef (fe_work env) (ct :)
runFlatten :: FlattenMode -> CtEvidence -> FlatM a -> TcS a
-- Run thing_inside (which does flattening), and put all
-- the work it generates onto the main work list
-- See Note [The flattening work list]
-- NB: The returned evidence is always the same as the original, but with
-- perhaps a new CtLoc
runFlatten mode ev thing_inside
= do { flat_ref <- newTcRef []
; let fmode = mkFlattenEnv mode ev flat_ref
; res <- runFlatM thing_inside fmode
; new_flats <- readTcRef flat_ref
; updWorkListTcS (add_flats new_flats)
; return res }
where
add_flats new_flats wl
= wl { wl_funeqs = add_funeqs new_flats (wl_funeqs wl) }
add_funeqs [] wl = wl
add_funeqs (f:fs) wl = add_funeqs fs (f:wl)
-- add_funeqs fs ws = reverse fs ++ ws
-- e.g. add_funeqs [f1,f2,f3] [w1,w2,w3,w4]
-- = [f3,f2,f1,w1,w2,w3,w4]
traceFlat :: String -> SDoc -> FlatM ()
traceFlat herald doc = liftTcS $ traceTcS herald doc
getFlatEnvField :: (FlattenEnv -> a) -> FlatM a
getFlatEnvField accessor
= FlatM $ \env -> return (accessor env)
getEqRel :: FlatM EqRel
getEqRel = getFlatEnvField fe_eq_rel
getRole :: FlatM Role
getRole = eqRelRole <$> getEqRel
getFlavour :: FlatM CtFlavour
getFlavour = getFlatEnvField fe_flavour
getFlavourRole :: FlatM CtFlavourRole
getFlavourRole
= do { flavour <- getFlavour
; eq_rel <- getEqRel
; return (flavour, eq_rel) }
getMode :: FlatM FlattenMode
getMode = getFlatEnvField fe_mode
getLoc :: FlatM CtLoc
getLoc = getFlatEnvField fe_loc
checkStackDepth :: Type -> FlatM ()
checkStackDepth ty
= do { loc <- getLoc
; liftTcS $ checkReductionDepth loc ty }
-- | Change the 'EqRel' in a 'FlatM'.
setEqRel :: EqRel -> FlatM a -> FlatM a
setEqRel new_eq_rel thing_inside
= FlatM $ \env ->
if new_eq_rel == fe_eq_rel env
then runFlatM thing_inside env
else runFlatM thing_inside (env { fe_eq_rel = new_eq_rel })
-- | Change the 'FlattenMode' in a 'FlattenEnv'.
setMode :: FlattenMode -> FlatM a -> FlatM a
setMode new_mode thing_inside
= FlatM $ \env ->
if new_mode `eq` fe_mode env
then runFlatM thing_inside env
else runFlatM thing_inside (env { fe_mode = new_mode })
where
FM_FlattenAll `eq` FM_FlattenAll = True
FM_SubstOnly `eq` FM_SubstOnly = True
-- FM_Avoid tv1 b1 `eq` FM_Avoid tv2 b2 = tv1 == tv2 && b1 == b2
_ `eq` _ = False
-- | Use when flattening kinds/kind coercions. See
-- Note [No derived kind equalities] in TcCanonical
flattenKinds :: FlatM a -> FlatM a
flattenKinds thing_inside
= FlatM $ \env ->
let kind_flav = case fe_flavour env of
Given -> Given
_ -> Wanted
in
runFlatM thing_inside (env { fe_eq_rel = NomEq, fe_flavour = kind_flav })
bumpDepth :: FlatM a -> FlatM a
bumpDepth (FlatM thing_inside)
= FlatM $ \env -> do { let env' = env { fe_loc = bumpCtLocDepth (fe_loc env) }
; thing_inside env' }
-- Flatten skolems
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
newFlattenSkolemFlatM :: TcType -- F xis
-> FlatM (CtEvidence, Coercion, TcTyVar) -- [W] x:: F xis ~ fsk
newFlattenSkolemFlatM ty
= do { flavour <- getFlavour
; loc <- getLoc
; liftTcS $ newFlattenSkolem flavour loc ty }
{-
Note [The flattening work list]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The "flattening work list", held in the fe_work field of FlattenEnv,
is a list of CFunEqCans generated during flattening. The key idea
is this.  Consider flattening (Eq (F (G Int) (H Bool))):
* The flattener recursively calls itself on sub-terms before building
the main term, so it will encounter the terms in order
G Int
H Bool
F (G Int) (H Bool)
flattening to sub-goals
w1: G Int ~ fuv0
w2: H Bool ~ fuv1
w3: F fuv0 fuv1 ~ fuv2
* Processing w3 first is BAD, because we can't reduce it, so it'll
get put into the inert set, and later kicked out when w1, w2 are
solved. In Trac #9872 this led to inert sets containing hundreds
of suspended calls.
* So we want to process w1, w2 first.
* So you might think that we should just use a FIFO deque for the work-list,
  so that adding goals in order w1,w2,w3 would mean we processed
w1 first.
* BUT suppose we have 'type instance G Int = H Char'. Then processing
w1 leads to a new goal
w4: H Char ~ fuv0
We do NOT want to put that on the far end of a deque! Instead we want
to put it at the *front* of the work-list so that we continue to work
on it.
So the work-list structure is this:
* The wl_funeqs (in TcS) is a LIFO stack; we push new goals (such as w4) on
top (extendWorkListFunEq), and take new work from the top
(selectWorkItem).
* When flattening, emitFlatWork pushes new flattening goals (like
w1,w2,w3) onto the flattening work list, fe_work, another
push-down stack.
* When we finish flattening, we *reverse* the fe_work stack
onto the wl_funeqs stack (which brings w1 to the top).
The function runFlatten initialises the fe_work stack, and reverses
it onto wl_fun_eqs at the end.
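To recap the example above: emitFlatWork pushes w1, then w2, then w3, so
fe_work ends up as [w3,w2,w1]; reversing that onto wl_funeqs puts w1 back
on top, which is exactly the order we want to process the goals in.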
Note [Flattener EqRels]
~~~~~~~~~~~~~~~~~~~~~~~
When flattening, we need to know which equality relation -- nominal
or representation -- we should be respecting. The only difference is
that we rewrite variables by representational equalities when fe_eq_rel
is ReprEq, and that we unwrap newtypes when flattening w.r.t.
representational equality.
Note [Flattener CtLoc]
~~~~~~~~~~~~~~~~~~~~~~
The flattener does eager type-family reduction.
Type families might loop, and we
don't want GHC to do so. A natural solution is to have a bounded depth
to these processes. A central difficulty is that such a solution isn't
quite compositional. For example, say it takes F Int 10 steps to get to Bool.
How many steps does it take to get from F Int -> F Int to Bool -> Bool?
10? 20? What about getting from Const Char (F Int) to Char? 11? 1? Hard to
know and hard to track. So, we punt, essentially. We store a CtLoc in
the FlattenEnv and just update the environment when recurring. In the
TyConApp case, where there may be multiple type families to flatten,
we just copy the current CtLoc into each branch. If any branch hits the
stack limit, then the whole thing fails.
A consequence of this is that setting the stack limits appropriately
will be essentially impossible. So, the official recommendation if a
stack limit is hit is to disable the check entirely. Otherwise, there
will be baffling, unpredictable errors.
Note [Lazy flattening]
~~~~~~~~~~~~~~~~~~~~~~
The idea of FM_Avoid mode is to flatten less aggressively. If we have
a ~ [F Int]
there seems to be no great merit in lifting out (F Int). But if it was
a ~ [G a Int]
then we *do* want to lift it out, in case (G a Int) reduces to Bool, say,
which gets rid of the occurs-check problem. (For the flat_top Bool, see
comments above and at call sites.)
HOWEVER, the lazy flattening actually seems to make type inference go
*slower*, not faster. perf/compiler/T3064 is a case in point; it gets
*dramatically* worse with FM_Avoid. I think it may be because
floating the types out means we normalise them, and that often makes
them smaller and perhaps allows more re-use of previously solved
goals. But to be honest I'm not absolutely certain, so I am leaving
FM_Avoid in the code base. What I'm removing is the unique place
where it is *used*, namely in TcCanonical.canEqTyVar.
See also Note [Conservative unification check] in TcUnify, which gives
other examples where lazy flattening caused problems.
Bottom line: FM_Avoid is unused for now (Nov 14).
Note: T5321Fun got faster when I disabled FM_Avoid
      T5837 did too, but it's pathological anyway
Note [Phantoms in the flattener]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have
data Proxy p = Proxy
and we're flattening (Proxy ty) w.r.t. ReprEq. Then, we know that `ty`
is really irrelevant -- it will be ignored when solving for representational
equality later on. So, we omit flattening `ty` entirely. This may
violate the expectation of "xi"s for a bit, but the canonicaliser will
soon throw out the phantoms when decomposing a TyConApp. (Or, the
canonicaliser will emit an insoluble, in which case the unflattened version
yields a better error message anyway.)
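For instance, when flattening (Proxy (F Int)) w.r.t. ReprEq, the (F Int)
argument is merely zonked and paired with a phantom Refl coercion; no
(F Int ~ fmv) CFunEqCan is emitted for it.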
-}
{- *********************************************************************
* *
* Externally callable flattening functions *
* *
* They are all wrapped in runFlatten, so their *
* flattening work gets put into the work list *
* *
********************************************************************* -}
flatten :: FlattenMode -> CtEvidence -> TcType
-> TcS (Xi, TcCoercion)
flatten mode ev ty
= runFlatten mode ev (flatten_one ty)
flattenManyNom :: CtEvidence -> [TcType] -> TcS ([Xi], [TcCoercion])
-- Externally-callable, hence runFlatten
-- Flatten a bunch of types all at once; in fact they are
-- always the arguments of a saturated type-family, so
-- ctEvFlavour ev = Nominal
-- and we want to flatten all at nominal role
flattenManyNom ev tys
= runFlatten FM_FlattenAll ev (flatten_many_nom tys)
{- *********************************************************************
* *
* The main flattening functions
* *
********************************************************************* -}
{- Note [Flattening]
~~~~~~~~~~~~~~~~~~~~
flatten ty ==> (xi, co)
where
xi has no type functions, unless they appear under ForAlls
co :: xi ~ ty
Note that it is flatten's job to flatten *every type function it sees*.
flatten is only called on *arguments* to type functions, by canEqGiven.
Flattening also:
* zonks, removing any metavariables, and
* applies the substitution embodied in the inert set
Because flattening zonks and the returned coercion ("co" above) is also
zonked, it's possible that (co :: xi ~ ty) isn't quite true, as ty (the
input to the flattener) might not be zonked. After zonking everything,
(co :: xi ~ ty) will be true, however. It is for this reason that we
occasionally have to explicitly zonk, when (co :: xi ~ ty) is important
even before we zonk the whole program. (In particular, this is why the
zonk in flattenTyVar is necessary.)
Flattening a type also means flattening its kind. In the case of a type
variable whose kind mentions a type family, this might mean that the result
of flattening has a cast in it.
Recall that in comments we use alpha[flat = ty] to represent a
flattening skolem variable alpha which has been generated to stand in
for ty.
----- Example of flattening a constraint: ------
flatten (List (F (G Int))) ==> (xi, cc)
where
xi = List alpha
cc = { G Int ~ beta[flat = G Int],
F beta ~ alpha[flat = F beta] }
Here
* alpha and beta are 'flattening skolem variables'.
* All the constraints in cc are 'given', and all their coercion terms
are the identity.
NB: Flattening Skolems only occur in canonical constraints, which
are never zonked, so we don't need to worry about zonking doing
accidental unflattening.
Note that we prefer to leave type synonyms unexpanded when possible,
so when the flattener encounters one, it first asks whether its
transitive expansion contains any type function applications. If so,
it expands the synonym and proceeds; if not, it simply returns the
unexpanded synonym.
Note [flatten_many performance]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In programs with lots of type-level evaluation, flatten_many becomes
part of a tight loop. For example, see test perf/compiler/T9872a, which
calls flatten_many a whopping 7,106,808 times. It is thus important
that flatten_many be efficient.
Performance testing showed that the current implementation is indeed
efficient. It's critically important that zipWithAndUnzipM be
specialized to TcS, and it's also quite helpful to actually `inline`
it. On test T9872a, here are the allocation stats (Dec 16, 2014):
* Unspecialized, uninlined: 8,472,613,440 bytes allocated in the heap
* Specialized, uninlined: 6,639,253,488 bytes allocated in the heap
* Specialized, inlined: 6,281,539,792 bytes allocated in the heap
To improve performance even further, flatten_many_nom is split off
from flatten_many, as nominal equality is the common case. This would
be natural to write using mapAndUnzipM, but even inlined, that function
is not as performant as a hand-written loop.
* mapAndUnzipM, inlined: 7,463,047,432 bytes allocated in the heap
* hand-written recursion: 5,848,602,848 bytes allocated in the heap
If you make any change here, pay close attention to the T9872{a,b,c} tests
and T5321Fun.
If we need to make this yet more performant, a possible way forward is to
duplicate the flattener code for the nominal case, and make that case
faster. This doesn't seem quite worth it, yet.
-}
flatten_many :: [Role] -> [Type] -> FlatM ([Xi], [Coercion])
-- Coercions :: Xi ~ Type, at roles given
-- Returns True iff (no flattening happened)
-- NB: The EvVar inside the 'fe_ev :: CtEvidence' is unused,
-- we merely want (a) Given/Solved/Derived/Wanted info
-- (b) the GivenLoc/WantedLoc for when we create new evidence
flatten_many roles tys
-- See Note [flatten_many performance]
= inline zipWithAndUnzipM go roles tys
where
go Nominal ty = setEqRel NomEq $ flatten_one ty
go Representational ty = setEqRel ReprEq $ flatten_one ty
go Phantom ty = -- See Note [Phantoms in the flattener]
do { ty <- liftTcS $ zonkTcType ty
; return ( ty, mkReflCo Phantom ty ) }
-- | Like 'flatten_many', but assumes that every role is nominal.
flatten_many_nom :: [Type] -> FlatM ([Xi], [Coercion])
flatten_many_nom [] = return ([], [])
-- See Note [flatten_many performance]
flatten_many_nom (ty:tys)
= do { (xi, co) <- flatten_one ty
; (xis, cos) <- flatten_many_nom tys
; return (xi:xis, co:cos) }
------------------
flatten_one :: TcType -> FlatM (Xi, Coercion)
-- Flatten a type to get rid of type function applications, returning
-- the new type-function-free type, and a collection of new equality
-- constraints. See Note [Flattening] for more detail.
--
-- Postcondition: Coercion :: Xi ~ TcType
-- The role on the result coercion matches the EqRel in the FlattenEnv
flatten_one xi@(LitTy {})
= do { role <- getRole
; return (xi, mkReflCo role xi) }
flatten_one (TyVarTy tv)
= flattenTyVar tv
flatten_one (AppTy ty1 ty2)
= do { (xi1,co1) <- flatten_one ty1
; eq_rel <- getEqRel
; case (eq_rel, nextRole xi1) of
(NomEq, _) -> flatten_rhs xi1 co1 NomEq
(ReprEq, Nominal) -> flatten_rhs xi1 co1 NomEq
(ReprEq, Representational) -> flatten_rhs xi1 co1 ReprEq
(ReprEq, Phantom) ->
do { ty2 <- liftTcS $ zonkTcType ty2
; return ( mkAppTy xi1 ty2
, mkAppCo co1 (mkNomReflCo ty2)) } }
where
flatten_rhs xi1 co1 eq_rel2
= do { (xi2,co2) <- setEqRel eq_rel2 $ flatten_one ty2
; role1 <- getRole
; let role2 = eqRelRole eq_rel2
; traceFlat "flatten/appty"
(ppr ty1 $$ ppr ty2 $$ ppr xi1 $$
ppr xi2 $$ ppr role1 $$ ppr role2)
; return ( mkAppTy xi1 xi2
, mkTransAppCo role1 co1 xi1 ty1
role2 co2 xi2 ty2
role1 ) } -- output should match fmode
flatten_one (TyConApp tc tys)
-- Expand type synonyms that mention type families
-- on the RHS; see Note [Flattening synonyms]
| Just (tenv, rhs, tys') <- expandSynTyCon_maybe tc tys
, let expanded_ty = mkAppTys (substTy (mkTvSubstPrs tenv) rhs) tys'
= do { mode <- getMode
; let used_tcs = tyConsOfType rhs
; case mode of
FM_FlattenAll | anyNameEnv isTypeFamilyTyCon used_tcs
-> flatten_one expanded_ty
_ -> flatten_ty_con_app tc tys }
-- Otherwise, it's a type function application, and we have to
-- flatten it away as well, and generate a new given equality constraint
-- between the application and a newly generated flattening skolem variable.
| isTypeFamilyTyCon tc
= flatten_fam_app tc tys
-- For * a normal data type application
-- * data family application
-- we just recursively flatten the arguments.
| otherwise
-- FM_Avoid stuff commented out; see Note [Lazy flattening]
-- , let fmode' = case fmode of -- Switch off the flat_top bit in FM_Avoid
-- FE { fe_mode = FM_Avoid tv _ }
-- -> fmode { fe_mode = FM_Avoid tv False }
-- _ -> fmode
= flatten_ty_con_app tc tys
flatten_one (ForAllTy (Anon ty1) ty2)
= do { (xi1,co1) <- flatten_one ty1
; (xi2,co2) <- flatten_one ty2
; role <- getRole
; return (mkFunTy xi1 xi2, mkFunCo role co1 co2) }
flatten_one ty@(ForAllTy (Named {}) _)
-- TODO (RAE): This is inadequate, as it doesn't flatten the kind of
-- the bound tyvar. Doing so will require carrying around a substitution
-- and the usual substTyVarBndr-like silliness. Argh.
-- We allow for-alls when, but only when, no type function
-- applications inside the forall involve the bound type variables.
= do { let (bndrs, rho) = splitNamedPiTys ty
tvs = map (binderVar "flatten") bndrs
; (rho', co) <- setMode FM_SubstOnly $ flatten_one rho
-- Substitute only under a forall
-- See Note [Flattening under a forall]
; return (mkForAllTys bndrs rho', mkHomoForAllCos tvs co) }
flatten_one (CastTy ty g)
= do { (xi, co) <- flatten_one ty
; (g', _) <- flatten_co g
; return (mkCastTy xi g', castCoercionKind co g' g) }
flatten_one (CoercionTy co) = first mkCoercionTy <$> flatten_co co
-- | "Flatten" a coercion. Really, just flatten the types that it coerces
-- between and then use transitivity.
flatten_co :: Coercion -> FlatM (Coercion, Coercion)
flatten_co co
= do { let (Pair ty1 ty2, role) = coercionKindRole co
; co <- liftTcS $ zonkCo co -- squeeze out any metavars from the original
; (co1, co2) <- flattenKinds $
do { (_, co1) <- flatten_one ty1
; (_, co2) <- flatten_one ty2
; return (co1, co2) }
; let co' = downgradeRole role Nominal co1 `mkTransCo`
co `mkTransCo`
mkSymCo (downgradeRole role Nominal co2)
-- kco :: (ty1' ~r ty2') ~N (ty1 ~r ty2)
kco = mkTyConAppCo Nominal (equalityTyCon role)
[ mkKindCo co1, mkKindCo co2, co1, co2 ]
; traceFlat "flatten_co" (vcat [ ppr co, ppr co1, ppr co2, ppr co' ])
; env_role <- getRole
; return (co', mkProofIrrelCo env_role kco co' co) }
flatten_ty_con_app :: TyCon -> [TcType] -> FlatM (Xi, Coercion)
flatten_ty_con_app tc tys
= do { eq_rel <- getEqRel
; let role = eqRelRole eq_rel
; (xis, cos) <- case eq_rel of
NomEq -> flatten_many_nom tys
ReprEq -> flatten_many (tyConRolesRepresentational tc) tys
; return (mkTyConApp tc xis, mkTyConAppCo role tc cos) }
{-
Note [Flattening synonyms]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Not expanding synonyms aggressively improves error messages, and
keeps types smaller. But we need to take care.
Suppose
type T a = a -> a
and we want to flatten the type (T (F a)). Then we can safely flatten
the (F a) to a skolem, and return (T fsk). We don't need to expand the
synonym. This works because TcTyConAppCo can deal with synonyms
(unlike TyConAppCo), see Note [TcCoercions] in TcEvidence.
But (Trac #8979) for
type T a = (F a, a) where F is a type function
we must expand the synonym in (say) T Int, to expose the type function
to the flattener.
Note [Flattening under a forall]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Under a forall, we
(a) MUST apply the inert substitution
(b) MUST NOT flatten type family applications
Hence FMSubstOnly.
For (a) consider c ~ a, a ~ T (forall b. (b, [c]))
If we don't apply the c~a substitution to the second constraint
we won't see the occurs-check error.
For (b) consider (a ~ forall b. F a b), we don't want to flatten
to (a ~ forall b.fsk, F a b ~ fsk)
because now the 'b' has escaped its scope. We'd have to flatten to
(a ~ forall b. fsk b, forall b. F a b ~ fsk b)
and we have not begun to think about how to make that work!
************************************************************************
* *
Flattening a type-family application
* *
************************************************************************
-}
flatten_fam_app :: TyCon -> [TcType] -> FlatM (Xi, Coercion)
-- flatten_fam_app can be over-saturated
-- flatten_exact_fam_app is exactly saturated
-- flatten_exact_fam_app_fully lifts out the application to top level
-- Postcondition: Coercion :: Xi ~ F tys
flatten_fam_app tc tys -- Can be over-saturated
= ASSERT2( tyConArity tc <= length tys
, ppr tc $$ ppr (tyConArity tc) $$ ppr tys)
-- Type functions are saturated
-- The type function might be *over* saturated
-- in which case the remaining arguments should
-- be dealt with by AppTys
do { let (tys1, tys_rest) = splitAt (tyConArity tc) tys
; (xi1, co1) <- flatten_exact_fam_app tc tys1
-- co1 :: xi1 ~ F tys1
-- all Nominal roles b/c the tycon is oversaturated
; (xis_rest, cos_rest) <- flatten_many (repeat Nominal) tys_rest
            -- cos_rest :: xis_rest ~ tys_rest
; return ( mkAppTys xi1 xis_rest -- NB mkAppTys: rhs_xi might not be a type variable
-- cf Trac #5655
, mkAppCos co1 cos_rest
-- (rhs_xi :: F xis) ; (F cos :: F xis ~ F tys)
) }
flatten_exact_fam_app, flatten_exact_fam_app_fully ::
TyCon -> [TcType] -> FlatM (Xi, Coercion)
flatten_exact_fam_app tc tys
= do { mode <- getMode
; role <- getRole
; case mode of
FM_FlattenAll -> flatten_exact_fam_app_fully tc tys
FM_SubstOnly -> do { (xis, cos) <- flatten_many roles tys
; return ( mkTyConApp tc xis
, mkTyConAppCo role tc cos ) }
where
-- These are always going to be Nominal for now,
-- but not if #8177 is implemented
roles = tyConRolesX role tc }
-- FM_Avoid tv flat_top ->
-- do { (xis, cos) <- flatten_many fmode roles tys
-- ; if flat_top || tv `elemVarSet` tyCoVarsOfTypes xis
-- then flatten_exact_fam_app_fully fmode tc tys
-- else return ( mkTyConApp tc xis
-- , mkTcTyConAppCo (feRole fmode) tc cos ) }
flatten_exact_fam_app_fully tc tys
-- See Note [Reduce type family applications eagerly]
= try_to_reduce tc tys False id $
do { -- First, flatten the arguments
; (xis, cos) <- setEqRel NomEq $ flatten_many_nom tys
; eq_rel <- getEqRel
; let role = eqRelRole eq_rel
ret_co = mkTyConAppCo role tc cos
-- ret_co :: F xis ~ F tys
-- Now, look in the cache
; mb_ct <- liftTcS $ lookupFlatCache tc xis
; fr <- getFlavourRole
; case mb_ct of
Just (co, rhs_ty, flav) -- co :: F xis ~ fsk
| (flav, NomEq) `funEqCanDischargeFR` fr
-> -- Usable hit in the flat-cache
-- We certainly *can* use a Wanted for a Wanted
do { traceFlat "flatten/flat-cache hit" $ (ppr tc <+> ppr xis $$ ppr rhs_ty)
; (fsk_xi, fsk_co) <- flatten_one rhs_ty
-- The fsk may already have been unified, so flatten it
-- fsk_co :: fsk_xi ~ fsk
; return ( fsk_xi
, fsk_co `mkTransCo`
maybeSubCo eq_rel (mkSymCo co) `mkTransCo`
ret_co ) }
-- :: fsk_xi ~ F xis
-- Try to reduce the family application right now
-- See Note [Reduce type family applications eagerly]
_ -> try_to_reduce tc xis True (`mkTransCo` ret_co) $
do { let fam_ty = mkTyConApp tc xis
; (ev, co, fsk) <- newFlattenSkolemFlatM fam_ty
; let fsk_ty = mkTyVarTy fsk
; liftTcS $ extendFlatCache tc xis ( co
, fsk_ty, ctEvFlavour ev)
-- The new constraint (F xis ~ fsk) is not necessarily inert
-- (e.g. the LHS may be a redex) so we must put it in the work list
; let ct = CFunEqCan { cc_ev = ev
, cc_fun = tc
, cc_tyargs = xis
, cc_fsk = fsk }
; emitFlatWork ct
; traceFlat "flatten/flat-cache miss" $ (ppr fam_ty $$ ppr fsk $$ ppr ev)
; (fsk_xi, fsk_co) <- flatten_one fsk_ty
; return (fsk_xi, fsk_co
`mkTransCo`
maybeSubCo eq_rel (mkSymCo co)
`mkTransCo` ret_co ) }
}
where
try_to_reduce :: TyCon -- F, family tycon
-> [Type] -- args, not necessarily flattened
-> Bool -- add to the flat cache?
-> ( Coercion -- :: xi ~ F args
-> Coercion ) -- what to return from outer function
-> FlatM (Xi, Coercion) -- continuation upon failure
-> FlatM (Xi, Coercion)
try_to_reduce tc tys cache update_co k
= do { checkStackDepth (mkTyConApp tc tys)
; mb_match <- liftTcS $ matchFam tc tys
; case mb_match of
Just (norm_co, norm_ty)
-> do { traceFlat "Eager T.F. reduction success" $
vcat [ ppr tc, ppr tys, ppr norm_ty
, ppr norm_co <+> dcolon
<+> ppr (coercionKind norm_co)
, ppr cache]
; (xi, final_co) <- bumpDepth $ flatten_one norm_ty
; eq_rel <- getEqRel
; let co = maybeSubCo eq_rel norm_co
`mkTransCo` mkSymCo final_co
; flavour <- getFlavour
-- NB: only extend cache with nominal equalities
; when (cache && eq_rel == NomEq) $
liftTcS $
extendFlatCache tc tys ( co, xi, flavour )
; return ( xi, update_co $ mkSymCo co ) }
Nothing -> k }
{- Note [Reduce type family applications eagerly]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we come across a type-family application like (Append (Cons x Nil) t),
then, rather than flattening to a skolem etc, we may as well just reduce
it on the spot to (Cons x t). This saves a lot of intermediate steps.
Examples that are helped are tests T9872, and T5321Fun.
Performance testing indicates that it's best to try this *twice*, once
before flattening arguments and once after flattening arguments.
Adding the extra reduction attempt before flattening arguments cut
the allocation amounts for the T9872{a,b,c} tests by half.
An example of where the early reduction appears helpful:
type family Last x where
Last '[x] = x
Last (h ': t) = Last t
workitem: (x ~ Last '[1,2,3,4,5,6])
Flattening the argument never gets us anywhere, but trying to flatten
it at every step is quadratic in the length of the list. Reducing more
eagerly makes simplifying the right-hand type linear in its length.
Testing also indicated that the early reduction should *not* use the
flat-cache, but that the later reduction *should*. (Although the
effect was not large.) Hence the Bool argument to try_to_reduce. To
me (SLPJ) this seems odd; I get that eager reduction usually succeeds;
and if we don't use the cache for eager reduction, we will miss most of
the opportunities for using it at all. More exploration would be good
here.
At the end, once we've got a flat rhs, we extend the flatten-cache to record
the result. Doing so can save lots of work when the same redex shows up more
than once. Note that we record the link from the redex all the way to its
*final* value, not just the single step reduction. Interestingly, using the
flat-cache for the first reduction resulted in an increase in allocations
of about 3% for the four T9872x tests. However, using the flat-cache in
the later reduction is a similar gain. I (Richard E) don't currently (Dec '14)
have any knowledge as to *why* these facts are true.
************************************************************************
* *
Flattening a type variable
* *
********************************************************************* -}
-- | The result of flattening a tyvar "one step".
data FlattenTvResult
= FTRNotFollowed
-- ^ The inert set doesn't make the tyvar equal to anything else
| FTRFollowed TcType Coercion
-- ^ The tyvar flattens to a not-necessarily flat other type.
-- co :: new type ~r old type, where the role is determined by
-- the FlattenEnv
flattenTyVar :: TyVar -> FlatM (Xi, Coercion)
flattenTyVar tv
= do { mb_yes <- flatten_tyvar1 tv
; case mb_yes of
FTRFollowed ty1 co1 -- Recur
-> do { (ty2, co2) <- flatten_one ty1
-- ; traceFlat "flattenTyVar2" (ppr tv $$ ppr ty2)
; return (ty2, co2 `mkTransCo` co1) }
FTRNotFollowed -- Done
-> do { let orig_kind = tyVarKind tv
; (_new_kind, kind_co) <- setMode FM_SubstOnly $
flattenKinds $
flatten_one orig_kind
; let Pair _ zonked_kind = coercionKind kind_co
-- NB: kind_co :: _new_kind ~ zonked_kind
-- But zonked_kind is not necessarily the same as orig_kind
-- because that may have filled-in metavars.
-- Moreover the returned Xi type must be well-kinded
-- (e.g. in canEqTyVarTyVar we use getCastedTyVar_maybe)
-- If you remove it, then e.g. dependent/should_fail/T11407 panics
-- See also Note [Flattening]
           -- An alternative would be to use (zonkTcType orig_kind),
-- but some simple measurements suggest that's a little slower
; let tv' = setTyVarKind tv zonked_kind
tv_ty' = mkTyVarTy tv'
ty' = tv_ty' `mkCastTy` mkSymCo kind_co
; role <- getRole
; return (ty', mkReflCo role tv_ty'
`mkCoherenceLeftCo` mkSymCo kind_co) } }
flatten_tyvar1 :: TcTyVar -> FlatM FlattenTvResult
-- "Flattening" a type variable means to apply the substitution to it
-- Specifically, look up the tyvar in
-- * the internal MetaTyVar box
-- * the inerts
-- See also the documentation for FlattenTvResult
flatten_tyvar1 tv
  | not (isTcTyVar tv)   -- Happens when flattening under a (forall a. ty)
= return FTRNotFollowed
-- So ty contains references to the non-TcTyVar a
| otherwise
= do { mb_ty <- liftTcS $ isFilledMetaTyVar_maybe tv
; role <- getRole
; case mb_ty of
Just ty -> do { traceFlat "Following filled tyvar" (ppr tv <+> equals <+> ppr ty)
; return (FTRFollowed ty (mkReflCo role ty)) } ;
Nothing -> do { traceFlat "Unfilled tyvar" (ppr tv)
; fr <- getFlavourRole
; flatten_tyvar2 tv fr } }
flatten_tyvar2 :: TcTyVar -> CtFlavourRole -> FlatM FlattenTvResult
-- Try in the inert equalities
-- See Definition [Applying a generalised substitution] in TcSMonad
-- See Note [Stability of flattening] in TcSMonad
flatten_tyvar2 tv fr@(flavour, eq_rel)
| Derived <- flavour -- For derived equalities, consult the inert_model (only)
= do { model <- liftTcS $ getInertModel
; case lookupDVarEnv model tv of
Just (CTyEqCan { cc_rhs = rhs })
-> return (FTRFollowed rhs (pprPanic "flatten_tyvar2" (ppr tv $$ ppr rhs)))
-- Evidence is irrelevant for Derived contexts
_ -> return FTRNotFollowed }
| otherwise -- For non-derived equalities, consult the inert_eqs (only)
= do { ieqs <- liftTcS $ getInertEqs
; case lookupDVarEnv ieqs tv of
Just (ct:_) -- If the first doesn't work,
-- the subsequent ones won't either
| CTyEqCan { cc_ev = ctev, cc_tyvar = tv, cc_rhs = rhs_ty } <- ct
, ctEvFlavourRole ctev `eqCanRewriteFR` fr
-> do { traceFlat "Following inert tyvar" (ppr tv <+> equals <+> ppr rhs_ty $$ ppr ctev)
; let rewrite_co1 = mkSymCo $ ctEvCoercion ctev
rewrite_co = case (ctEvEqRel ctev, eq_rel) of
(ReprEq, _rel) -> ASSERT( _rel == ReprEq )
-- if this ASSERT fails, then
-- eqCanRewriteFR answered incorrectly
rewrite_co1
(NomEq, NomEq) -> rewrite_co1
(NomEq, ReprEq) -> mkSubCo rewrite_co1
; return (FTRFollowed rhs_ty rewrite_co) }
             -- NB: if ct is Derived then fmode must be also, hence
-- we are not going to touch the returned coercion
-- so ctEvCoercion is fine.
_other -> return FTRNotFollowed }
{-
Note [An alternative story for the inert substitution]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(This entire note is just background, left here in case we ever want
to return to the previous state of affairs)
We used to have (in GHC 7.8) this story for the inert substitution inert_eqs
* 'a' is not in fvs(ty)
* They are *inert* in the weaker sense that there is no infinite chain of
(i1 `eqCanRewrite` i2), (i2 `eqCanRewrite` i3), etc
This means that flattening must be recursive, but it does allow
[G] a ~ [b]
[G] b ~ Maybe c
This avoids "saturating" the Givens, which can save a modest amount of work.
It is easy to implement, in TcInteract.kick_out, by only kicking out an inert
only if (a) the work item can rewrite the inert AND
(b) the inert cannot rewrite the work item
This is significantly harder to think about.  It can save a LOT of work
in occurs-check cases, but we don't care about them much. Trac #5837
is an example; all the constraints here are Givens
[G] a ~ TF (a,Int)
-->
work TF (a,Int) ~ fsk
inert fsk ~ a
--->
work fsk ~ (TF a, TF Int)
inert fsk ~ a
--->
work a ~ (TF a, TF Int)
inert fsk ~ a
  ---> (attempting to flatten (TF a) so that it does not mention a)
work TF a ~ fsk2
inert a ~ (fsk2, TF Int)
inert fsk ~ (fsk2, TF Int)
---> (substitute for a)
work TF (fsk2, TF Int) ~ fsk2
inert a ~ (fsk2, TF Int)
inert fsk ~ (fsk2, TF Int)
---> (top-level reduction, re-orient)
work fsk2 ~ (TF fsk2, TF Int)
inert a ~ (fsk2, TF Int)
inert fsk ~ (fsk2, TF Int)
  ---> (attempt to flatten (TF fsk2) to get rid of fsk2)
work TF fsk2 ~ fsk3
work fsk2 ~ (fsk3, TF Int)
inert a ~ (fsk2, TF Int)
inert fsk ~ (fsk2, TF Int)
--->
work TF fsk2 ~ fsk3
inert fsk2 ~ (fsk3, TF Int)
inert a ~ ((fsk3, TF Int), TF Int)
inert fsk ~ ((fsk3, TF Int), TF Int)
Because the incoming given rewrites all the inert givens, we get more and
more duplication in the inert set.  But this really only happens in pathological
cases, so we don't care.
************************************************************************
* *
Unflattening
* *
************************************************************************
An unflattening example:
[W] F a ~ alpha
flattens to
[W] F a ~ fmv (CFunEqCan)
[W] fmv ~ alpha (CTyEqCan)
We must solve both!
-}
unflatten :: Cts -> Cts -> TcS Cts
unflatten tv_eqs funeqs
= do { dflags <- getDynFlags
; tclvl <- getTcLevel
; traceTcS "Unflattening" $ braces $
vcat [ text "Funeqs =" <+> pprCts funeqs
, text "Tv eqs =" <+> pprCts tv_eqs ]
-- Step 1: unflatten the CFunEqCans, except if that causes an occurs check
-- Occurs check: consider [W] alpha ~ [F alpha]
-- ==> (flatten) [W] F alpha ~ fmv, [W] alpha ~ [fmv]
-- ==> (unify) [W] F [fmv] ~ fmv
-- See Note [Unflatten using funeqs first]
; funeqs <- foldrBagM (unflatten_funeq dflags) emptyCts funeqs
; traceTcS "Unflattening 1" $ braces (pprCts funeqs)
-- Step 2: unify the tv_eqs, if possible
; tv_eqs <- foldrBagM (unflatten_eq dflags tclvl) emptyCts tv_eqs
; traceTcS "Unflattening 2" $ braces (pprCts tv_eqs)
-- Step 3: fill any remaining fmvs with fresh unification variables
; funeqs <- mapBagM finalise_funeq funeqs
; traceTcS "Unflattening 3" $ braces (pprCts funeqs)
-- Step 4: remove any tv_eqs that look like ty ~ ty
; tv_eqs <- foldrBagM finalise_eq emptyCts tv_eqs
; let all_flat = tv_eqs `andCts` funeqs
; traceTcS "Unflattening done" $ braces (pprCts all_flat)
-- Step 5: zonk the result
-- Motivation: makes them nice and ready for the next step
-- (see TcInteract.solveSimpleWanteds)
; zonkSimples all_flat }
where
----------------
unflatten_funeq :: DynFlags -> Ct -> Cts -> TcS Cts
unflatten_funeq dflags ct@(CFunEqCan { cc_fun = tc, cc_tyargs = xis
, cc_fsk = fmv, cc_ev = ev }) rest
= do { -- fmv should be an un-filled flatten meta-tv;
-- we now fix its final value by filling it, being careful
-- to observe the occurs check. Zonking will eliminate it
-- altogether in due course
rhs' <- zonkTcType (mkTyConApp tc xis)
; case occurCheckExpand dflags fmv rhs' of
OC_OK rhs'' -- Normal case: fill the tyvar
-> do { setEvBindIfWanted ev
(EvCoercion (mkTcReflCo (ctEvRole ev) rhs''))
; unflattenFmv fmv rhs''
; return rest }
_ -> -- Occurs check
return (ct `consCts` rest) }
unflatten_funeq _ other_ct _
= pprPanic "unflatten_funeq" (ppr other_ct)
----------------
finalise_funeq :: Ct -> TcS Ct
finalise_funeq (CFunEqCan { cc_fsk = fmv, cc_ev = ev })
= do { demoteUnfilledFmv fmv
; return (mkNonCanonical ev) }
finalise_funeq ct = pprPanic "finalise_funeq" (ppr ct)
----------------
unflatten_eq :: DynFlags -> TcLevel -> Ct -> Cts -> TcS Cts
unflatten_eq dflags tclvl ct@(CTyEqCan { cc_ev = ev, cc_tyvar = tv, cc_rhs = rhs }) rest
| isFmvTyVar tv -- Previously these fmvs were untouchable,
-- but now they are touchable
-- NB: unlike unflattenFmv, filling a fmv here does
-- bump the unification count; it is "improvement"
-- Note [Unflattening can force the solver to iterate]
= do { lhs_elim <- tryFill dflags tv rhs ev
; if lhs_elim then return rest else
do { rhs_elim <- try_fill dflags tclvl ev rhs (mkTyVarTy tv)
; if rhs_elim then return rest else
return (ct `consCts` rest) } }
| otherwise
= return (ct `consCts` rest)
unflatten_eq _ _ ct _ = pprPanic "unflatten_irred" (ppr ct)
----------------
finalise_eq :: Ct -> Cts -> TcS Cts
finalise_eq (CTyEqCan { cc_ev = ev, cc_tyvar = tv
, cc_rhs = rhs, cc_eq_rel = eq_rel }) rest
| isFmvTyVar tv
= do { ty1 <- zonkTcTyVar tv
; ty2 <- zonkTcType rhs
; let is_refl = ty1 `tcEqType` ty2
; if is_refl then do { setEvBindIfWanted ev
(EvCoercion $
mkTcReflCo (eqRelRole eq_rel) rhs)
; return rest }
else return (mkNonCanonical ev `consCts` rest) }
| otherwise
= return (mkNonCanonical ev `consCts` rest)
finalise_eq ct _ = pprPanic "finalise_irred" (ppr ct)
----------------
try_fill dflags tclvl ev ty1 ty2
| Just tv1 <- tcGetTyVar_maybe ty1
, isTouchableOrFmv tclvl tv1
, typeKind ty1 `eqType` tyVarKind tv1
= tryFill dflags tv1 ty2 ev
| otherwise
= return False
tryFill :: DynFlags -> TcTyVar -> TcType -> CtEvidence -> TcS Bool
-- (tryFill tv rhs ev) sees if 'tv' is an un-filled MetaTv
-- If so, and if tv does not appear in 'rhs', set tv := rhs
-- bind the evidence (which should be a CtWanted) to Refl<rhs>
-- and return True. Otherwise return False
tryFill dflags tv rhs ev
= ASSERT2( not (isGiven ev), ppr ev )
do { is_filled <- isFilledMetaTyVar tv
; if is_filled then return False else
do { rhs' <- zonkTcType rhs
; case occurCheckExpand dflags tv rhs' of
OC_OK rhs'' -- Normal case: fill the tyvar
-> do { setEvBindIfWanted ev
(EvCoercion (mkTcReflCo (ctEvRole ev) rhs''))
; unifyTyVar tv rhs''
; return True }
_ -> -- Occurs check
return False } }
{-
Note [Unflatten using funeqs first]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
[W] G a ~ Int
[W] F (G a) ~ G a
do not want to end up with
[W] F Int ~ Int
because that might actually hold! Better to end up with the two above
unsolved constraints. The flat form will be
G a ~ fmv1 (CFunEqCan)
F fmv1 ~ fmv2 (CFunEqCan)
fmv1 ~ Int (CTyEqCan)
fmv1 ~ fmv2 (CTyEqCan)
Flatten using the fun-eqs first.
-}
| vikraman/ghc | compiler/typecheck/TcFlatten.hs | bsd-3-clause | 59,811 | 50 | 23 | 18,112 | 6,144 | 3,270 | 2,874 | -1 | -1 |
{-|
Module : Reactive.DOM.Internal.Mutation
Description : Definition of Mutation, to describe DOM updates.
Copyright : (c) Alexander Vieth, 2016
Licence : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable (GHC only)
-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
module Reactive.DOM.Internal.Mutation where
import Prelude hiding ((.), id)
import Data.Text (pack)
import Control.Category
import Data.Semigroup
import Data.Bifunctor
import GHCJS.DOM.Node
import GHCJS.DOM.Types (Text)
import GHCJS.DOM.Element (setAttribute, IsElement)
import Data.JSString.Text
import Data.Unique
-- | Formal representation of the DOM Node children mutation methods.
data ChildrenMutation old new =
AppendChild new
| InsertBefore new old
| ReplaceChild new old -- New then old, for consistency with the JS method.
| RemoveChild old
deriving instance (Show old, Show new) => Show (ChildrenMutation old new)
instance Bifunctor ChildrenMutation where
bimap f g x = case x of
AppendChild new -> AppendChild (g new)
InsertBefore new old -> InsertBefore (g new) (f old)
ReplaceChild new old -> ReplaceChild (g new) (f old)
RemoveChild old -> RemoveChild (f old)
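-- An illustrative sketch (not part of the original module): 'bimap' relabels
-- the old and new slots of a mutation independently. The 'Int' payloads and
-- the label strings are arbitrary stand-ins for real node handles.
exampleRelabelMutation :: ChildrenMutation String String
exampleRelabelMutation =
  bimap (\old -> "old:" ++ show old) (\new -> "new:" ++ show new)
        (InsertBefore (1 :: Int) (2 :: Int))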
data SomeNode where
SomeNode :: IsNode n => Unique -> n -> SomeNode
instance Show SomeNode where
show (SomeNode u _) = show (hashUnique u)
instance Eq SomeNode where
(SomeNode u1 _) == (SomeNode u2 _) = u1 == u2
instance Ord SomeNode where
(SomeNode u1 _) `compare` (SomeNode u2 _) = u1 `compare` u2
someText :: Text -> IO SomeNode
someText node = do
u <- newUnique
pure (SomeNode u node)
{-# NOINLINE someElement #-}
someElement :: IsElement e => e -> IO SomeNode
someElement node = do
u <- newUnique
let hash = hashUnique u
setAttribute node (textToJSString "virtual_id") (textToJSString (pack (show hash)))
pure (SomeNode u node)
runChildrenMutationIO
:: IsNode parent
=> ChildrenMutation SomeNode SomeNode
-> parent
-> IO ()
runChildrenMutationIO x parent = action >> pure ()
where
action = case x of
AppendChild (SomeNode _ el) -> parent `appendChild` (Just el)
InsertBefore (SomeNode _ new) (SomeNode _ old) ->
insertBefore parent (Just new) (Just old)
ReplaceChild (SomeNode _ new) (SomeNode _ old) ->
replaceChild parent (Just new) (Just old)
RemoveChild (SomeNode _ el) -> parent `removeChild` (Just el)
runChildrenMutationsIO
:: IsNode parent
=> [ChildrenMutation SomeNode SomeNode]
-> parent
-> IO ()
runChildrenMutationsIO ms parent =
traverse (flip runChildrenMutationIO parent) ms >> pure ()
| avieth/reactive-dom | Reactive/DOM/Internal/Mutation.hs | bsd-3-clause | 2,787 | 0 | 13 | 599 | 823 | 421 | 402 | 66 | 4 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Volume.HR.Tests
( tests ) where
import Data.String
import Prelude
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Volume.HR.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "HR Tests"
[ makeCorpusTest [Seal Volume] corpus
]
| facebookincubator/duckling | tests/Duckling/Volume/HR/Tests.hs | bsd-3-clause | 500 | 0 | 9 | 77 | 79 | 50 | 29 | 11 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : TestSuite.Puzzles.Coins
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Test suite for Data.SBV.Examples.Puzzles.Coins
-----------------------------------------------------------------------------
module TestSuite.Puzzles.Coins(tests) where
import Data.SBV.Examples.Puzzles.Coins
import Utils.SBVTestFramework
-- Test suite
tests :: TestTree
tests = testGroup "Puzzles.Coins" [
goldenVsStringShow "coins" coinsPgm
]
where coinsPgm = runSAT $ do cs <- mapM mkCoin [1..6]
mapM_ constrain [c s | s <- combinations cs, length s >= 2, c <- [c1, c2, c3, c4, c5, c6]]
constrain $ bAnd $ zipWith (.>=) cs (tail cs)
output $ sum cs .== 115
| josefs/sbv | SBVTestSuite/TestSuite/Puzzles/Coins.hs | bsd-3-clause | 912 | 0 | 14 | 218 | 189 | 105 | 84 | 10 | 1 |
{-# LANGUAGE OverloadedStrings, LambdaCase, QuasiQuotes #-}
module Transformations.Simplifying.CaseSimplificationSpec where
import Control.Monad
import Data.Monoid hiding (Alt)
import Transformations.Simplifying.CaseSimplification
import Test.Hspec
import Test.QuickCheck hiding (generate)
import Test.Test
import Test.Check
import Grin.Grin
import Grin.TH
import Test.Assertions
runTests :: IO ()
runTests = hspec spec
{-
Before the case simplification the scrutinised values in case expressions are
normally node variables, which the vectorization changes to explicit node values.
-}
{-
After the case simplification, all case expressions will be scrutinising only
basic values (including tag values), and all case patterns will correspondingly
be just basic values. The patterns will not contain (and bind) any variables.
-}
spec :: Spec
spec = do
testExprContextE $ \ctx -> do
it "Example from Figure 4.11" $ do
let before =
[expr|
l1 <- store (CNone)
case (t a1 a2) of
CNil -> pure 3
(CCons x xs) -> store x
store xs
pure 5
|]
let after =
[expr|
l1 <- store (CNone)
case t of
CNil -> pure 3
CCons -> store a1
store a2
pure 5
|]
pending
caseSimplification (ctx before) `sameAs` (ctx after)
forM_ programGenerators $ \(name, gen) -> do
describe name $ do
it "Program size does not change" $ do
pending
        -- NOTE: commented out due to a type error
{-
property $ forAll gen programSizeDoesNotChange
-}
it "Cases with tas as values have tags in their alternatives" $ do
pending
        -- NOTE: commented out due to a type error
{-
property $ forAll gen effectedAlternativesHasOnlyTags
-}
varTagCover :: Exp -> Property -> Property
varTagCover exp =
within 10000000 {-microsecond-} .
cover 1 (getAny $ valuesInCases (Any . isVarTagNode) exp) "Case with VarTagNode"
programSizeDoesNotChange :: Exp -> Property
programSizeDoesNotChange exp = varTagCover exp $ unchangedSize exp $ caseSimplification exp
effectedAlternativesHasOnlyTags :: Exp -> Property
effectedAlternativesHasOnlyTags exp = varTagCover exp $ checkVarTagCases $ caseSimplification exp
isVarTagNode :: Val -> Bool
isVarTagNode = \case
VarTagNode _ _ -> True
_ -> False
unchangedSize :: Exp -> Exp -> Property
unchangedSize before after = property $ programSize before == programSize after
checkVarTagCases :: Exp -> Property
checkVarTagCases = \case
ECase val alts | isVarTagNode val -> mconcat (checkAlt <$> alts)
Program _ defs -> mconcat (checkVarTagCases <$> defs)
Def name params body -> checkVarTagCases body
EBind se lpat exp -> checkVarTagCases se <> checkVarTagCases exp
ECase val alts -> mconcat (checkVarTagCases <$> alts)
SBlock exp -> checkVarTagCases exp
Alt cpat exp -> checkVarTagCases exp
rest -> property True
where
checkAlt :: Exp -> Property
checkAlt (Alt cpat exp) = checkVarTagCases exp <> property (isBasicCPat cpat)
instance Semigroup Property where
p <> q = p .&&. q
instance Monoid Property where
mempty = property True
mconcat ps = conjoin ps
| andorp/grin | grin/test/Transformations/Simplifying/CaseSimplificationSpec.hs | bsd-3-clause | 3,413 | 0 | 17 | 940 | 682 | 343 | 339 | 61 | 8 |
module Graphics.Gnuplot.Execute where
import Graphics.Gnuplot.Private.OS (gnuplotName, )
import System.Exit (ExitCode, )
import System.IO (hPutStr, )
import qualified System.Process as Proc
simple ::
[String] {-^ The lines of the gnuplot script to be piped into gnuplot -}
-> [String] {-^ Options for gnuplot -}
-> IO ExitCode
simple program options =
do -- putStrLn cmd
(inp,_out,_err,pid) <-
Proc.runInteractiveProcess gnuplotName options Nothing Nothing
hPutStr inp (unlines program)
Proc.waitForProcess pid
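-- An illustrative sketch (not part of the original module): pipe a two-line
-- script into gnuplot with no extra command-line options. The terminal and
-- plot command chosen here are arbitrary.
exampleRun :: IO ExitCode
exampleRun = simple ["set terminal dumb", "plot sin(x)"] []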
| kubkon/gnuplot | execute/pipe/Graphics/Gnuplot/Execute.hs | bsd-3-clause | 557 | 0 | 9 | 113 | 137 | 79 | 58 | 15 | 1 |
--------------------------------------------------------------------------------
-- | Call into the Haskell LLVM API to generate LLVM bitcode.
--
module Llvm.CodeOutput where
-- import ErrUtils
-- import Outputable
import Llvm.AbsSyn as AbsSyn
import Llvm.MetaData
import Llvm.Types
import Llvm.TypeConversions
import DynFlags
import Unique
import FastString
import LLVM.General.AST as AST
import qualified LLVM.General.AST.Global as G
import qualified LLVM.General.AST.Constant as C
import qualified LLVM.General.AST.InlineAssembly as IA
import Data.Maybe
--------------------------------------------------------------------------------
-- * Top Level Output functions
--------------------------------------------------------------------------------
-- unused
-- | Output a whole LLVM module.
outputLlvmModule :: LlvmModule -> DynFlags -> Module
outputLlvmModule (LlvmModule comments aliases meta globals decls funcs) dflags
= Module {
moduleName = "<module-name-here>",
moduleDataLayout = Just (platformToDataLayout (targetPlatform dflags)),
moduleTargetTriple =
Just (platformToTargetTriple (targetPlatform dflags)),
moduleDefinitions = concat [alis, metas, glos, decs, funs]
}
where alis = outputLlvmAliases aliases
metas = outputLlvmMetas meta
glos = outputLlvmGlobals globals
decs = outputLlvmFunctionDecls decls
funs = outputLlvmFunctions funcs
-- | Output a list of global mutable variable definitions
outputLlvmGlobals :: [LMGlobal] -> [Definition]
outputLlvmGlobals ls = map outputLlvmGlobal ls
-- | Output a global mutable variable definition
outputLlvmGlobal :: LMGlobal -> Definition
outputLlvmGlobal (LMGlobal var@(LMGlobalVar name ty link sec ali con) dat) =
let section = (Just . unpackFS) =<< sec
alignment = maybe 0 fromIntegral ali
init = case dat of
Just stat -> Just (llvmStaticToConstant stat)
Nothing -> Just (C.Null (llvmTypeToType (pLower ty)))
ty' = case dat of
Just stat -> llvmTypeToType (getStatType stat)
Nothing -> llvmTypeToType (pLower ty)
name' = llvmVarToName var
link' = llvmLinkageTypeToLinkage link
in
if con == Alias then
GlobalDefinition
(globalAliasDefaults {
G.name = name',
G.linkage = link',
G.type' = ty',
G.aliasee = fromJust init
})
else
GlobalDefinition
(globalVariableDefaults {
G.name = name',
G.linkage = link',
G.isConstant = (con == Constant),
G.type' = ty',
G.initializer = init,
G.section = section,
G.alignment = alignment
})
outputLlvmGlobal (LMGlobal var val) =
error "outputLlvmGlobal: Non Global variable output as global."
-- | Output a list of LLVM type aliases.
outputLlvmAliases :: [LlvmAlias] -> [Definition]
outputLlvmAliases alis = map outputLlvmAlias alis
-- | Output an LLVM type alias.
outputLlvmAlias :: LlvmAlias -> Definition
outputLlvmAlias (name, ty) =
TypeDefinition (mkName name) (Just (llvmTypeToType ty))
-- | Output a list of LLVM metadata.
outputLlvmMetas :: [MetaDecl] -> [Definition]
outputLlvmMetas metas = map (outputLlvmMeta ) metas
-- | Output an LLVM metadata definition
outputLlvmMeta :: MetaDecl -> Definition
outputLlvmMeta (MetaUnamed n m) =
MetadataNodeDefinition (MetadataNodeID (fromIntegral n))
[(Just (outputLlvmMetaExpr m))]
outputLlvmMeta (MetaNamed n m) =
NamedMetadataDefinition (unpackFS n)
(map (MetadataNodeID . fromIntegral) m)
-- | Output an LLVM metadata value.
outputLlvmMetaExpr :: MetaExpr -> Operand
outputLlvmMetaExpr = metaExprToOperand
-- | Output a list of function definitions.
outputLlvmFunctions :: LlvmFunctions -> [Definition]
outputLlvmFunctions funcs = map outputLlvmFunction funcs
-- | Output a function definition.
-- body = [LlvmBlock] = [LlvmBlock {LlvmBlockId [LlvmStatement]}]
outputLlvmFunction :: LlvmFunction -> Definition
outputLlvmFunction
(LlvmFunction dec@(LlvmFunctionDecl name link cc retTy vArgs params ali)
args attrs sec body)
=
let baseDecl = outputLlvmFunctionDeclBase dec
argNames = map (Left . unpackFS) args
parameters = if (length argNames) == (length params)
then zipWith llvmParameterToNamedParameter
params argNames
else
error $ "outputLlvmFunction: Number of arg names" ++
" supplied does not match type signature."
in GlobalDefinition $
baseDecl {
G.parameters = (parameters, vArgs == VarArgs),
G.functionAttributes = map llvmFuncAttrToFunctionAttribute attrs,
G.section = (Just . unpackFS) =<< sec,
G.basicBlocks = outputLlvmBlocks body
}
-- | Output a list of function declarations.
outputLlvmFunctionDecls :: LlvmFunctionDecls -> [Definition]
outputLlvmFunctionDecls decs = map outputLlvmFunctionDecl decs
-- | Output a function declaration.
-- Declarations define the function type but don't define the actual body of
-- the function.
outputLlvmFunctionDecl :: LlvmFunctionDecl -> Definition
outputLlvmFunctionDecl dec = GlobalDefinition (outputLlvmFunctionDeclBase dec)
-- | Output a function declaration, but don't wrap it as a Definition
outputLlvmFunctionDeclBase :: LlvmFunctionDecl -> Global
outputLlvmFunctionDeclBase
(LlvmFunctionDecl name link cc retTy vArgs params ali)
=
let ali' = maybe 0 fromIntegral ali
-- Function declarations have no argument names,
-- we only care about the types here.
parameters = zipWith llvmParameterToNamedParameter
params (repeat (Left ""))
in functionDefaults {
G.linkage = llvmLinkageTypeToLinkage link,
G.callingConvention = llvmCallConventionToCallingConvention cc,
G.returnType = llvmTypeToType retTy,
G.name = mkName name,
G.parameters = (parameters, vArgs == VarArgs),
G.alignment = ali'
}
-- | Output a list of LLVM blocks.
outputLlvmBlocks :: LlvmBlocks -> [BasicBlock]
outputLlvmBlocks blocks = map outputLlvmBlock blocks
partitionEithers :: [Either a b] -> ([a], [b])
partitionEithers [] = ([], [])
partitionEithers ((Left x):zs) =
let (xs, ys) = partitionEithers zs in (x:xs, ys)
partitionEithers ((Right y):zs) =
let (xs, ys) = partitionEithers zs in (xs, y:ys)
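-- For example (illustrative only):
--   partitionEithers [Left 1, Right 'a', Left 2] == ([1, 2], "a")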
head' :: [a] -> a
head' [x] = x
head' _ = error "Fatal error in head'"
-- | Output an LLVM block.
-- It must be part of a function definition.
outputLlvmBlock :: LlvmBlock -> BasicBlock
outputLlvmBlock (LlvmBlock blockId stmts) =
BasicBlock name instrs (head' terminator)
where
name = Name (show blockId)
-- terminator had better be a singleton list here,
-- else the block is invalid
(instrs, terminator) =
partitionEithers (map outputLlvmStatement stmts)
{- let isLabel (MkLabel _) = True
isLabel _ = False
(block, rest) = break isLabel stmts
outputRest = case rest of
(MkLabel id):xs -> outputLlvmBlock (LlvmBlock id xs)
_ -> ()
in do mapM_ outputLlvmStatement block
outputRest
-}
-- | Output an LLVM block label.
--outputLlvmBlockLabel :: LlvmBlockId -> Name
--outputLlvmBlockLabel blockId = Name (show blockId)
-- | Output an LLVM statement.
outputLlvmStatement :: LlvmStatement ->
Either (Named Instruction) (Named Terminator)
outputLlvmStatement stmt =
case stmt of
MetaStmt meta s -> outputMetaStatement meta s
_ -> outputMetaStatement [] stmt
-- Output an LLVM statement with metadata annotations.
-- By making instructions and terminators named, we are able to do assignments.
outputMetaStatement :: [MetaAnnot] -> LlvmStatement ->
Either (Named Instruction) (Named Terminator)
outputMetaStatement meta stmt =
case stmt of
Assignment dst expr -> Left $ outputAssignment dst expr meta
AbsSyn.Fence st ord -> Left $ outputFence st ord meta
Branch target -> Right $ outputBranch target meta
BranchIf cond ifT ifF -> Right $ outputBranchIf cond ifT ifF meta
-- We don't need comments
Comment comments ->
error "outputMetaStatement: Can't generate comments."
-- We don't need labels either
MkLabel label ->
error "outputMetaStatement: Can't generate comments."
AbsSyn.Store value ptr -> Left $ outputStore value ptr meta
AbsSyn.Switch scrut def tgs -> Right $ outputSwitch scrut def tgs meta
Return result -> Right $ outputReturn result meta
Expr expr -> Left $ outputMetaExpr meta expr
AbsSyn.Unreachable ->
Right $ Do (AST.Unreachable (outputMetaAnnots meta)) -- T
Nop -> error "NOP generated as a statement"
MetaStmt meta s -> outputMetaStatement meta s
-- | Output an LLVM expression.
outputLlvmExpression :: LlvmExpression -> Named Instruction
outputLlvmExpression expr
= case expr of
MExpr meta e -> outputMetaExpr meta e
_ -> outputMetaExpr [] expr
outputMetaExpr :: [MetaAnnot] -> LlvmExpression -> Named Instruction
outputMetaExpr meta expr =
case expr of
AbsSyn.Alloca tp amount -> outputAlloca tp amount meta
LlvmOp op left right -> outputLlvmMachOp op left right meta
AbsSyn.Call tp fp args attrs ->
outputCall tp fp (map MetaVar args) attrs meta
CallM tp fp args attrs -> outputCall tp fp args attrs meta
Cast op from to -> outputCast op from to meta
Compare op left right -> outputCmpOp op left right meta
Extract vec idx -> outputExtract vec idx meta
Insert vec elt idx -> outputInsert vec elt idx meta
GetElemPtr inb ptr indexes -> outputGetElementPtr inb ptr indexes meta
AbsSyn.Load ptr -> outputLoad ptr meta
Malloc tp amount -> outputMalloc tp amount meta
AbsSyn.Phi tp precessors -> outputPhi tp precessors meta
Asm asm c ty v se sk ->
error "outputMetaExpr: Assembly not used"
MExpr meta e -> outputMetaExpr meta e
--------------------------------------------------------------------------------
-- * Individual print functions
--------------------------------------------------------------------------------
-- | Should always be a function pointer. So a global var of function type
-- (since globals are always pointers) or a local var of pointer function type.
outputCall :: LlvmCallType -> LlvmVar -> [MetaExpr] -> [LlvmFuncAttr] ->
[MetaAnnot] -> Named Instruction
outputCall ct fptr args attrs metas =
case fptr of
-- if local var function pointer, unwrap
LMLocalVar _ (LMPointer (LMFunction d)) -> ppCall' d
-- should be function type otherwise
LMGlobalVar _ (LMFunction d) _ _ _ _ -> ppCall' d
-- not pointer or function, so error
_other -> error $ "outputCall called with non LMFunction type!\nMust be "
++ " called with either global var of function type or "
++ "local var of pointer function type."
where
ppCall' decl@(LlvmFunctionDecl name _ cc ret varargs params _) =
{- IGNORED:
- map fst params (arg types)
- varargs
- Function type, including lifting to ptr type -}
let tc = ct == TailCall
cc' = llvmCallConventionToCallingConvention cc
args' = map outputLlvmMetaExpr args
pattrs =
map (map llvmParamAttrToParameterAttribute . snd) params
attrs' = map llvmFuncAttrToFunctionAttribute attrs
metas' = outputMetaAnnots metas
in Do $ AST.Call { isTailCall = tc,
callingConvention = cc',
returnAttributes = [],
function = Right (llvmVarToOperand fptr),
arguments = zip args' pattrs,
functionAttributes = attrs',
metadata = metas'
}
outputLlvmMachOp :: LlvmMachOp -> LlvmVar -> LlvmVar ->
[MetaAnnot] -> Named Instruction
outputLlvmMachOp op left right metas =
Do $
(case op of
LM_MO_Add -> Add False False left' right' metas'
LM_MO_Sub -> Sub False False left' right' metas'
LM_MO_Mul -> Mul False False left' right' metas'
LM_MO_UDiv -> UDiv False left' right' metas'
LM_MO_SDiv -> SDiv False left' right' metas'
LM_MO_URem -> URem left' right' metas'
LM_MO_SRem -> SRem left' right' metas'
LM_MO_FAdd -> FAdd left' right' metas'
LM_MO_FSub -> FSub left' right' metas'
LM_MO_FMul -> FMul left' right' metas'
LM_MO_FDiv -> FDiv left' right' metas'
LM_MO_FRem -> FRem left' right' metas'
LM_MO_Shl -> Shl False False left' right' metas'
LM_MO_LShr -> LShr False left' right' metas'
LM_MO_AShr -> AShr False left' right' metas'
LM_MO_And -> And left' right' metas'
LM_MO_Or -> Or left' right' metas'
LM_MO_Xor -> Xor left' right' metas')
where left' = llvmVarToOperand left
right' = llvmVarToOperand right
metas' = outputMetaAnnots metas
outputCmpOp :: LlvmCmpOp -> LlvmVar -> LlvmVar ->
[MetaAnnot] -> Named Instruction
outputCmpOp op left right metas =
let
left' = llvmVarToOperand left
right' = llvmVarToOperand right
lty = getVarType left
rty = getVarType right
metas' = outputMetaAnnots metas
in if isInt lty && isInt rty
then Do $ ICmp ((fromJust . llvmCmpOpToIntegerPredicate) op)
left' right' metas'
else if isFloat lty && isFloat rty
then Do $ FCmp ((fromJust . llvmCmpOpToFloatingPointPredicate) op)
left' right' metas'
else error $
"outputCmpOp: Cannot compare incomparable types " ++
show lty ++ ", " ++ show rty
outputAssignment :: LlvmVar -> LlvmExpression -> [MetaAnnot] ->
Named Instruction
outputAssignment var expr metas =
case outputLlvmExpression (MExpr metas expr) of
Do expr' -> (llvmVarToName var) := expr'
_ -> error "Named expression must be a 'Do'"
outputSyncOrdering :: LlvmSyncOrdering -> MemoryOrdering
outputSyncOrdering SyncUnord = Unordered
outputSyncOrdering SyncMonotonic = Monotonic
outputSyncOrdering SyncAcquire = Acquire
outputSyncOrdering SyncRelease = Release
outputSyncOrdering SyncAcqRel = AcquireRelease
outputSyncOrdering SyncSeqCst = SequentiallyConsistent
-- The st (single-thread) boolean might need to be negated.
outputFence :: Bool -> LlvmSyncOrdering -> [MetaAnnot] -> Named Instruction
outputFence st ord metas = Do $ AST.Fence atom metas'
where atom = Atomicity st (outputSyncOrdering ord)
metas' = outputMetaAnnots metas
-- XXX: On x86, vector types need to be 16-byte aligned for aligned access, but
-- we have no way of guaranteeing that this is true with GHC (we would need to
-- modify the layout of the stack and closures, change the storage manager,
-- etc.). So, we blindly tell LLVM that *any* vector store or load could be
-- unaligned. In the future we may be able to guarantee that certain vector
-- access patterns are aligned, in which case we will need a more granular way
-- of specifying alignment.
outputLoad :: LlvmVar -> [MetaAnnot] -> Named Instruction
outputLoad var metas
-- We say the load is non-volatile and non-atomic.
| isVecPtrVar var = Do $ AST.Load False op Nothing 1 metas'
| otherwise = Do $ AST.Load False op Nothing 0 metas'
where
isVecPtrVar = isVector . pLower . getVarType
op = llvmVarToOperand var
metas' = outputMetaAnnots metas
outputStore :: LlvmVar -> LlvmVar -> [MetaAnnot] -> Named Instruction
outputStore val dst metas
-- We say the store is non-volatile and non-atomic.
| isVecPtrVar dst = Do $ AST.Store False dstOp valOp Nothing 1 metas'
| otherwise = Do $ AST.Store False dstOp valOp Nothing 0 metas'
where
isVecPtrVar :: LlvmVar -> Bool
isVecPtrVar = isVector . pLower . getVarType
dstOp = llvmVarToOperand dst
valOp = llvmVarToOperand val
metas' = outputMetaAnnots metas
outputCast :: LlvmCastOp -> LlvmVar -> LlvmType ->
[MetaAnnot] -> Named Instruction
outputCast op var ty metas =
  Do $ (case op of
          LM_Trunc    -> Trunc operand ty' metas'
          LM_Zext     -> ZExt operand ty' metas'
          LM_Sext     -> SExt operand ty' metas'
          LM_Fptrunc  -> FPTrunc operand ty' metas'
          LM_Fpext    -> FPExt operand ty' metas'
          LM_Fptoui   -> FPToUI operand ty' metas'
          LM_Fptosi   -> FPToSI operand ty' metas'
          LM_Uitofp   -> UIToFP operand ty' metas'
          LM_Sitofp   -> SIToFP operand ty' metas'
          LM_Ptrtoint -> PtrToInt operand ty' metas'
          LM_Inttoptr -> IntToPtr operand ty' metas'
          LM_Bitcast  -> BitCast operand ty' metas')
where
operand = llvmVarToOperand var
ty' = llvmTypeToType ty
metas' = outputMetaAnnots metas
-- As of LLVM 3.0, malloc is no longer an instruction of the LLVM IR.
outputMalloc :: LlvmType -> Int -> [MetaAnnot] -> Named Instruction
outputMalloc tp amount metas = error "malloc not implemented"
outputAlloca :: LlvmType -> Int -> [MetaAnnot] -> Named Instruction
outputAlloca ty amount metas = Do $ AST.Alloca ty' (Just numElems) 0 metas'
where ty' = llvmTypeToType ty
-- The number of elements of type ty' to allocate space for
numElems = ConstantOperand (C.Int 32 (toInteger amount))
metas' = outputMetaAnnots metas
outputGetElementPtr :: Bool -> LlvmVar -> [LlvmVar] ->
[MetaAnnot] -> Named Instruction
outputGetElementPtr inb ptr idx metas = Do $ GetElementPtr inb ptr' idx' metas'
where ptr' = llvmVarToOperand ptr
idx' = map (llvmVarToOperand ) idx
metas' = outputMetaAnnots metas
outputReturn :: Maybe LlvmVar -> [MetaAnnot] -> Named Terminator
outputReturn var metas = Do $ Ret var' metas'
where var' = (Just . llvmVarToOperand) =<< var
metas' = outputMetaAnnots metas
-- Unconditional branch to target
outputBranch :: LlvmVar -> [MetaAnnot] -> Named Terminator
outputBranch var metas = Do $ Br name metas'
where name = llvmVarToName var
metas' = outputMetaAnnots metas
outputBranchIf :: LlvmVar -> LlvmVar -> LlvmVar ->
[MetaAnnot] -> Named Terminator
outputBranchIf cond trueT falseT metas =
Do $ CondBr cond' trueT' falseT' metas'
where cond' = llvmVarToOperand cond
trueT' = llvmVarToName trueT
falseT' = llvmVarToName falseT
metas' = outputMetaAnnots metas
outputPhi :: LlvmType -> [(LlvmVar,LlvmVar)] -> [MetaAnnot] -> Named Instruction
outputPhi ty preds metas = Do $ AST.Phi ty' preds' metas'
where ty' = llvmTypeToType ty
preds' = map (\(op,name) -> (llvmVarToOperand op, llvmVarToName name)) preds
errStr = concat $ map (\(op,name) -> show op) preds
metas' = outputMetaAnnots metas
outputSwitch :: LlvmVar -> LlvmVar -> [(LlvmVar,LlvmVar)] ->
[MetaAnnot] -> Named Terminator
outputSwitch op dflt targets metas = Do $ AST.Switch op' dflt' targets' metas'
where op' = llvmVarToOperand op
dflt' = llvmVarToName dflt
targets' =
map (\(con, name) -> (llvmVarToConstant con, llvmVarToName name))
targets
metas' = outputMetaAnnots metas
outputAsm :: LMString -> LMString -> LlvmType -> [LlvmVar] ->
Bool -> Bool -> IA.InlineAssembly
outputAsm asm constraints rty vars sideeffect alignstack =
IA.InlineAssembly {
IA.type' = llvmTypeToType rty,
IA.assembly = unpackFS asm,
IA.constraints = unpackFS constraints,
IA.hasSideEffects = sideeffect,
IA.alignStack= alignstack,
IA.dialect = IA.ATTDialect
}
-- Get a value from a vector
outputExtract :: LlvmVar -> LlvmVar -> [MetaAnnot] -> Named Instruction
outputExtract vec idx metas = Do $ ExtractElement vec' idx' metas'
where vec' = llvmVarToOperand vec
idx' = llvmVarToOperand idx
metas' = outputMetaAnnots metas
-- Insert a value into a vector
outputInsert :: LlvmVar -> LlvmVar -> LlvmVar ->
[MetaAnnot] -> Named Instruction
outputInsert vec elt idx metas = Do $ InsertElement vec' elt' idx' metas'
where vec' = llvmVarToOperand vec
elt' = llvmVarToOperand elt
idx' = llvmVarToOperand idx
metas' = outputMetaAnnots metas
outputMetaAnnots :: [MetaAnnot] -> InstructionMetadata
outputMetaAnnots metas = (concat . map (outputMetaAnnot )) metas
outputMetaAnnot :: MetaAnnot -> InstructionMetadata
outputMetaAnnot (MetaAnnot str expr) =
[(unpackFS str, metaExprToMetadataNode expr)] | a-ford/notghc | Llvm/CodeOutput.hs | bsd-3-clause | 22,098 | 0 | 18 | 6,384 | 4,929 | 2,533 | 2,396 | 370 | 18 |
{-|
Module : Preliminaries
Copyright : © Yghor Kerscher, 2016
Licence : BSD-3
Maintainer : [email protected]
Stability : experimental
The Haskell Report specifies the <https://www.haskell.org/onlinereport/standard-prelude.html Prelude> with a minimal set of definitions that are always available in scope for application writers. Due to its simplicity and frugality, multiple alternatives and support libraries were devised to improve upon it, including:
* <https://github.com/snoyberg/mono-traversable/tree/master/classy-prelude classy-prelude>
* <https://github.com/nikita-volkov/base-prelude base-prelude>
* <https://github.com/snoyberg/basic-prelude basic-prelude>
* <https://github.com/ekmett/prelude-extras prelude-extras>
* <https://github.com/sdiehl/protolude protolude>
@Preliminaries@ is one such alternative and builds upon <https://hackage.haskell.org/package/classy-prelude-0.12.8 classy-prelude>, with the following functionality out-of-the-box:
* Data manipulation — i.e. <https://github.com/aelve/microlens microlens>
* Streaming
* Concurrency
* Parallelism
* Read-only, write-only and read-write environments — i.e. <https://github.com/ekmett/mtl mtl>
To use it, put the following in your @.cabal@ file, ignoring the “…” for omitted parts:
@
…
default-extensions: NoImplicitPrelude
build-depends: preliminaries >= 0.1.6 && < 1
@
And on each file, add @import Preliminaries@.
You might also want to look at this project’s Cabal file to check on useful GHC extensions to enable alongside this change.
In case something does not build or you find other unpleasant aspects of the library, please contact the maintainer.
-}
module Preliminaries
( -- * Data manipulation
{- |
Lenses provide unified and first-class means to access and modify data structures. 'Lens.Micro.Platform', included here, provides a lightweight alternative to the much larger 'Control.Lens' module, while remaining for the most part compatible.
Use <http://hackage.haskell.org/package/lens-tutorial-1.0.1/docs/Control-Lens-Tutorial.html this tutorial> as an introduction, minding the slightly different module names.
-}
module Lens.Micro.Platform
, module Lens.Micro.Contra
-- * Concurrency
{- |
Structure programs so that different threads are controlled independently. Whenever you need to have distinct functionality happening “at the same time”, you probably want to use the functionality here. The core 'Async' functionality is provided by 'ClassyPrelude'. Modules below provide helpers to execute things asynchronously in streaming 'Conduit's and a transactional queue to transfer data between threads.
-}
, module Data.Conduit.Async
, module Data.Conduit.TQueue
-- * Parallelism
{- |
Using multiple available resources in a device to compute a result is what parallelism is about. Whenever you want to chop your data so that many cores calculate parts of it and bring about a result, you want the imports here.
'Control.Monad.Par' provides fine-grained control, while 'Control.Monad.Parallel' provides a simple interface to create 'Control.Parallel.Strategies' to parallelise execution. In general it's easier to start with 'Parallel' and switch to 'Par' when more control is needed.
Since the names used by both modules are similar, this module prefixes `par` to all 'Control.Monad.Par' functions that would conflict with 'Control.Parallel'.
-}
, module Control.Monad.Par
, parFork
, parNew
, parNewFull
, parGet
, parPut
, parSpawn
, parParMap
, module Control.Monad.Parallel
, module Control.Parallel
, module Control.Parallel.Strategies
, thru
-- * Environments
{- |
If your programs end up repeatedly passing parameters around for configuration, state or logging, you will benefit from the monads below.
- 'Control.Monad.Reader' provides a read-only environment, useful to ensure configuration invariants are kept.
- 'Control.Monad.State.Lazy' helps deal with scenarios where a variable is passed around many functions to “update” its state.
- 'Control.Monad.Writer.Lazy' provides a write-only environment, useful for logging and auditing purposes.
-}
, module Control.Monad.Reader
, module Control.Monad.State.Lazy
, module Control.Monad.Writer.Lazy
-- * System interface
{- |
Terminate your programs with 'exitFailure' or 'exitSuccess'.
You should ensure any scarce resources that would otherwise outlive program termination are freed with appropriate 'Control.Exception.Safe' functions such as 'onException', 'bracket', 'bracket_', 'finally', 'withException', 'bracketOnError' or 'bracketOnError_'.
-}
, module System.Environment
, getEnvironmentMap
, module System.Exit
-- * Re-exports
, module ClassyPrelude.Conduit
, module Data.Biapplicative
, module Data.Bifoldable
, module Data.Bitraversable
, module Data.MonoTraversable.Instances
, module Data.Default
, module Data.String.Conversions
-- * Utilities
, type ($)
)
where
import Control.Monad.Reader (MonadReader, ask, asks, ReaderT (..), Reader, runReaderT, runReader)
import Control.Monad.State.Lazy (MonadState, get, put, modify, StateT(..), State, runStateT, runState)
import Control.Monad.Writer.Lazy (MonadWriter, tell, listen, listens, WriterT(..), Writer, runWriterT, runWriter)
import Control.Monad.Par as Par
import Control.Monad.Par.Class (ParFuture)
import Control.Monad.Par (Par, runPar, runParIO, IVar, parMapM, parMapReduceRange, InclusiveRange(..), parFor)
import Control.Monad.Parallel (MonadFork, forkExec)
import Control.Parallel
import Control.Parallel.Strategies as Strategies
import Control.Parallel.Strategies
( Strategy, withStrategy
, rseq, rdeepseq
, rpar, rparWith
, evalTraversable, parTraversable, parMap
, Eval, runEval
)
import ClassyPrelude.Conduit
import Data.Biapplicative (Biapplicative, bipure, (<<*>>))
import Data.Bifoldable (Bifoldable, bifoldr, bifold, bifoldMap, bitraverse_, bisequenceA_, bifor_)
import Data.Bitraversable (Bitraversable, bitraverse, bisequenceA, bifor)
import Data.Conduit.Async
import Data.Conduit.TQueue
import Data.Default
import Data.MonoTraversable.Instances ()
import Data.String.Conversions (ConvertibleStrings, cs)
import Lens.Micro.Platform
import Lens.Micro.Contra
import qualified System.Environment as SE
import System.Environment (getEnv, lookupEnv, setEnv, unsetEnv)
import System.Exit (exitFailure, exitSuccess)
parFork :: Par () -> Par ()
parFork = Par.fork
parNew :: Par (IVar a)
parNew = Par.new
parNewFull :: NFData a => a -> Par (IVar a)
parNewFull = Par.newFull
parGet :: IVar a -> Par a
parGet = Par.get
parPut :: NFData a => IVar a -> a -> Par ()
parPut = Par.put
parSpawn :: NFData a => Par a -> Par (IVar a)
parSpawn = Par.spawn
parParMap :: (Traversable t, NFData b, ParFuture iv p) => (a -> b) -> t a -> p (t b)
parParMap = Par.parMap
-- | A synonym for 'Strategies.using'.
thru :: a -> Strategy a -> a
x `thru` strat = x `Strategies.using` strat
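-- An illustrative sketch (not exported; the name and numbers are arbitrary):
-- force a list in parallel by pushing it through a 'Strategy' with 'thru'.
_thruSketch :: [Int]
_thruSketch = map (* 2) [1 .. 1000 :: Int] `thru` parTraversable rdeepseq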
-- | Retrieves the current list of environment variables as a 'Map' of keys for variable names and values for current assignment of each variable.
--
-- This is a single action. If you need to keep this structure in sync with system environment, it's your responsibility to call it again. Consider either calling a specific variable with 'getEnv' when you need it, or keep this structure in a 'TVar' and refresh it manually.
getEnvironmentMap :: IO (Map String String)
getEnvironmentMap = SE.getEnvironment >>= pure . mapFromList
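-- An illustrative sketch (not exported): take one snapshot with
-- 'getEnvironmentMap' and look a single variable up in it; the name "HOME"
-- is only an example.
_lookupHomeSketch :: IO (Maybe String)
_lookupHomeSketch = lookup ("HOME" :: String) <$> getEnvironmentMap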
-- | This allows you to avoid parentheses in type declarations:
--
-- > f :: h (g (f a b)) -> g (h (f a b))
-- > f :: h $ g $ f a b -> g $ h $ f a b
type f $ x = f x
| kerscher/preliminaries | source/Preliminaries.hs | bsd-3-clause | 7,637 | 0 | 10 | 1,156 | 901 | 562 | 339 | -1 | -1 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.SBV.BitVectors.Model
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Instance declarations for our symbolic world
-----------------------------------------------------------------------------
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE Rank2Types #-}
module Data.SBV.BitVectors.Model (
Mergeable(..), EqSymbolic(..), OrdSymbolic(..), SDivisible(..), Uninterpreted(..), SIntegral
, ite, iteLazy, sBranch, sbvTestBit, sbvPopCount, setBitTo, sbvShiftLeft, sbvShiftRight, sbvSignedShiftArithRight
, sbvRotateLeft, sbvRotateRight, mkUninterpreted
, allEqual, allDifferent, inRange, sElem, oneIf, blastBE, blastLE, fullAdder, fullMultiplier
, lsb, msb, genVar, genVar_, forall, forall_, exists, exists_
, constrain, pConstrain, sBool, sBools, sWord8, sWord8s, sWord16, sWord16s, sWord32
, sWord32s, sWord64, sWord64s, sInt8, sInt8s, sInt16, sInt16s, sInt32, sInt32s, sInt64
, sInt64s, sInteger, sIntegers, sReal, sReals, toSReal, sFloat, sFloats, sDouble, sDoubles, slet
, fusedMA
, liftQRem, liftDMod
)
where
import Control.Monad (when, liftM)
import Data.Array (Array, Ix, listArray, elems, bounds, rangeSize)
import Data.Bits (Bits(..))
import Data.Int (Int8, Int16, Int32, Int64)
import Data.List (genericLength, genericIndex, unzip4, unzip5, unzip6, unzip7, intercalate)
import Data.Maybe (fromMaybe)
import Data.Word (Word8, Word16, Word32, Word64)
import Test.QuickCheck (Testable(..), Arbitrary(..))
import qualified Test.QuickCheck as QC (whenFail)
import qualified Test.QuickCheck.Monadic as QC (monadicIO, run)
import System.Random
import Data.SBV.BitVectors.AlgReals
import Data.SBV.BitVectors.Data
import Data.SBV.Utils.Boolean
-- The following two imports are only needed because of the doctest expressions we have. Sigh..
-- It might be a good idea to reorg some of the content to avoid this.
import Data.SBV.Provers.Prover (isSBranchFeasibleInState, isVacuous, prove)
import Data.SBV.SMT.SMT (ThmResult)
-- | Newer versions of GHC (starting with 7.8, I think) distinguish between the FiniteBits and Bits classes.
-- We should really use finiteBitSize for SBV, which would make things better. In the interim, just work
-- around the pesky warnings.
ghcBitSize :: Bits a => a -> Int
#if __GLASGOW_HASKELL__ >= 708
ghcBitSize x = maybe (error "SBV.ghcBitSize: Unexpected non-finite usage!") id (bitSizeMaybe x)
#else
ghcBitSize = bitSize
#endif
noUnint :: String -> a
noUnint x = error $ "Unexpected operation called on uninterpreted value: " ++ show x
noUnint2 :: String -> String -> a
noUnint2 x y = error $ "Unexpected binary operation called on uninterpreted values: " ++ show (x, y)
liftSym1 :: (State -> Kind -> SW -> IO SW) -> (AlgReal -> AlgReal) -> (Integer -> Integer) -> (Float -> Float) -> (Double -> Double) -> SBV b -> SBV b
liftSym1 _ opCR opCI opCF opCD (SBV k (Left a)) = SBV k $ Left $ mapCW opCR opCI opCF opCD noUnint a
liftSym1 opS _ _ _ _ a@(SBV k _) = SBV k $ Right $ cache c
where c st = do swa <- sbvToSW st a
opS st k swa
liftSW2 :: (State -> Kind -> SW -> SW -> IO SW) -> Kind -> SBV a -> SBV b -> Cached SW
liftSW2 opS k a b = cache c
where c st = do sw1 <- sbvToSW st a
sw2 <- sbvToSW st b
opS st k sw1 sw2
liftSym2 :: (State -> Kind -> SW -> SW -> IO SW) -> (CW -> CW -> Bool) -> (AlgReal -> AlgReal -> AlgReal) -> (Integer -> Integer -> Integer) -> (Float -> Float -> Float) -> (Double -> Double -> Double) -> SBV b -> SBV b -> SBV b
liftSym2 _ okCW opCR opCI opCF opCD (SBV k (Left a)) (SBV _ (Left b)) | okCW a b = SBV k $ Left $ mapCW2 opCR opCI opCF opCD noUnint2 a b
liftSym2 opS _ _ _ _ _ a@(SBV k _) b = SBV k $ Right $ liftSW2 opS k a b
liftSym2B :: (State -> Kind -> SW -> SW -> IO SW) -> (CW -> CW -> Bool) -> (AlgReal -> AlgReal -> Bool) -> (Integer -> Integer -> Bool) -> (Float -> Float -> Bool) -> (Double -> Double -> Bool) -> SBV b -> SBV b -> SBool
liftSym2B _ okCW opCR opCI opCF opCD (SBV _ (Left a)) (SBV _ (Left b)) | okCW a b = literal (liftCW2 opCR opCI opCF opCD noUnint2 a b)
liftSym2B opS _ _ _ _ _ a b = SBV KBool $ Right $ liftSW2 opS KBool a b
liftSym1Bool :: (State -> Kind -> SW -> IO SW) -> (Bool -> Bool) -> SBool -> SBool
liftSym1Bool _ opC (SBV _ (Left a)) = literal $ opC $ cwToBool a
liftSym1Bool opS _ a = SBV KBool $ Right $ cache c
where c st = do sw <- sbvToSW st a
opS st KBool sw
liftSym2Bool :: (State -> Kind -> SW -> SW -> IO SW) -> (Bool -> Bool -> Bool) -> SBool -> SBool -> SBool
liftSym2Bool _ opC (SBV _ (Left a)) (SBV _ (Left b)) = literal (cwToBool a `opC` cwToBool b)
liftSym2Bool opS _ a b = SBV KBool $ Right $ cache c
where c st = do sw1 <- sbvToSW st a
sw2 <- sbvToSW st b
opS st KBool sw1 sw2
mkSymOpSC :: (SW -> SW -> Maybe SW) -> Op -> State -> Kind -> SW -> SW -> IO SW
mkSymOpSC shortCut op st k a b = maybe (newExpr st k (SBVApp op [a, b])) return (shortCut a b)
mkSymOp :: Op -> State -> Kind -> SW -> SW -> IO SW
mkSymOp = mkSymOpSC (const (const Nothing))
mkSymOp1SC :: (SW -> Maybe SW) -> Op -> State -> Kind -> SW -> IO SW
mkSymOp1SC shortCut op st k a = maybe (newExpr st k (SBVApp op [a])) return (shortCut a)
mkSymOp1 :: Op -> State -> Kind -> SW -> IO SW
mkSymOp1 = mkSymOp1SC (const Nothing)
-- Symbolic-Word class instances
-- | Generate a finite symbolic bitvector, named
genVar :: (Random a, SymWord a) => Maybe Quantifier -> Kind -> String -> Symbolic (SBV a)
genVar q k = mkSymSBV q k . Just
-- | Generate a finite symbolic bitvector, unnamed
genVar_ :: (Random a, SymWord a) => Maybe Quantifier -> Kind -> Symbolic (SBV a)
genVar_ q k = mkSymSBV q k Nothing
-- | Generate a finite constant bitvector
genLiteral :: Integral a => Kind -> a -> SBV b
genLiteral k = SBV k . Left . mkConstCW k
-- | Convert a constant to an integral value
genFromCW :: Integral a => CW -> a
genFromCW (CW _ (CWInteger x)) = fromInteger x
genFromCW c = error $ "genFromCW: Unsupported non-integral value: " ++ show c
-- | Generically make a symbolic var
genMkSymVar :: (Random a, SymWord a) => Kind -> Maybe Quantifier -> Maybe String -> Symbolic (SBV a)
genMkSymVar k mbq Nothing = genVar_ mbq k
genMkSymVar k mbq (Just s) = genVar mbq k s
instance SymWord Bool where
mkSymWord = genMkSymVar KBool
literal x = genLiteral KBool (if x then (1::Integer) else 0)
fromCW = cwToBool
mbMaxBound = Just maxBound
mbMinBound = Just minBound
instance SymWord Word8 where
mkSymWord = genMkSymVar (KBounded False 8)
literal = genLiteral (KBounded False 8)
fromCW = genFromCW
mbMaxBound = Just maxBound
mbMinBound = Just minBound
instance SymWord Int8 where
mkSymWord = genMkSymVar (KBounded True 8)
literal = genLiteral (KBounded True 8)
fromCW = genFromCW
mbMaxBound = Just maxBound
mbMinBound = Just minBound
instance SymWord Word16 where
mkSymWord = genMkSymVar (KBounded False 16)
literal = genLiteral (KBounded False 16)
fromCW = genFromCW
mbMaxBound = Just maxBound
mbMinBound = Just minBound
instance SymWord Int16 where
mkSymWord = genMkSymVar (KBounded True 16)
literal = genLiteral (KBounded True 16)
fromCW = genFromCW
mbMaxBound = Just maxBound
mbMinBound = Just minBound
instance SymWord Word32 where
mkSymWord = genMkSymVar (KBounded False 32)
literal = genLiteral (KBounded False 32)
fromCW = genFromCW
mbMaxBound = Just maxBound
mbMinBound = Just minBound
instance SymWord Int32 where
mkSymWord = genMkSymVar (KBounded True 32)
literal = genLiteral (KBounded True 32)
fromCW = genFromCW
mbMaxBound = Just maxBound
mbMinBound = Just minBound
instance SymWord Word64 where
mkSymWord = genMkSymVar (KBounded False 64)
literal = genLiteral (KBounded False 64)
fromCW = genFromCW
mbMaxBound = Just maxBound
mbMinBound = Just minBound
instance SymWord Int64 where
mkSymWord = genMkSymVar (KBounded True 64)
literal = genLiteral (KBounded True 64)
fromCW = genFromCW
mbMaxBound = Just maxBound
mbMinBound = Just minBound
instance SymWord Integer where
mkSymWord = genMkSymVar KUnbounded
literal = SBV KUnbounded . Left . mkConstCW KUnbounded
fromCW = genFromCW
mbMaxBound = Nothing
mbMinBound = Nothing
instance SymWord AlgReal where
mkSymWord = genMkSymVar KReal
literal = SBV KReal . Left . CW KReal . CWAlgReal
fromCW (CW _ (CWAlgReal a)) = a
fromCW c = error $ "SymWord.AlgReal: Unexpected non-real value: " ++ show c
-- AlgReal needs its own definition of isConcretely
-- to make sure we avoid using unimplementable Haskell functions
isConcretely (SBV KReal (Left (CW KReal (CWAlgReal v)))) p
| isExactRational v = p v
isConcretely _ _ = False
mbMaxBound = Nothing
mbMinBound = Nothing
instance SymWord Float where
mkSymWord = genMkSymVar KFloat
literal = SBV KFloat . Left . CW KFloat . CWFloat
fromCW (CW _ (CWFloat a)) = a
fromCW c = error $ "SymWord.Float: Unexpected non-float value: " ++ show c
-- For Float, we conservatively return 'False' for isConcretely. The reason is that
  -- this function is used for optimizations when only one of the arguments is concrete,
-- and in the presence of NaN's it would be incorrect to do any optimization
isConcretely _ _ = False
mbMaxBound = Nothing
mbMinBound = Nothing
instance SymWord Double where
mkSymWord = genMkSymVar KDouble
literal = SBV KDouble . Left . CW KDouble . CWDouble
fromCW (CW _ (CWDouble a)) = a
fromCW c = error $ "SymWord.Double: Unexpected non-double value: " ++ show c
-- For Double, we conservatively return 'False' for isConcretely. The reason is that
  -- this function is used for optimizations when only one of the arguments is concrete,
-- and in the presence of NaN's it would be incorrect to do any optimization
isConcretely _ _ = False
mbMaxBound = Nothing
mbMinBound = Nothing
------------------------------------------------------------------------------------
-- * Smart constructors for creating symbolic values. These are not strictly
-- necessary, as they are mere aliases for 'symbolic' and 'symbolics', but
-- they nonetheless make programming easier.
------------------------------------------------------------------------------------
-- | Declare an 'SBool'
sBool :: String -> Symbolic SBool
sBool = symbolic
-- | Declare a list of 'SBool's
sBools :: [String] -> Symbolic [SBool]
sBools = symbolics
-- | Declare an 'SWord8'
sWord8 :: String -> Symbolic SWord8
sWord8 = symbolic
-- | Declare a list of 'SWord8's
sWord8s :: [String] -> Symbolic [SWord8]
sWord8s = symbolics
-- | Declare an 'SWord16'
sWord16 :: String -> Symbolic SWord16
sWord16 = symbolic
-- | Declare a list of 'SWord16's
sWord16s :: [String] -> Symbolic [SWord16]
sWord16s = symbolics
-- | Declare an 'SWord32'
sWord32 :: String -> Symbolic SWord32
sWord32 = symbolic
-- | Declare a list of 'SWord32's
sWord32s :: [String] -> Symbolic [SWord32]
sWord32s = symbolics
-- | Declare an 'SWord64'
sWord64 :: String -> Symbolic SWord64
sWord64 = symbolic
-- | Declare a list of 'SWord64's
sWord64s :: [String] -> Symbolic [SWord64]
sWord64s = symbolics
-- | Declare an 'SInt8'
sInt8 :: String -> Symbolic SInt8
sInt8 = symbolic
-- | Declare a list of 'SInt8's
sInt8s :: [String] -> Symbolic [SInt8]
sInt8s = symbolics
-- | Declare an 'SInt16'
sInt16 :: String -> Symbolic SInt16
sInt16 = symbolic
-- | Declare a list of 'SInt16's
sInt16s :: [String] -> Symbolic [SInt16]
sInt16s = symbolics
-- | Declare an 'SInt32'
sInt32 :: String -> Symbolic SInt32
sInt32 = symbolic
-- | Declare a list of 'SInt32's
sInt32s :: [String] -> Symbolic [SInt32]
sInt32s = symbolics
-- | Declare an 'SInt64'
sInt64 :: String -> Symbolic SInt64
sInt64 = symbolic
-- | Declare a list of 'SInt64's
sInt64s :: [String] -> Symbolic [SInt64]
sInt64s = symbolics
-- | Declare an 'SInteger'
sInteger:: String -> Symbolic SInteger
sInteger = symbolic
-- | Declare a list of 'SInteger's
sIntegers :: [String] -> Symbolic [SInteger]
sIntegers = symbolics
-- | Declare an 'SReal'
sReal:: String -> Symbolic SReal
sReal = symbolic
-- | Declare a list of 'SReal's
sReals :: [String] -> Symbolic [SReal]
sReals = symbolics
-- | Declare an 'SFloat'
sFloat :: String -> Symbolic SFloat
sFloat = symbolic
-- | Declare a list of 'SFloat's
sFloats :: [String] -> Symbolic [SFloat]
sFloats = symbolics
-- | Declare an 'SDouble'
sDouble :: String -> Symbolic SDouble
sDouble = symbolic
-- | Declare a list of 'SDouble's
sDoubles :: [String] -> Symbolic [SDouble]
sDoubles = symbolics
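-- An illustrative sketch (not exported): declaring a couple of symbolic
-- values in the 'Symbolic' monad; the variable names are arbitrary.
_declSketch :: Symbolic SBool
_declSketch = do
  x <- sWord8 "x"
  y <- sWord8 "y"
  return (x .<= y)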
-- | Promote an SInteger to an SReal
toSReal :: SInteger -> SReal
toSReal x
| Just i <- unliteral x = literal $ fromInteger i
| True = SBV KReal (Right (cache y))
where y st = do xsw <- sbvToSW st x
newExpr st KReal (SBVApp (Extract 0 0) [xsw]) -- special encoding!
-- | Symbolic Equality. Note that we can't use Haskell's 'Eq' class since Haskell insists on returning Bool.
-- Comparing symbolic values will necessarily return a symbolic value.
--
-- Minimal complete definition: '.=='
infix 4 .==, ./=
class EqSymbolic a where
(.==), (./=) :: a -> a -> SBool
-- minimal complete definition: .==
x ./= y = bnot (x .== y)
-- | Symbolic Comparisons. Similar to 'Eq', we cannot implement Haskell's 'Ord' class
-- since there is no way to return an 'Ordering' value from a symbolic comparison.
-- Furthermore, 'OrdSymbolic' requires 'Mergeable' to implement if-then-else, for the
-- benefit of implementing symbolic versions of 'max' and 'min' functions.
--
-- Minimal complete definition: '.<'
infix 4 .<, .<=, .>, .>=
class (Mergeable a, EqSymbolic a) => OrdSymbolic a where
(.<), (.<=), (.>), (.>=) :: a -> a -> SBool
smin, smax :: a -> a -> a
-- minimal complete definition: .<
a .<= b = a .< b ||| a .== b
a .> b = b .< a
a .>= b = b .<= a
a `smin` b = ite (a .<= b) a b
a `smax` b = ite (a .<= b) b a
{- We can't have a generic instance of the form:
instance Eq a => EqSymbolic a where
x .== y = if x == y then true else false
even if we're willing to allow Flexible/undecidable instances.
This is because allowing it would imply EqSymbolic (SBV a),
since (SBV a) has to be Eq as it must be a Num. But this wouldn't be
the right choice, as the Eq instance is bogus for SBV
for natural reasons.
-}
instance EqSymbolic (SBV a) where
(.==) = liftSym2B (mkSymOpSC (eqOpt trueSW) Equal) rationalCheck (==) (==) (==) (==)
(./=) = liftSym2B (mkSymOpSC (eqOpt falseSW) NotEqual) rationalCheck (/=) (/=) (/=) (/=)
-- | eqOpt says the references are to the same SW, thus we can optimize. Note that
-- we explicitly disallow KFloat/KDouble here. Why? Because it's *NOT* true that
-- NaN == NaN, NaN >= NaN, and so-forth. So, we have to make sure we don't optimize
-- floats and doubles, in case the argument turns out to be NaN.
eqOpt :: SW -> SW -> SW -> Maybe SW
eqOpt w x y = case kindOf x of
KFloat -> Nothing
KDouble -> Nothing
_ -> if x == y then Just w else Nothing
instance SymWord a => OrdSymbolic (SBV a) where
x .< y
| Just mb <- mbMaxBound, x `isConcretely` (== mb) = false
| Just mb <- mbMinBound, y `isConcretely` (== mb) = false
| True = liftSym2B (mkSymOpSC (eqOpt falseSW) LessThan) rationalCheck (<) (<) (<) (<) x y
x .<= y
| Just mb <- mbMinBound, x `isConcretely` (== mb) = true
| Just mb <- mbMaxBound, y `isConcretely` (== mb) = true
| True = liftSym2B (mkSymOpSC (eqOpt trueSW) LessEq) rationalCheck (<=) (<=) (<=) (<=) x y
x .> y
| Just mb <- mbMinBound, x `isConcretely` (== mb) = false
| Just mb <- mbMaxBound, y `isConcretely` (== mb) = false
| True = liftSym2B (mkSymOpSC (eqOpt falseSW) GreaterThan) rationalCheck (>) (>) (>) (>) x y
x .>= y
| Just mb <- mbMaxBound, x `isConcretely` (== mb) = true
| Just mb <- mbMinBound, y `isConcretely` (== mb) = true
| True = liftSym2B (mkSymOpSC (eqOpt trueSW) GreaterEq) rationalCheck (>=) (>=) (>=) (>=) x y
-- Bool
instance EqSymbolic Bool where
x .== y = if x == y then true else false
-- Lists
instance EqSymbolic a => EqSymbolic [a] where
[] .== [] = true
(x:xs) .== (y:ys) = x .== y &&& xs .== ys
_ .== _ = false
instance OrdSymbolic a => OrdSymbolic [a] where
[] .< [] = false
[] .< _ = true
_ .< [] = false
(x:xs) .< (y:ys) = x .< y ||| (x .== y &&& xs .< ys)
-- Maybe
instance EqSymbolic a => EqSymbolic (Maybe a) where
Nothing .== Nothing = true
Just a .== Just b = a .== b
_ .== _ = false
instance (OrdSymbolic a) => OrdSymbolic (Maybe a) where
Nothing .< Nothing = false
Nothing .< _ = true
Just _ .< Nothing = false
Just a .< Just b = a .< b
-- Either
instance (EqSymbolic a, EqSymbolic b) => EqSymbolic (Either a b) where
Left a .== Left b = a .== b
Right a .== Right b = a .== b
_ .== _ = false
instance (OrdSymbolic a, OrdSymbolic b) => OrdSymbolic (Either a b) where
Left a .< Left b = a .< b
Left _ .< Right _ = true
Right _ .< Left _ = false
Right a .< Right b = a .< b
-- 2-Tuple
instance (EqSymbolic a, EqSymbolic b) => EqSymbolic (a, b) where
(a0, b0) .== (a1, b1) = a0 .== a1 &&& b0 .== b1
instance (OrdSymbolic a, OrdSymbolic b) => OrdSymbolic (a, b) where
(a0, b0) .< (a1, b1) = a0 .< a1 ||| (a0 .== a1 &&& b0 .< b1)
-- 3-Tuple
instance (EqSymbolic a, EqSymbolic b, EqSymbolic c) => EqSymbolic (a, b, c) where
(a0, b0, c0) .== (a1, b1, c1) = (a0, b0) .== (a1, b1) &&& c0 .== c1
instance (OrdSymbolic a, OrdSymbolic b, OrdSymbolic c) => OrdSymbolic (a, b, c) where
(a0, b0, c0) .< (a1, b1, c1) = (a0, b0) .< (a1, b1) ||| ((a0, b0) .== (a1, b1) &&& c0 .< c1)
-- 4-Tuple
instance (EqSymbolic a, EqSymbolic b, EqSymbolic c, EqSymbolic d) => EqSymbolic (a, b, c, d) where
(a0, b0, c0, d0) .== (a1, b1, c1, d1) = (a0, b0, c0) .== (a1, b1, c1) &&& d0 .== d1
instance (OrdSymbolic a, OrdSymbolic b, OrdSymbolic c, OrdSymbolic d) => OrdSymbolic (a, b, c, d) where
(a0, b0, c0, d0) .< (a1, b1, c1, d1) = (a0, b0, c0) .< (a1, b1, c1) ||| ((a0, b0, c0) .== (a1, b1, c1) &&& d0 .< d1)
-- 5-Tuple
instance (EqSymbolic a, EqSymbolic b, EqSymbolic c, EqSymbolic d, EqSymbolic e) => EqSymbolic (a, b, c, d, e) where
(a0, b0, c0, d0, e0) .== (a1, b1, c1, d1, e1) = (a0, b0, c0, d0) .== (a1, b1, c1, d1) &&& e0 .== e1
instance (OrdSymbolic a, OrdSymbolic b, OrdSymbolic c, OrdSymbolic d, OrdSymbolic e) => OrdSymbolic (a, b, c, d, e) where
(a0, b0, c0, d0, e0) .< (a1, b1, c1, d1, e1) = (a0, b0, c0, d0) .< (a1, b1, c1, d1) ||| ((a0, b0, c0, d0) .== (a1, b1, c1, d1) &&& e0 .< e1)
-- 6-Tuple
instance (EqSymbolic a, EqSymbolic b, EqSymbolic c, EqSymbolic d, EqSymbolic e, EqSymbolic f) => EqSymbolic (a, b, c, d, e, f) where
(a0, b0, c0, d0, e0, f0) .== (a1, b1, c1, d1, e1, f1) = (a0, b0, c0, d0, e0) .== (a1, b1, c1, d1, e1) &&& f0 .== f1
instance (OrdSymbolic a, OrdSymbolic b, OrdSymbolic c, OrdSymbolic d, OrdSymbolic e, OrdSymbolic f) => OrdSymbolic (a, b, c, d, e, f) where
(a0, b0, c0, d0, e0, f0) .< (a1, b1, c1, d1, e1, f1) = (a0, b0, c0, d0, e0) .< (a1, b1, c1, d1, e1)
||| ((a0, b0, c0, d0, e0) .== (a1, b1, c1, d1, e1) &&& f0 .< f1)
-- 7-Tuple
instance (EqSymbolic a, EqSymbolic b, EqSymbolic c, EqSymbolic d, EqSymbolic e, EqSymbolic f, EqSymbolic g) => EqSymbolic (a, b, c, d, e, f, g) where
(a0, b0, c0, d0, e0, f0, g0) .== (a1, b1, c1, d1, e1, f1, g1) = (a0, b0, c0, d0, e0, f0) .== (a1, b1, c1, d1, e1, f1) &&& g0 .== g1
instance (OrdSymbolic a, OrdSymbolic b, OrdSymbolic c, OrdSymbolic d, OrdSymbolic e, OrdSymbolic f, OrdSymbolic g) => OrdSymbolic (a, b, c, d, e, f, g) where
(a0, b0, c0, d0, e0, f0, g0) .< (a1, b1, c1, d1, e1, f1, g1) = (a0, b0, c0, d0, e0, f0) .< (a1, b1, c1, d1, e1, f1)
||| ((a0, b0, c0, d0, e0, f0) .== (a1, b1, c1, d1, e1, f1) &&& g0 .< g1)
-- | Symbolic Numbers. This is a simple class that simply incorporates all number like
-- base types together, simplifying writing polymorphic type-signatures that work for all
-- symbolic numbers, such as 'SWord8', 'SInt8' etc. For instance, we can write a generic
-- list-minimum function as follows:
--
-- @
-- mm :: SIntegral a => [SBV a] -> SBV a
-- mm = foldr1 (\a b -> ite (a .<= b) a b)
-- @
--
-- It is similar to the standard 'Integral' class, except ranging over symbolic instances.
class (SymWord a, Num a, Bits a) => SIntegral a
-- 'SIntegral' Instances, including all possible variants except 'Bool', since booleans
-- are not numbers.
instance SIntegral Word8
instance SIntegral Word16
instance SIntegral Word32
instance SIntegral Word64
instance SIntegral Int8
instance SIntegral Int16
instance SIntegral Int32
instance SIntegral Int64
instance SIntegral Integer
-- Boolean combinators
instance Boolean SBool where
true = literal True
false = literal False
bnot b | b `isConcretely` (== False) = true
| b `isConcretely` (== True) = false
| True = liftSym1Bool (mkSymOp1SC opt Not) not b
where opt x
| x == falseSW = Just trueSW
| x == trueSW = Just falseSW
| True = Nothing
a &&& b | a `isConcretely` (== False) || b `isConcretely` (== False) = false
| a `isConcretely` (== True) = b
| b `isConcretely` (== True) = a
| True = liftSym2Bool (mkSymOpSC opt And) (&&) a b
where opt x y
| x == falseSW || y == falseSW = Just falseSW
| x == trueSW = Just y
| y == trueSW = Just x
| True = Nothing
a ||| b | a `isConcretely` (== True) || b `isConcretely` (== True) = true
| a `isConcretely` (== False) = b
| b `isConcretely` (== False) = a
| True = liftSym2Bool (mkSymOpSC opt Or) (||) a b
where opt x y
| x == trueSW || y == trueSW = Just trueSW
| x == falseSW = Just y
| y == falseSW = Just x
| True = Nothing
a <+> b | a `isConcretely` (== False) = b
| b `isConcretely` (== False) = a
| a `isConcretely` (== True) = bnot b
| b `isConcretely` (== True) = bnot a
| True = liftSym2Bool (mkSymOpSC opt XOr) (<+>) a b
where opt x y
| x == y = Just falseSW
| x == falseSW = Just y
| y == falseSW = Just x
| True = Nothing
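-- As an illustrative sketch of the combinators above (hypothetical helper,
-- assuming the usual "Data.SBV" interface is in scope), a de Morgan law over
-- 'SBool' values can be stated directly with 'bnot', '&&&', and '|||':
--
-- @
-- deMorgan :: SBool -> SBool -> SBool
-- deMorgan p q = bnot (p &&& q) .== (bnot p ||| bnot q)
-- @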
-- | Returns (symbolic) true if all the elements of the given list are different.
allDifferent :: EqSymbolic a => [a] -> SBool
allDifferent (x:xs@(_:_)) = bAll (x ./=) xs &&& allDifferent xs
allDifferent _ = true
-- | Returns (symbolic) true if all the elements of the given list are the same.
allEqual :: EqSymbolic a => [a] -> SBool
allEqual (x:xs@(_:_)) = bAll (x .==) xs
allEqual _ = true
-- | Returns (symbolic) true if the argument is in range
inRange :: OrdSymbolic a => a -> (a, a) -> SBool
inRange x (y, z) = x .>= y &&& x .<= z
-- | Symbolic membership test
sElem :: EqSymbolic a => a -> [a] -> SBool
sElem x xs = bAny (.== x) xs
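-- The helpers above compose naturally. As an illustrative sketch (the name
-- below is made up for exposition; 'literal' is used to avoid relying on
-- integer literals at SBV types), a predicate stating that three bytes are
-- pairwise distinct and all lie within a closed range can be written as:
--
-- @
-- threeDistinctInRange :: SWord8 -> SWord8 -> SWord8 -> SBool
-- threeDistinctInRange x y z = allDifferent [x, y, z]
--                          &&& bAll (`inRange` (literal 10, literal 20)) [x, y, z]
-- @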
-- | Returns 1 if the boolean is true, otherwise 0.
oneIf :: ({-Num a,-} SymWord a) => SBool -> SBV a
oneIf t = ite t 1 0
-- | Predicate for optimizing word operations like (+) and (*).
isConcreteZero :: SBV a -> Bool
isConcreteZero (SBV _ (Left (CW _ (CWInteger n)))) = n == 0
isConcreteZero (SBV KReal (Left (CW KReal (CWAlgReal v)))) = isExactRational v && v == 0
isConcreteZero _ = False
-- | Predicate for optimizing word operations like (+) and (*).
isConcreteOne :: SBV a -> Bool
isConcreteOne (SBV _ (Left (CW _ (CWInteger 1)))) = True
isConcreteOne (SBV KReal (Left (CW KReal (CWAlgReal v)))) = isExactRational v && v == 1
isConcreteOne _ = False
-- | Predicate for optimizing bitwise operations.
isConcreteOnes :: SBV a -> Bool
isConcreteOnes (SBV _ (Left (CW (KBounded b w) (CWInteger n)))) =
n == (if b then -1 else bit w - 1)
isConcreteOnes (SBV _ (Left (CW KUnbounded (CWInteger n)))) = n == -1
isConcreteOnes _ = False
-- Num instance for symbolic words.
instance (Ord a, {-Num a,-} SymWord a) => Num (SBV a) where
--BH fromInteger = literal . fromIntegral
fromInteger n = error $ "fromInteger " ++ show n ++ " :: SBV a"
x + y
| isConcreteZero x = y
| isConcreteZero y = x
| True = liftSym2 (mkSymOp Plus) rationalCheck (+) (+) (+) (+) x y
x * y
| isConcreteZero x = x
| isConcreteZero y = y
| isConcreteOne x = y
| isConcreteOne y = x
| True = liftSym2 (mkSymOp Times) rationalCheck (*) (*) (*) (*) x y
x - y
| isConcreteZero y = x
| True = liftSym2 (mkSymOp Minus) rationalCheck (-) (-) (-) (-) x y
abs a
| hasSign a = ite (a .< 0) (-a) a
| True = a
signum a
| hasSign a = ite (a .< 0) (-1) (ite (a .== 0) 0 1)
| True = oneIf (a ./= 0)
negate x
| isConcreteZero x = x
| True = sbvFromInteger (kindOf x) 0 - x
instance (SymWord a, Fractional a) => Fractional (SBV a) where
fromRational = literal . fromRational
x / y = liftSym2 (mkSymOp Quot) rationalCheck (/) die (/) (/) x y
where -- should never happen
die = error "impossible: integer valued data found in Fractional instance"
-- | Define Floating instance on SBV's; only for base types that are already floating; i.e., SFloat and SDouble
-- Note that most of the methods are only supported for concrete values; we add symbolic methods as they become available in SMTLib.
-- Currently, the only symbolically available function in this class is sqrt.
instance (SymWord a, Fractional a, Floating a) => Floating (SBV a) where
pi = literal pi
exp = lift1FNS "exp" exp
log = lift1FNS "log" log
sqrt = lift1F sqrt smtLibSquareRoot
sin = lift1FNS "sin" sin
cos = lift1FNS "cos" cos
tan = lift1FNS "tan" tan
asin = lift1FNS "asin" asin
acos = lift1FNS "acos" acos
atan = lift1FNS "atan" atan
sinh = lift1FNS "sinh" sinh
cosh = lift1FNS "cosh" cosh
tanh = lift1FNS "tanh" tanh
asinh = lift1FNS "asinh" asinh
acosh = lift1FNS "acosh" acosh
atanh = lift1FNS "atanh" atanh
(**) = lift2FNS "**" (**)
logBase = lift2FNS "logBase" logBase
-- | Fused-multiply add. @fusedMA a b c = a * b + c@, for double and floating point values.
-- Note that a 'fusedMA' call will *never* be concrete, even if all the arguments are constants; since
-- we cannot guarantee the precision requirements, which is the whole reason why 'fusedMA' exists in the
-- first place. (NB. 'fusedMA' only rounds once, even though it does two operations, and hence the extra
-- precision.)
fusedMA :: (SymWord a, Floating a) => SBV a -> SBV a -> SBV a -> SBV a
fusedMA a b c = SBV k $ Right $ cache r
where k = kindOf a
r st = do swa <- sbvToSW st a
swb <- sbvToSW st b
swc <- sbvToSW st c
newExpr st k (SBVApp smtLibFusedMA [swa, swb, swc])
-- | Lift a float/double unary function, using a corresponding function in SMT-lib. We piggy-back on the uninterpreted
-- function mechanism here, as it essentially is the same as introducing this as a new function.
lift1F :: (SymWord a, Floating a) => (a -> a) -> Op -> SBV a -> SBV a
lift1F f smtOp sv
| Just v <- unliteral sv = literal $ f v
| True = SBV k $ Right $ cache c
where k = kindOf sv
c st = do swa <- sbvToSW st sv
newExpr st k (SBVApp smtOp [swa])
-- | Lift a float/double unary function, only over constants
lift1FNS :: (SymWord a, Floating a) => String -> (a -> a) -> SBV a -> SBV a
lift1FNS nm f sv
| Just v <- unliteral sv = literal $ f v
| True = error $ "SBV." ++ nm ++ ": not supported for symbolic values of type " ++ show (kindOf sv)
-- | Lift a float/double binary function, only over constants
lift2FNS :: (SymWord a, Floating a) => String -> (a -> a -> a) -> SBV a -> SBV a -> SBV a
lift2FNS nm f sv1 sv2
| Just v1 <- unliteral sv1
, Just v2 <- unliteral sv2 = literal $ f v1 v2
| True = error $ "SBV." ++ nm ++ ": not supported for symbolic values of type " ++ show (kindOf sv1)
-- Most operations on concrete rationals require a compatibility check
rationalCheck :: CW -> CW -> Bool
rationalCheck a b = case (cwVal a, cwVal b) of
(CWAlgReal x, CWAlgReal y) -> isExactRational x && isExactRational y
_ -> True
-- same as above, for SBV's
rationalSBVCheck :: SBV a -> SBV a -> Bool
rationalSBVCheck (SBV KReal (Left a)) (SBV KReal (Left b)) = rationalCheck a b
rationalSBVCheck _ _ = True
-- Some operations will never be used on Reals, but we need fillers:
noReal :: String -> AlgReal -> AlgReal -> AlgReal
noReal o a b = error $ "SBV.AlgReal." ++ o ++ ": Unexpected arguments: " ++ show (a, b)
noFloat :: String -> Float -> Float -> Float
noFloat o a b = error $ "SBV.Float." ++ o ++ ": Unexpected arguments: " ++ show (a, b)
noDouble :: String -> Double -> Double -> Double
noDouble o a b = error $ "SBV.Double." ++ o ++ ": Unexpected arguments: " ++ show (a, b)
noRealUnary :: String -> AlgReal -> AlgReal
noRealUnary o a = error $ "SBV.AlgReal." ++ o ++ ": Unexpected argument: " ++ show a
noFloatUnary :: String -> Float -> Float
noFloatUnary o a = error $ "SBV.Float." ++ o ++ ": Unexpected argument: " ++ show a
noDoubleUnary :: String -> Double -> Double
noDoubleUnary o a = error $ "SBV.Double." ++ o ++ ": Unexpected argument: " ++ show a
-- NB. In the optimizations below, use of -1 is valid as
-- -1 has all bits set to True for both signed and unsigned values
instance ({-Num a,-} Bits a, SymWord a) => Bits (SBV a) where
x .&. y
| isConcreteZero x = x
| isConcreteOnes x = y
| isConcreteZero y = y
| isConcreteOnes y = x
| True = liftSym2 (mkSymOp And) (const (const True)) (noReal ".&.") (.&.) (noFloat ".&.") (noDouble ".&.") x y
x .|. y
| isConcreteZero x = y
| isConcreteOnes x = x
| isConcreteZero y = x
| isConcreteOnes y = y
| True = liftSym2 (mkSymOp Or) (const (const True)) (noReal ".|.") (.|.) (noFloat ".|.") (noDouble ".|.") x y
x `xor` y
| isConcreteZero x = y
| isConcreteZero y = x
| True = liftSym2 (mkSymOp XOr) (const (const True)) (noReal "xor") xor (noFloat "xor") (noDouble "xor") x y
complement = liftSym1 (mkSymOp1 Not) (noRealUnary "complement") complement (noFloatUnary "complement") (noDoubleUnary "complement")
bitSize x = case kindOf x of KBounded _ w -> w
#if __GLASGOW_HASKELL__ >= 708
bitSizeMaybe x = Just $ case kindOf x of KBounded _ w -> w
#endif
isSigned x = case kindOf x of KBounded s _ -> s
bit i = 1 `shiftL` i
setBit x i = x .|. sbvFromInteger (kindOf x) (bit i)
shiftL x y
| y < 0 = shiftR x (-y)
| y == 0 = x
| True = liftSym1 (mkSymOp1 (Shl y)) (noRealUnary "shiftL") (`shiftL` y) (noFloatUnary "shiftL") (noDoubleUnary "shiftL") x
shiftR x y
| y < 0 = shiftL x (-y)
| y == 0 = x
| True = liftSym1 (mkSymOp1 (Shr y)) (noRealUnary "shiftR") (`shiftR` y) (noFloatUnary "shiftR") (noDoubleUnary "shiftR") x
rotateL x y
| y < 0 = rotateR x (-y)
| y == 0 = x
| isBounded x = let sz = ghcBitSize x in liftSym1 (mkSymOp1 (Rol (y `mod` sz))) (noRealUnary "rotateL") (rot True sz y) (noFloatUnary "rotateL") (noDoubleUnary "rotateL") x
| True = shiftL x y -- for unbounded Integers, rotateL is the same as shiftL in Haskell
rotateR x y
| y < 0 = rotateL x (-y)
| y == 0 = x
| isBounded x = let sz = ghcBitSize x in liftSym1 (mkSymOp1 (Ror (y `mod` sz))) (noRealUnary "rotateR") (rot False sz y) (noFloatUnary "rotateR") (noDoubleUnary "rotateR") x
| True = shiftR x y -- for unbounded integers, rotateR is the same as shiftR in Haskell
-- NB. testBit is *not* implementable on non-concrete symbolic words
x `testBit` i
| SBV _ (Left (CW _ (CWInteger n))) <- x = testBit n i
| True = error $ "SBV.testBit: Called on symbolic value: " ++ show x ++ ". Use sbvTestBit instead."
-- NB. popCount is *not* implementable on non-concrete symbolic words
popCount x
| SBV _ (Left (CW (KBounded _ w) (CWInteger n))) <- x = popCount (n .&. (bit w - 1))
| True = error $ "SBV.popCount: Called on symbolic value: " ++ show x ++ ". Use sbvPopCount instead."
-- Since the underlying representation is just Integers, rotations have to be careful about the bit-size
rot :: Bool -> Int -> Int -> Integer -> Integer
rot toLeft sz amt x
| sz < 2 = x
| True = norm x y' `shiftL` y .|. norm (x `shiftR` y') y
where (y, y') | toLeft = (amt `mod` sz, sz - y)
| True = (sz - y', amt `mod` sz)
norm v s = v .&. ((1 `shiftL` s) - 1)
sbvFromInteger :: Kind -> Integer -> SBV a
sbvFromInteger k n = SBV k (Left (normCW (CW k (CWInteger n))))
-- | Replacement for 'testBit'. Since 'testBit' requires a 'Bool' to be returned,
-- we cannot implement it for symbolic words. Index 0 is the least-significant bit.
sbvTestBit :: (Num a, Bits a, SymWord a) => SBV a -> Int -> SBool
sbvTestBit x i = (x .&. sbvFromInteger k (bit i)) ./= sbvFromInteger k 0
where k = kindOf x
-- | Replacement for 'popCount'. Since 'popCount' returns an 'Int', we cannot implement
-- it for symbolic words. Here, we return an 'SWord8', which can overflow when used on
-- quantities that have more than 255 bits. Currently, that's only the 'SInteger' type
-- that SBV supports, all other types are safe. Even with 'SInteger', this will only
-- overflow if there are at least 256-bits set in the number, and the smallest such
-- number is 2^256-1, which is a pretty darn big number to worry about for practical
-- purposes. In any case, we do not support 'sbvPopCount' for unbounded symbolic integers,
-- as the only possible implementation wouldn't symbolically terminate. So the only overflow
-- issue is with really-really large concrete 'SInteger' values.
sbvPopCount :: (Num a, Bits a, SymWord a) => SBV a -> SWord8
sbvPopCount x
| isReal x = error "SBV.sbvPopCount: Called on a real value"
| isConcrete x = go 0 x
| not (isBounded x) = error "SBV.sbvPopCount: Called on an infinite precision symbolic value"
| True = sum [ite b 1 0 | b <- blastLE x]
where -- concrete case
go !c 0 = c
go !c w = go (c+1) (w .&. (w-1))
-- | Generalization of 'setBit' based on a symbolic boolean. Note that 'setBit' and
-- 'clearBit' are still available on Symbolic words, this operation comes handy when
-- the condition to set/clear happens to be symbolic.
setBitTo :: (Num a, Bits a, SymWord a) => SBV a -> Int -> SBool -> SBV a
setBitTo x i b = ite b (setBit x i) (clearBit x i)
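-- As an illustrative sketch combining 'sbvTestBit', 'sbvPopCount', and
-- 'setBitTo' (hypothetical helper, for exposition only): force a byte to even
-- population-count parity by flipping its least-significant bit exactly when
-- the current count is odd:
--
-- @
-- toEvenParity :: SWord8 -> SWord8
-- toEvenParity w = setBitTo w 0 (sbvTestBit w 0 <+> oddCount)
--   where oddCount = lsb (sbvPopCount w)  -- low bit of the count is set iff the count is odd
-- @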
-- | Generalization of 'shiftL', when the shift-amount is symbolic. Since Haskell's
-- 'shiftL' only takes an 'Int' as the shift amount, it cannot be used when we have
-- a symbolic amount to shift with. The shift amount must be an unsigned quantity.
sbvShiftLeft :: (SIntegral a, SIntegral b) => SBV a -> SBV b -> SBV a
sbvShiftLeft x i
| isSigned i = error "sbvShiftLeft: shift amount should be unsigned"
| True = select [x `shiftL` k | k <- [0 .. ghcBitSize x - 1]] z i
where z = sbvFromInteger (kindOf x) 0
-- | Generalization of 'shiftR', when the shift-amount is symbolic. Since Haskell's
-- 'shiftR' only takes an 'Int' as the shift amount, it cannot be used when we have
-- a symbolic amount to shift with. The shift amount must be an unsigned quantity.
--
-- NB. If the shiftee is signed, then this is an arithmetic shift; otherwise it's logical,
-- following the usual Haskell convention. See 'sbvSignedShiftArithRight' for a variant
-- that explicitly uses the msb as the sign bit, even for unsigned underlying types.
sbvShiftRight :: (SIntegral a, SIntegral b) => SBV a -> SBV b -> SBV a
sbvShiftRight x i
| isSigned i = error "sbvShiftRight: shift amount should be unsigned"
| True = select [x `shiftR` k | k <- [0 .. ghcBitSize x - 1]] z i
where z = sbvFromInteger (kindOf x) 0
-- | Arithmetic shift-right with a symbolic unsigned shift amount. This is equivalent
-- to 'sbvShiftRight' when the argument is signed. However, if the argument is unsigned,
-- then it explicitly treats its msb as a sign-bit, and uses it as the bit that
-- gets shifted in. Useful when using the underlying unsigned bit representation to implement
-- custom signed operations. Note that there is no direct Haskell analogue of this function.
sbvSignedShiftArithRight:: (SIntegral a, SIntegral b) => SBV a -> SBV b -> SBV a
sbvSignedShiftArithRight x i
| isSigned i = error "sbvSignedShiftArithRight: shift amount should be unsigned"
| isSigned x = sbvShiftRight x i
| True = ite (msb x)
(complement (sbvShiftRight (complement x) i))
(sbvShiftRight x i)
-- | Generalization of 'rotateL', when the shift-amount is symbolic. Since Haskell's
-- 'rotateL' only takes an 'Int' as the shift amount, it cannot be used when we have
-- a symbolic amount to shift with. The shift amount must be an unsigned quantity.
sbvRotateLeft :: (SIntegral a, SIntegral b) => SBV a -> SBV b -> SBV a
sbvRotateLeft x i
| isSigned i = error "sbvRotateLeft: shift amount should be unsigned"
| True = select [x `rotateL` k | k <- [0 .. ghcBitSize x - 1]] z i
where z = sbvFromInteger (kindOf x) 0
-- | Generalization of 'rotateR', when the shift-amount is symbolic. Since Haskell's
-- 'rotateR' only takes an 'Int' as the shift amount, it cannot be used when we have
-- a symbolic amount to shift with. The shift amount must be an unsigned quantity.
sbvRotateRight :: (SIntegral a, SIntegral b) => SBV a -> SBV b -> SBV a
sbvRotateRight x i
| isSigned i = error "sbvRotateRight: shift amount should be unsigned"
| True = select [x `rotateR` k | k <- [0 .. ghcBitSize x - 1]] z i
where z = sbvFromInteger (kindOf x) 0
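-- Since the shift amount in the operations above is itself symbolic, algebraic
-- facts about shifting can be stated directly. An illustrative sketch
-- (hypothetical helper, for unsigned words):
--
-- @
-- shlThenShr :: SWord8 -> SWord8 -> SBool
-- shlThenShr x i = sbvShiftRight (sbvShiftLeft x i) i .<= x
-- @
--
-- Shifting left may drop high bits, so shifting back right by the same
-- unsigned amount can never exceed the original value under logical shifts.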
-- | Full adder. Returns the carry-out from the addition.
--
-- N.B. Only works for unsigned types. Signed arguments will be rejected.
fullAdder :: SIntegral a => SBV a -> SBV a -> (SBool, SBV a)
fullAdder a b
| isSigned a = error "fullAdder: only works on unsigned numbers"
| True = (a .> s ||| b .> s, s)
where s = a + b
-- | Full multiplier: Returns both the high-order and the low-order bits in a tuple,
-- thus fully accounting for the overflow.
--
-- N.B. Only works for unsigned types. Signed arguments will be rejected.
--
-- N.B. The higher-order bits are determined using a simple shift-add multiplier,
-- thus involving bit-blasting. It'd be naive to expect SMT solvers to deal efficiently
-- with properties involving this function, at least with the current state of the art.
fullMultiplier :: SIntegral a => SBV a -> SBV a -> (SBV a, SBV a)
fullMultiplier a b
| isSigned a = error "fullMultiplier: only works on unsigned numbers"
| True = (go (ghcBitSize a) 0 a, a*b)
where go 0 p _ = p
go n p x = let (c, p') = ite (lsb x) (fullAdder p b) (false, p)
(o, p'') = shiftIn c p'
(_, x') = shiftIn o x
in go (n-1) p'' x'
shiftIn k v = (lsb v, mask .|. (v `shiftR` 1))
where mask = ite k (bit (ghcBitSize v - 1)) 0
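-- As an illustrative sketch (hypothetical helper): the carry-out of 'fullAdder'
-- characterizes unsigned addition overflow, so a no-overflow predicate for
-- bytes is simply:
--
-- @
-- addNoOverflow :: SWord8 -> SWord8 -> SBool
-- addNoOverflow x y = bnot carry
--   where (carry, _sum) = fullAdder x y
-- @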
-- | Little-endian blasting of a word into its bits. Also see the 'FromBits' class.
blastLE :: (Num a, Bits a, SymWord a) => SBV a -> [SBool]
blastLE x
| isReal x = error "SBV.blastLE: Called on a real value"
| not (isBounded x) = error "SBV.blastLE: Called on an infinite precision value"
| True = map (sbvTestBit x) [0 .. intSizeOf x - 1]
-- | Big-endian blasting of a word into its bits. Also see the 'FromBits' class.
blastBE :: (Num a, Bits a, SymWord a) => SBV a -> [SBool]
blastBE = reverse . blastLE
-- | Least significant bit of a word, always stored at index 0.
lsb :: (Num a, Bits a, SymWord a) => SBV a -> SBool
lsb x = sbvTestBit x 0
-- | Most significant bit of a word, always stored at the last position.
msb :: (Num a, Bits a, SymWord a) => SBV a -> SBool
msb x
| isReal x = error "SBV.msb: Called on a real value"
| not (isBounded x) = error "SBV.msb: Called on an infinite precision value"
| True = sbvTestBit x (intSizeOf x - 1)
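-- As an illustrative sketch (hypothetical helper): for signed words, 'msb'
-- coincides with the sign of the value, and @head (blastBE x)@ denotes the
-- same bit:
--
-- @
-- msbIsSign :: SInt8 -> SBool
-- msbIsSign x = msb x .== (x .< literal 0)
-- @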
-- Enum instance. These instances are suitable for use with concrete values,
-- and are less useful when symbolic values are involved. Note that `fromEnum` requires
-- a concrete argument for obvious reasons. Other variants (succ, pred, [x..], etc.) are similarly
-- limited. While symbolic variants can be defined for many of these, they would just diverge,
-- as the final sizes cannot be determined statically.
instance (Show a, Bounded a, Integral a, Num a, SymWord a) => Enum (SBV a) where
succ x
| v == (maxBound :: a) = error $ "Enum.succ{" ++ showType x ++ "}: tried to take `succ' of maxBound"
| True = fromIntegral $ v + 1
where v = enumCvt "succ" x
pred x
| v == (minBound :: a) = error $ "Enum.pred{" ++ showType x ++ "}: tried to take `pred' of minBound"
| True = fromIntegral $ v - 1
where v = enumCvt "pred" x
toEnum x
| xi < fromIntegral (minBound :: a) || xi > fromIntegral (maxBound :: a)
= error $ "Enum.toEnum{" ++ showType r ++ "}: " ++ show x ++ " is out-of-bounds " ++ show (minBound :: a, maxBound :: a)
| True
= r
where xi :: Integer
xi = fromIntegral x
r :: SBV a
r = fromIntegral x
fromEnum x
| r < fromIntegral (minBound :: Int) || r > fromIntegral (maxBound :: Int)
= error $ "Enum.fromEnum{" ++ showType x ++ "}: value " ++ show r ++ " is outside of Int's bounds " ++ show (minBound :: Int, maxBound :: Int)
| True
= fromIntegral r
where r :: Integer
r = enumCvt "fromEnum" x
enumFrom x = map fromIntegral [xi .. fromIntegral (maxBound :: a)]
where xi :: Integer
xi = enumCvt "enumFrom" x
enumFromThen x y
| yi >= xi = map fromIntegral [xi, yi .. fromIntegral (maxBound :: a)]
| True = map fromIntegral [xi, yi .. fromIntegral (minBound :: a)]
where xi, yi :: Integer
xi = enumCvt "enumFromThen.x" x
yi = enumCvt "enumFromThen.y" y
enumFromThenTo x y z = map fromIntegral [xi, yi .. zi]
where xi, yi, zi :: Integer
xi = enumCvt "enumFromThenTo.x" x
yi = enumCvt "enumFromThenTo.y" y
zi = enumCvt "enumFromThenTo.z" z
-- | Helper function for use in enum operations
enumCvt :: (SymWord a, Integral a, Num b) => String -> SBV a -> b
enumCvt w x = case unliteral x of
Nothing -> error $ "Enum." ++ w ++ "{" ++ showType x ++ "}: Called on symbolic value " ++ show x
Just v -> fromIntegral v
-- | The 'SDivisible' class captures the essence of division.
-- Unfortunately we cannot use Haskell's 'Integral' class since the 'Real'
-- and 'Enum' superclasses are not implementable for symbolic bit-vectors.
-- However, 'quotRem' and 'divMod' make perfect sense, and the 'SDivisible' class captures
-- this operation. One issue is how division by 0 behaves. The verification
-- technology requires total functions, and there are several design choices
-- here. We follow the Isabelle/HOL approach of assigning the value 0 for division
-- by 0. Therefore, we impose the following law:
--
-- @ x `sQuotRem` 0 = (0, x) @
-- @ x `sDivMod` 0 = (0, x) @
--
-- Note that our instances implement this law even when @x@ is @0@ itself.
--
-- NB. 'quot' truncates toward zero, while 'div' truncates toward negative infinity.
--
-- Minimal complete definition: 'sQuotRem', 'sDivMod'
class SDivisible a where
sQuotRem :: a -> a -> (a, a)
sDivMod :: a -> a -> (a, a)
sQuot :: a -> a -> a
sRem :: a -> a -> a
sDiv :: a -> a -> a
sMod :: a -> a -> a
x `sQuot` y = fst $ x `sQuotRem` y
x `sRem` y = snd $ x `sQuotRem` y
x `sDiv` y = fst $ x `sDivMod` y
x `sMod` y = snd $ x `sDivMod` y
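-- To illustrate the division-by-0 convention above (hypothetical helper,
-- stated here for bytes; 'literal' avoids integer literals at SBV types):
--
-- @
-- divByZeroLaw :: SWord8 -> SBool
-- divByZeroLaw x = (x `sQuotRem` literal 0) .== (literal 0, x)
-- @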
instance SDivisible Word64 where
sQuotRem x 0 = (0, x)
sQuotRem x y = x `quotRem` y
sDivMod x 0 = (0, x)
sDivMod x y = x `divMod` y
instance SDivisible Int64 where
sQuotRem x 0 = (0, x)
sQuotRem x y = x `quotRem` y
sDivMod x 0 = (0, x)
sDivMod x y = x `divMod` y
instance SDivisible Word32 where
sQuotRem x 0 = (0, x)
sQuotRem x y = x `quotRem` y
sDivMod x 0 = (0, x)
sDivMod x y = x `divMod` y
instance SDivisible Int32 where
sQuotRem x 0 = (0, x)
sQuotRem x y = x `quotRem` y
sDivMod x 0 = (0, x)
sDivMod x y = x `divMod` y
instance SDivisible Word16 where
sQuotRem x 0 = (0, x)
sQuotRem x y = x `quotRem` y
sDivMod x 0 = (0, x)
sDivMod x y = x `divMod` y
instance SDivisible Int16 where
sQuotRem x 0 = (0, x)
sQuotRem x y = x `quotRem` y
sDivMod x 0 = (0, x)
sDivMod x y = x `divMod` y
instance SDivisible Word8 where
sQuotRem x 0 = (0, x)
sQuotRem x y = x `quotRem` y
sDivMod x 0 = (0, x)
sDivMod x y = x `divMod` y
instance SDivisible Int8 where
sQuotRem x 0 = (0, x)
sQuotRem x y = x `quotRem` y
sDivMod x 0 = (0, x)
sDivMod x y = x `divMod` y
instance SDivisible Integer where
sQuotRem x 0 = (0, x)
sQuotRem x y = x `quotRem` y
sDivMod x 0 = (0, x)
sDivMod x y = x `divMod` y
instance SDivisible CW where
sQuotRem a b
| CWInteger x <- cwVal a, CWInteger y <- cwVal b
= let (r1, r2) = sQuotRem x y in (normCW a{ cwVal = CWInteger r1 }, normCW b{ cwVal = CWInteger r2 })
sQuotRem a b = error $ "SBV.sQuotRem: impossible, unexpected args received: " ++ show (a, b)
sDivMod a b
| CWInteger x <- cwVal a, CWInteger y <- cwVal b
= let (r1, r2) = sDivMod x y in (normCW a { cwVal = CWInteger r1 }, normCW b { cwVal = CWInteger r2 })
sDivMod a b = error $ "SBV.sDivMod: impossible, unexpected args received: " ++ show (a, b)
instance SDivisible SWord64 where
sQuotRem = liftQRem
sDivMod = liftDMod
instance SDivisible SInt64 where
sQuotRem = liftQRem
sDivMod = liftDMod
instance SDivisible SWord32 where
sQuotRem = liftQRem
sDivMod = liftDMod
instance SDivisible SInt32 where
sQuotRem = liftQRem
sDivMod = liftDMod
instance SDivisible SWord16 where
sQuotRem = liftQRem
sDivMod = liftDMod
instance SDivisible SInt16 where
sQuotRem = liftQRem
sDivMod = liftDMod
instance SDivisible SWord8 where
sQuotRem = liftQRem
sDivMod = liftDMod
instance SDivisible SInt8 where
sQuotRem = liftQRem
sDivMod = liftDMod
liftQRem :: (SymWord a, Num a, SDivisible a) => SBV a -> SBV a -> (SBV a, SBV a)
liftQRem x y = ite (y .== z) (z, x) (qr x y)
where qr (SBV sgnsz (Left a)) (SBV _ (Left b)) = let (q, r) = sQuotRem a b in (SBV sgnsz (Left q), SBV sgnsz (Left r))
qr a@(SBV sgnsz _) b = (SBV sgnsz (Right (cache (mk Quot))), SBV sgnsz (Right (cache (mk Rem))))
where mk o st = do sw1 <- sbvToSW st a
sw2 <- sbvToSW st b
mkSymOp o st sgnsz sw1 sw2
z = sbvFromInteger (kindOf x) 0
-- Conversion from quotRem (truncate to 0) to divMod (truncate towards negative infinity)
liftDMod :: (SymWord a, Num a, SDivisible a, SDivisible (SBV a)) => SBV a -> SBV a -> (SBV a, SBV a)
liftDMod x y = ite (y .== z) (z, x) $ ite (signum r .== negate (signum y)) (q-1, r+y) qr
where qr@(q, r) = x `sQuotRem` y
z = sbvFromInteger (kindOf x) 0
-- The SInteger instances for quotRem/divMod are tricky!
-- SMT-Lib only has Euclidean operations, but Haskell
-- uses "truncate to 0" for quotRem, and "truncate to negative infinity" for divMod.
-- So, we cannot just use the above liftings directly.
instance SDivisible SInteger where
sDivMod = liftDMod
sQuotRem x y
| not (isSymbolic x || isSymbolic y)
= liftQRem x y
| True
= ite (y .== 0) (0, x) (qE+i, rE-i*y)
where (qE, rE) = liftQRem x y -- for integers, this is euclidean due to SMTLib semantics
i = ite (x .>= 0 ||| rE .== 0) 0
$ ite (y .> 0) 1 (-1)
-- Quickcheck interface
-- The Arbitrary instance for SFunArray returns an array initialized
-- to an arbitrary element
instance (SymWord b, Arbitrary b) => Arbitrary (SFunArray a b) where
arbitrary = arbitrary >>= \r -> return $ SFunArray (const r)
instance (SymWord a, Arbitrary a) => Arbitrary (SBV a) where
arbitrary = liftM literal arbitrary
-- | Symbolic conditionals are modeled by the 'Mergeable' class, describing
-- how to merge the results of an if-then-else call with a symbolic test. SBV
-- provides all basic types as instances of this class, so users only need
-- to declare instances for custom data-types of their programs as needed.
--
-- The function 'select' is a total-indexing function out of a list of choices
-- with a default value, simulating array/list indexing. It's an n-way generalization
-- of the 'ite' function.
--
-- Minimal complete definition: 'symbolicMerge'
class Mergeable a where
-- | Merge two values based on the condition. The first argument states
-- whether we force the then-and-else branches before the merging, at the
-- word level. This is an efficiency concern; one that we'd rather not
-- make but unfortunately necessary for getting symbolic simulation
-- working efficiently.
symbolicMerge :: Bool -> SBool -> a -> a -> a
-- | Total indexing operation. @select xs default index@ is intuitively
-- the same as @xs !! index@, except it evaluates to @default@ if @index@
-- overflows
select :: (SymWord b, Num b) => [a] -> a -> SBV b -> a
-- NB. Earlier implementation of select used the binary-search trick
-- on the index to chop down the search space. While that is a good trick
-- in general, it doesn't work for SBV since we do not have any notion of
-- "concrete" subwords: If an index is symbolic, then all its bits are
-- symbolic as well. So, the binary search pays off only if the indexed
-- list is really humongous, which is not very common in general. (Also,
-- for the case when the list is bit-vectors, we use SMT tables anyhow.)
select xs err ind
| isReal ind = error "SBV.select: unsupported real valued select/index expression"
| True = walk xs ind err
where walk [] _ acc = acc
walk (e:es) i acc = walk es (i-1) (ite (i .== 0) e acc)
-- | If-then-else. This is by definition 'symbolicMerge' with both
-- branches forced. This is typically the desired behavior, but also
-- see 'iteLazy' should you need more laziness.
ite :: Mergeable a => SBool -> a -> a -> a
ite t a b
| Just r <- unliteral t = if r then a else b
| True = symbolicMerge True t a b
-- | A Lazy version of ite, which does not force its arguments. This might
-- cause issues for symbolic simulation with large thunks around, so use with
-- care.
iteLazy :: Mergeable a => SBool -> a -> a -> a
iteLazy t a b
| Just r <- unliteral t = if r then a else b
| True = symbolicMerge False t a b
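-- As an illustrative sketch of 'ite' and 'select' (hypothetical helpers;
-- 'literal' is used to avoid relying on integer literals at SBV types):
--
-- @
-- clamp100 :: SWord8 -> SWord8
-- clamp100 x = ite (x .> literal 100) (literal 100) x
--
-- squareTable :: SWord8 -> SWord8
-- squareTable i = select (map literal [0, 1, 4, 9, 16]) (literal 0) i
-- @
--
-- 'squareTable' indexes a fixed table with a symbolic index, falling back to
-- the default (here 0) when the index is out of range.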
-- | Branch on a condition, much like 'ite'. The exception is that SBV will
-- check to make sure if the test condition is feasible by making an external
-- call to the SMT solver. Note that this can be expensive, thus we shall use
-- a time-out value ('sBranchTimeOut'). There might be zero, one, or two such
-- external calls per 'sBranch' call:
--
-- - If condition is statically known to be True/False: 0 calls
-- - In this case, we simply constant fold.
--
-- - If condition is determined to be unsatisfiable : 1 call
-- - In this case, we know then-branch is infeasible, so just take the else-branch
--
-- - If condition is determined to be satisfiable : 2 calls
-- - In this case, we know then-branch is feasible, but we still have to check if the else-branch is
--
-- In summary, 'sBranch' calls can be expensive, but they can help with the so-called symbolic-termination
-- problem. See "Data.SBV.Examples.Misc.SBranch" for an example.
sBranch :: Mergeable a => SBool -> a -> a -> a
sBranch t a b
| Just r <- unliteral c = if r then a else b
| True = symbolicMerge False c a b
where c = reduceInPathCondition t
-- SBV
instance SymWord a => Mergeable (SBV a) where
symbolicMerge force t a b
| Just r <- unliteral t
= if r then a else b
| force, Just av <- unliteral a, Just bv <- unliteral b, rationalSBVCheck a b, av == bv
= a
| True
= SBV k $ Right $ cache c
where k = kindOf a
c st = do swt <- sbvToSW st t
case () of
() | swt == trueSW -> sbvToSW st a -- these two cases should never be needed as we expect symbolicMerge to be
() | swt == falseSW -> sbvToSW st b -- called with symbolic tests, but just in case..
() -> do {- It is tempting to record the choice of the test expression here as we branch down to the 'then' and 'else' branches. That is,
when we evaluate 'a', we can make use of the fact that the test expression is True, and similarly we can use the fact that it
is False when b is evaluated. In certain cases this can cut down on symbolic simulation significantly, for instance if
repetitive decisions are made in a recursive loop. Unfortunately, the implementation of this idea is quite tricky, due to
our sharing based implementation. As the 'then' branch is evaluated, we will create many expressions that are likely going
to be "reused" when the 'else' branch is executed. But, it would be *dead wrong* to share those values, as they were "cached"
under the incorrect assumptions. To wit, consider the following:
foo x y = ite (y .== 0) k (k+1)
where k = ite (y .== 0) x (x+1)
When we reduce the 'then' branch of the first ite, we'd record the assumption that y is 0. But while reducing the 'then' branch, we'd
like to share 'k', which would evaluate (correctly) to 'x' under the given assumption. When we backtrack and evaluate the 'else'
branch of the first ite, we'd see 'k' is needed again, and we'd look it up from our sharing map to find (incorrectly) that its value
is 'x', which was stored there under the assumption that y was 0, which no longer holds. Clearly, this is unsound.
A sound implementation would have to precisely track which assumptions were active at the time expressions get shared. That is,
in the above example, we should record that the value of 'k' was cached under the assumption that 'y' is 0. While sound, this
approach unfortunately leads to significant loss of valid sharing when the value itself had nothing to do with the assumption.
To wit, consider:
foo x y = ite (y .== 0) k (k+1)
where k = x+5
If we tracked the assumptions, we would recompute 'k' twice, since the branch assumptions would differ. Clearly, there is no need to
re-compute 'k' in this case since its value is independent of y. Note that the whole SBV performance story is based on aggressive sharing,
and losing that would have other significant ramifications.
The "proper" solution would be to track, with each shared computation, precisely which assumptions it actually *depends* on, rather
than blindly recording all the assumptions present at that time. SBV's symbolic simulation engine clearly has all the info needed to do this
properly, but the implementation is not straightforward at all. For each subexpression, we would need to chase down its dependencies
transitively, which can require a lot of scanning of the generated program causing major slow-down; thus potentially defeating the
whole purpose of sharing in the first place.
Design choice: Keep it simple, and simply do not track the assumption at all. This will maximize sharing, at the cost of evaluating
unreachable branches. I think the simplicity is more important at this point than efficiency.
Also note that the user can avoid most such issues by properly combining if-then-else's with common conditions together. That is, the
first program above should be written like this:
foo x y = ite (y .== 0) x (x+2)
In general, the following transformations should be done whenever possible:
ite e1 (ite e1 e2 e3) e4 --> ite e1 e2 e4
ite e1 e2 (ite e1 e3 e4) --> ite e1 e2 e4
This is in accordance with the general rule-of-thumb stating conditionals should be avoided as much as possible. However, we might prefer
the following:
ite e1 (f e2 e4) (f e3 e5) --> f (ite e1 e2 e3) (ite e1 e4 e5)
especially if this expression happens to be inside 'f's body itself (i.e., when f is recursive), since it reduces the number of
recursive calls. Clearly, programming with symbolic simulation in mind is another kind of beast altogether.
-}
swa <- sbvToSW (st `extendPathCondition` (&&& t)) a -- evaluate 'then' branch
swb <- sbvToSW (st `extendPathCondition` (&&& bnot t)) b -- evaluate 'else' branch
case () of -- merge:
() | swa == swb -> return swa
() | swa == trueSW && swb == falseSW -> return swt
() | swa == falseSW && swb == trueSW -> newExpr st k (SBVApp Not [swt])
() -> newExpr st k (SBVApp Ite [swt, swa, swb])
-- Custom version of select that translates to SMT-Lib tables at the base type of words
select xs err ind
| SBV _ (Left c) <- ind = case cwVal c of
CWInteger i -> if i < 0 || i >= genericLength xs
then err
else xs `genericIndex` i
_ -> error "SBV.select: unsupported real valued select/index expression"
select xs err ind = SBV kElt $ Right $ cache r
where kInd = kindOf ind
kElt = kindOf err
r st = do sws <- mapM (sbvToSW st) xs
swe <- sbvToSW st err
if all (== swe) sws -- off-chance that all elts are the same
then return swe
else do idx <- getTableIndex st kInd kElt sws
swi <- sbvToSW st ind
let len = length xs
newExpr st kElt (SBVApp (LkUp (idx, kInd, kElt, len) swi swe) [])
-- Unit
instance Mergeable () where
symbolicMerge _ _ _ _ = ()
select _ _ _ = ()
-- Mergeable instances for List/Maybe/Either/Array are useful, but can
-- throw exceptions if the results do not match structurally.
-- It's debatable whether we should really keep them.
-- Lists
instance Mergeable a => Mergeable [a] where
symbolicMerge f t xs ys
| lxs == lys = zipWith (symbolicMerge f t) xs ys
| True = error $ "SBV.Mergeable.List: No least-upper-bound for lists of differing size " ++ show (lxs, lys)
where (lxs, lys) = (length xs, length ys)
-- Maybe
instance Mergeable a => Mergeable (Maybe a) where
symbolicMerge _ _ Nothing Nothing = Nothing
symbolicMerge f t (Just a) (Just b) = Just $ symbolicMerge f t a b
symbolicMerge _ _ a b = error $ "SBV.Mergeable.Maybe: No least-upper-bound for " ++ show (k a, k b)
where k Nothing = "Nothing"
k _ = "Just"
-- Either
instance (Mergeable a, Mergeable b) => Mergeable (Either a b) where
symbolicMerge f t (Left a) (Left b) = Left $ symbolicMerge f t a b
symbolicMerge f t (Right a) (Right b) = Right $ symbolicMerge f t a b
symbolicMerge _ _ a b = error $ "SBV.Mergeable.Either: No least-upper-bound for " ++ show (k a, k b)
where k (Left _) = "Left"
k (Right _) = "Right"
-- Arrays
instance (Ix a, Mergeable b) => Mergeable (Array a b) where
symbolicMerge f t a b
| ba == bb = listArray ba (zipWith (symbolicMerge f t) (elems a) (elems b))
| True = error $ "SBV.Mergeable.Array: No least-upper-bound for rangeSizes" ++ show (k ba, k bb)
where [ba, bb] = map bounds [a, b]
k = rangeSize
-- Functions
instance Mergeable b => Mergeable (a -> b) where
symbolicMerge f t g h x = symbolicMerge f t (g x) (h x)
{- The following definition, while correct, is utterly inefficient. Since the
application is delayed, this hangs on to the inner list and all the
impending merges, even when ind is concrete. Thus, it's much better to
simply use the default definition for the function case.
-}
-- select xs err ind = \x -> select (map ($ x) xs) (err x) ind
-- 2-Tuple
instance (Mergeable a, Mergeable b) => Mergeable (a, b) where
symbolicMerge f t (i0, i1) (j0, j1) = (i i0 j0, i i1 j1)
where i a b = symbolicMerge f t a b
select xs (err1, err2) ind = (select as err1 ind, select bs err2 ind)
where (as, bs) = unzip xs
-- 3-Tuple
instance (Mergeable a, Mergeable b, Mergeable c) => Mergeable (a, b, c) where
symbolicMerge f t (i0, i1, i2) (j0, j1, j2) = (i i0 j0, i i1 j1, i i2 j2)
where i a b = symbolicMerge f t a b
select xs (err1, err2, err3) ind = (select as err1 ind, select bs err2 ind, select cs err3 ind)
where (as, bs, cs) = unzip3 xs
-- 4-Tuple
instance (Mergeable a, Mergeable b, Mergeable c, Mergeable d) => Mergeable (a, b, c, d) where
symbolicMerge f t (i0, i1, i2, i3) (j0, j1, j2, j3) = (i i0 j0, i i1 j1, i i2 j2, i i3 j3)
where i a b = symbolicMerge f t a b
select xs (err1, err2, err3, err4) ind = (select as err1 ind, select bs err2 ind, select cs err3 ind, select ds err4 ind)
where (as, bs, cs, ds) = unzip4 xs
-- 5-Tuple
instance (Mergeable a, Mergeable b, Mergeable c, Mergeable d, Mergeable e) => Mergeable (a, b, c, d, e) where
symbolicMerge f t (i0, i1, i2, i3, i4) (j0, j1, j2, j3, j4) = (i i0 j0, i i1 j1, i i2 j2, i i3 j3, i i4 j4)
where i a b = symbolicMerge f t a b
select xs (err1, err2, err3, err4, err5) ind = (select as err1 ind, select bs err2 ind, select cs err3 ind, select ds err4 ind, select es err5 ind)
where (as, bs, cs, ds, es) = unzip5 xs
-- 6-Tuple
instance (Mergeable a, Mergeable b, Mergeable c, Mergeable d, Mergeable e, Mergeable f) => Mergeable (a, b, c, d, e, f) where
symbolicMerge f t (i0, i1, i2, i3, i4, i5) (j0, j1, j2, j3, j4, j5) = (i i0 j0, i i1 j1, i i2 j2, i i3 j3, i i4 j4, i i5 j5)
where i a b = symbolicMerge f t a b
select xs (err1, err2, err3, err4, err5, err6) ind = (select as err1 ind, select bs err2 ind, select cs err3 ind, select ds err4 ind, select es err5 ind, select fs err6 ind)
where (as, bs, cs, ds, es, fs) = unzip6 xs
-- 7-Tuple
instance (Mergeable a, Mergeable b, Mergeable c, Mergeable d, Mergeable e, Mergeable f, Mergeable g) => Mergeable (a, b, c, d, e, f, g) where
symbolicMerge f t (i0, i1, i2, i3, i4, i5, i6) (j0, j1, j2, j3, j4, j5, j6) = (i i0 j0, i i1 j1, i i2 j2, i i3 j3, i i4 j4, i i5 j5, i i6 j6)
where i a b = symbolicMerge f t a b
select xs (err1, err2, err3, err4, err5, err6, err7) ind = (select as err1 ind, select bs err2 ind, select cs err3 ind, select ds err4 ind, select es err5 ind, select fs err6 ind, select gs err7 ind)
where (as, bs, cs, ds, es, fs, gs) = unzip7 xs
-- Bounded instances
instance (SymWord a, Bounded a) => Bounded (SBV a) where
minBound = literal minBound
maxBound = literal maxBound
-- Arrays
-- SArrays are both "EqSymbolic" and "Mergeable"
instance EqSymbolic (SArray a b) where
(SArray _ a) .== (SArray _ b) = SBV KBool $ Right $ cache c
where c st = do ai <- uncacheAI a st
bi <- uncacheAI b st
newExpr st KBool (SBVApp (ArrEq ai bi) [])
-- When merging arrays, we'll ignore the force argument. This is arguably
-- the right thing to do, as there would be too many entries to force and we would rather keep merging efficient.
instance SymWord b => Mergeable (SArray a b) where
symbolicMerge _ = mergeArrays
-- SFunArrays are only "Mergeable". Although a brute
-- force equality can be defined, any non-toy instance
-- will suffer from efficiency issues; so we don't define it
instance SymArray SFunArray where
newArray _ = newArray_ -- the name is irrelevant in this case
newArray_ mbiVal = return $ SFunArray $ const $ fromMaybe (error "Reading from an uninitialized array entry") mbiVal
readArray (SFunArray f) = f
resetArray (SFunArray _) a = SFunArray $ const a
writeArray (SFunArray f) a b = SFunArray (\a' -> ite (a .== a') b (f a'))
mergeArrays t (SFunArray g) (SFunArray h) = SFunArray (\x -> ite t (g x) (h x))
-- When merging arrays, we'll ignore the force argument. This is arguably
-- the right thing to do, as there would be too many entries to force and we would rather keep merging efficient.
instance SymWord b => Mergeable (SFunArray a b) where
symbolicMerge _ = mergeArrays
-- | Uninterpreted constants and functions. An uninterpreted constant is
-- a value that is indexed by its name. The only property the prover assumes
-- about these values is that they are equivalent to themselves; i.e., (for
-- functions) they return the same results when applied to same arguments.
-- We support uninterpreted-functions as a general means of black-box'ing
-- operations that are /irrelevant/ for the purposes of the proof; i.e., when
-- the proofs can be performed without any knowledge about the function itself.
--
-- Minimal complete definition: 'sbvUninterpret'. However, most instances in
-- practice are already provided by SBV, so end-users should not need to define their
-- own instances.
class Uninterpreted a where
-- | Uninterpret a value, receiving an object that can be used instead. Use this version
-- when you do not need to add an axiom about this value.
uninterpret :: String -> a
-- | Uninterpret a value, only for the purposes of code-generation. For execution
-- and verification the value is used as is. For code-generation, the alternate
-- definition is used. This is useful when we want to take advantage of native
-- libraries on the target languages.
cgUninterpret :: String -> [String] -> a -> a
-- | Most generalized form of uninterpretation, this function should not be needed
-- by end-user-code, but is rather useful for the library development.
sbvUninterpret :: Maybe ([String], a) -> String -> a
-- minimal complete definition: 'sbvUninterpret'
uninterpret = sbvUninterpret Nothing
cgUninterpret nm code v = sbvUninterpret (Just (code, v)) nm
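-- As an illustrative usage sketch (hypothetical names; @==>@ below is the
-- Boolean implication): an uninterpreted function can be used to prove facts
-- that hold for /any/ function of its type, e.g. congruence:
--
-- @
-- f :: SWord8 -> SWord8
-- f = uninterpret "f"
--
-- fCongruent :: SWord8 -> SWord8 -> SBool
-- fCongruent x y = (x .== y) ==> (f x .== f y)
-- @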
mkUninterpreted :: [Kind] -> [SBV ()] -> String -> SBV a
mkUninterpreted ks args nm = SBV ka $ Right $ cache result where
ka = last ks
result st = do
newUninterpreted st nm (SBVType ks) Nothing
sws <- mapM (sbvToSW st) args
mapM_ forceSWArg sws
newExpr st ka $ SBVApp (Uninterpreted nm) sws
-- Plain constants
instance HasKind a => Uninterpreted (SBV a) where
sbvUninterpret mbCgData nm
| Just (_, v) <- mbCgData = v
| True = SBV ka $ Right $ cache result
where ka = kindOf (undefined :: a)
result st | Just (_, v) <- mbCgData, inProofMode st = sbvToSW st v
| True = do newUninterpreted st nm (SBVType [ka]) (fst `fmap` mbCgData)
newExpr st ka $ SBVApp (Uninterpreted nm) []
-- Functions of one argument
instance (SymWord b, HasKind a) => Uninterpreted (SBV b -> SBV a) where
sbvUninterpret mbCgData nm = f
where f arg0
| Just (_, v) <- mbCgData, isConcrete arg0
= v arg0
| True
= SBV ka $ Right $ cache result
where ka = kindOf (undefined :: a)
kb = kindOf (undefined :: b)
result st | Just (_, v) <- mbCgData, inProofMode st = sbvToSW st (v arg0)
| True = do newUninterpreted st nm (SBVType [kb, ka]) (fst `fmap` mbCgData)
sw0 <- sbvToSW st arg0
mapM_ forceSWArg [sw0]
newExpr st ka $ SBVApp (Uninterpreted nm) [sw0]
-- Functions of two arguments
instance (SymWord c, SymWord b, HasKind a) => Uninterpreted (SBV c -> SBV b -> SBV a) where
sbvUninterpret mbCgData nm = f
where f arg0 arg1
| Just (_, v) <- mbCgData, isConcrete arg0, isConcrete arg1
= v arg0 arg1
| True
= SBV ka $ Right $ cache result
where ka = kindOf (undefined :: a)
kb = kindOf (undefined :: b)
kc = kindOf (undefined :: c)
result st | Just (_, v) <- mbCgData, inProofMode st = sbvToSW st (v arg0 arg1)
| True = do newUninterpreted st nm (SBVType [kc, kb, ka]) (fst `fmap` mbCgData)
sw0 <- sbvToSW st arg0
sw1 <- sbvToSW st arg1
mapM_ forceSWArg [sw0, sw1]
newExpr st ka $ SBVApp (Uninterpreted nm) [sw0, sw1]
-- Functions of three arguments
instance (SymWord d, SymWord c, SymWord b, HasKind a) => Uninterpreted (SBV d -> SBV c -> SBV b -> SBV a) where
sbvUninterpret mbCgData nm = f
where f arg0 arg1 arg2
| Just (_, v) <- mbCgData, isConcrete arg0, isConcrete arg1, isConcrete arg2
= v arg0 arg1 arg2
| True
= SBV ka $ Right $ cache result
where ka = kindOf (undefined :: a)
kb = kindOf (undefined :: b)
kc = kindOf (undefined :: c)
kd = kindOf (undefined :: d)
result st | Just (_, v) <- mbCgData, inProofMode st = sbvToSW st (v arg0 arg1 arg2)
| True = do newUninterpreted st nm (SBVType [kd, kc, kb, ka]) (fst `fmap` mbCgData)
sw0 <- sbvToSW st arg0
sw1 <- sbvToSW st arg1
sw2 <- sbvToSW st arg2
mapM_ forceSWArg [sw0, sw1, sw2]
newExpr st ka $ SBVApp (Uninterpreted nm) [sw0, sw1, sw2]
-- Functions of four arguments
instance (SymWord e, SymWord d, SymWord c, SymWord b, HasKind a) => Uninterpreted (SBV e -> SBV d -> SBV c -> SBV b -> SBV a) where
sbvUninterpret mbCgData nm = f
where f arg0 arg1 arg2 arg3
| Just (_, v) <- mbCgData, isConcrete arg0, isConcrete arg1, isConcrete arg2, isConcrete arg3
= v arg0 arg1 arg2 arg3
| True
= SBV ka $ Right $ cache result
where ka = kindOf (undefined :: a)
kb = kindOf (undefined :: b)
kc = kindOf (undefined :: c)
kd = kindOf (undefined :: d)
ke = kindOf (undefined :: e)
result st | Just (_, v) <- mbCgData, inProofMode st = sbvToSW st (v arg0 arg1 arg2 arg3)
| True = do newUninterpreted st nm (SBVType [ke, kd, kc, kb, ka]) (fst `fmap` mbCgData)
sw0 <- sbvToSW st arg0
sw1 <- sbvToSW st arg1
sw2 <- sbvToSW st arg2
sw3 <- sbvToSW st arg3
mapM_ forceSWArg [sw0, sw1, sw2, sw3]
newExpr st ka $ SBVApp (Uninterpreted nm) [sw0, sw1, sw2, sw3]
-- Functions of five arguments
instance (SymWord f, SymWord e, SymWord d, SymWord c, SymWord b, HasKind a) => Uninterpreted (SBV f -> SBV e -> SBV d -> SBV c -> SBV b -> SBV a) where
sbvUninterpret mbCgData nm = f
where f arg0 arg1 arg2 arg3 arg4
| Just (_, v) <- mbCgData, isConcrete arg0, isConcrete arg1, isConcrete arg2, isConcrete arg3, isConcrete arg4
= v arg0 arg1 arg2 arg3 arg4
| True
= SBV ka $ Right $ cache result
where ka = kindOf (undefined :: a)
kb = kindOf (undefined :: b)
kc = kindOf (undefined :: c)
kd = kindOf (undefined :: d)
ke = kindOf (undefined :: e)
kf = kindOf (undefined :: f)
result st | Just (_, v) <- mbCgData, inProofMode st = sbvToSW st (v arg0 arg1 arg2 arg3 arg4)
| True = do newUninterpreted st nm (SBVType [kf, ke, kd, kc, kb, ka]) (fst `fmap` mbCgData)
sw0 <- sbvToSW st arg0
sw1 <- sbvToSW st arg1
sw2 <- sbvToSW st arg2
sw3 <- sbvToSW st arg3
sw4 <- sbvToSW st arg4
mapM_ forceSWArg [sw0, sw1, sw2, sw3, sw4]
newExpr st ka $ SBVApp (Uninterpreted nm) [sw0, sw1, sw2, sw3, sw4]
-- Functions of six arguments
instance (SymWord g, SymWord f, SymWord e, SymWord d, SymWord c, SymWord b, HasKind a) => Uninterpreted (SBV g -> SBV f -> SBV e -> SBV d -> SBV c -> SBV b -> SBV a) where
sbvUninterpret mbCgData nm = f
where f arg0 arg1 arg2 arg3 arg4 arg5
| Just (_, v) <- mbCgData, isConcrete arg0, isConcrete arg1, isConcrete arg2, isConcrete arg3, isConcrete arg4, isConcrete arg5
= v arg0 arg1 arg2 arg3 arg4 arg5
| True
= SBV ka $ Right $ cache result
where ka = kindOf (undefined :: a)
kb = kindOf (undefined :: b)
kc = kindOf (undefined :: c)
kd = kindOf (undefined :: d)
ke = kindOf (undefined :: e)
kf = kindOf (undefined :: f)
kg = kindOf (undefined :: g)
result st | Just (_, v) <- mbCgData, inProofMode st = sbvToSW st (v arg0 arg1 arg2 arg3 arg4 arg5)
| True = do newUninterpreted st nm (SBVType [kg, kf, ke, kd, kc, kb, ka]) (fst `fmap` mbCgData)
sw0 <- sbvToSW st arg0
sw1 <- sbvToSW st arg1
sw2 <- sbvToSW st arg2
sw3 <- sbvToSW st arg3
sw4 <- sbvToSW st arg4
sw5 <- sbvToSW st arg5
mapM_ forceSWArg [sw0, sw1, sw2, sw3, sw4, sw5]
newExpr st ka $ SBVApp (Uninterpreted nm) [sw0, sw1, sw2, sw3, sw4, sw5]
-- Functions of seven arguments
instance (SymWord h, SymWord g, SymWord f, SymWord e, SymWord d, SymWord c, SymWord b, HasKind a)
=> Uninterpreted (SBV h -> SBV g -> SBV f -> SBV e -> SBV d -> SBV c -> SBV b -> SBV a) where
sbvUninterpret mbCgData nm = f
where f arg0 arg1 arg2 arg3 arg4 arg5 arg6
| Just (_, v) <- mbCgData, isConcrete arg0, isConcrete arg1, isConcrete arg2, isConcrete arg3, isConcrete arg4, isConcrete arg5, isConcrete arg6
= v arg0 arg1 arg2 arg3 arg4 arg5 arg6
| True
= SBV ka $ Right $ cache result
where ka = kindOf (undefined :: a)
kb = kindOf (undefined :: b)
kc = kindOf (undefined :: c)
kd = kindOf (undefined :: d)
ke = kindOf (undefined :: e)
kf = kindOf (undefined :: f)
kg = kindOf (undefined :: g)
kh = kindOf (undefined :: h)
result st | Just (_, v) <- mbCgData, inProofMode st = sbvToSW st (v arg0 arg1 arg2 arg3 arg4 arg5 arg6)
| True = do newUninterpreted st nm (SBVType [kh, kg, kf, ke, kd, kc, kb, ka]) (fst `fmap` mbCgData)
sw0 <- sbvToSW st arg0
sw1 <- sbvToSW st arg1
sw2 <- sbvToSW st arg2
sw3 <- sbvToSW st arg3
sw4 <- sbvToSW st arg4
sw5 <- sbvToSW st arg5
sw6 <- sbvToSW st arg6
mapM_ forceSWArg [sw0, sw1, sw2, sw3, sw4, sw5, sw6]
newExpr st ka $ SBVApp (Uninterpreted nm) [sw0, sw1, sw2, sw3, sw4, sw5, sw6]
-- Uncurried functions of two arguments
instance (SymWord c, SymWord b, HasKind a) => Uninterpreted ((SBV c, SBV b) -> SBV a) where
sbvUninterpret mbCgData nm = let f = sbvUninterpret (uc2 `fmap` mbCgData) nm in uncurry f
where uc2 (cs, fn) = (cs, curry fn)
-- Uncurried functions of three arguments
instance (SymWord d, SymWord c, SymWord b, HasKind a) => Uninterpreted ((SBV d, SBV c, SBV b) -> SBV a) where
sbvUninterpret mbCgData nm = let f = sbvUninterpret (uc3 `fmap` mbCgData) nm in \(arg0, arg1, arg2) -> f arg0 arg1 arg2
where uc3 (cs, fn) = (cs, \a b c -> fn (a, b, c))
-- Uncurried functions of four arguments
instance (SymWord e, SymWord d, SymWord c, SymWord b, HasKind a)
=> Uninterpreted ((SBV e, SBV d, SBV c, SBV b) -> SBV a) where
sbvUninterpret mbCgData nm = let f = sbvUninterpret (uc4 `fmap` mbCgData) nm in \(arg0, arg1, arg2, arg3) -> f arg0 arg1 arg2 arg3
where uc4 (cs, fn) = (cs, \a b c d -> fn (a, b, c, d))
-- Uncurried functions of five arguments
instance (SymWord f, SymWord e, SymWord d, SymWord c, SymWord b, HasKind a)
=> Uninterpreted ((SBV f, SBV e, SBV d, SBV c, SBV b) -> SBV a) where
sbvUninterpret mbCgData nm = let f = sbvUninterpret (uc5 `fmap` mbCgData) nm in \(arg0, arg1, arg2, arg3, arg4) -> f arg0 arg1 arg2 arg3 arg4
where uc5 (cs, fn) = (cs, \a b c d e -> fn (a, b, c, d, e))
-- Uncurried functions of six arguments
instance (SymWord g, SymWord f, SymWord e, SymWord d, SymWord c, SymWord b, HasKind a)
=> Uninterpreted ((SBV g, SBV f, SBV e, SBV d, SBV c, SBV b) -> SBV a) where
sbvUninterpret mbCgData nm = let f = sbvUninterpret (uc6 `fmap` mbCgData) nm in \(arg0, arg1, arg2, arg3, arg4, arg5) -> f arg0 arg1 arg2 arg3 arg4 arg5
where uc6 (cs, fn) = (cs, \a b c d e f -> fn (a, b, c, d, e, f))
-- Uncurried functions of seven arguments
instance (SymWord h, SymWord g, SymWord f, SymWord e, SymWord d, SymWord c, SymWord b, HasKind a)
=> Uninterpreted ((SBV h, SBV g, SBV f, SBV e, SBV d, SBV c, SBV b) -> SBV a) where
sbvUninterpret mbCgData nm = let f = sbvUninterpret (uc7 `fmap` mbCgData) nm in \(arg0, arg1, arg2, arg3, arg4, arg5, arg6) -> f arg0 arg1 arg2 arg3 arg4 arg5 arg6
where uc7 (cs, fn) = (cs, \a b c d e f g -> fn (a, b, c, d, e, f, g))
-- | Adding arbitrary constraints. When adding constraints, one has to be careful about
-- making sure they are not inconsistent. The function 'isVacuous' can be used for this purpose.
-- Here is an example. Consider the following predicate:
--
-- >>> let pred = do { x <- forall "x"; constrain $ x .< x; return $ x .>= (5 :: SWord8) }
--
-- This predicate asserts that all 8-bit values are larger than 5, subject to the constraint that the
-- values considered satisfy @x .< x@, i.e., they are less than themselves. Since there are no values that
-- satisfy this constraint, the proof will pass vacuously:
--
-- >>> prove pred
-- Q.E.D.
--
-- We can use 'isVacuous' to check that the pass was indeed vacuous:
--
-- >>> isVacuous pred
-- True
--
-- While the above example is trivial, things can get complicated if there are multiple constraints with
-- non-straightforward relations; so if constraints are used, one should make sure to check that the predicate
-- is not vacuously true. Here's an example that is not vacuous:
--
-- >>> let pred' = do { x <- forall "x"; constrain $ x .> 6; return $ x .>= (5 :: SWord8) }
--
-- This time the proof passes as expected:
--
-- >>> prove pred'
-- Q.E.D.
--
-- And the proof is not vacuous:
--
-- >>> isVacuous pred'
-- False
constrain :: SBool -> Symbolic ()
constrain c = addConstraint Nothing c (bnot c)
-- | Adding a probabilistic constraint. The 'Double' argument is the probability
-- threshold. Probabilistic constraints are useful for 'genTest' and 'quickCheck'
-- calls where we restrict our attention to /interesting/ parts of the input domain.
pConstrain :: Double -> SBool -> Symbolic ()
pConstrain t c = addConstraint (Just t) c (bnot c)
-- | Boolean symbolic reduction. See if we can reduce a boolean condition to true/false
-- using the path context information, by making external calls to the SMT solvers. Used in the
-- implementation of 'sBranch'.
reduceInPathCondition :: SBool -> SBool
reduceInPathCondition b
| isConcrete b = b -- No reduction is needed, already a concrete value
| True = SBV k $ Right $ cache c
where k = kindOf b
c st = do -- Now that we know our boolean is not obviously true/false, we need to make an external
-- call to the SMT solver to see if we can prove that it is necessarily one of those.
let pc = getPathCondition st
satTrue <- isSBranchFeasibleInState st "then" (pc &&& b)
if not satTrue
then return falseSW -- condition is not satisfiable; so it must be necessarily False.
else do satFalse <- isSBranchFeasibleInState st "else" (pc &&& bnot b)
if not satFalse -- negation of the condition is not satisfiable; so it must be necessarily True.
then return trueSW
else sbvToSW st b -- condition is not necessarily always True/False. So, keep symbolic.
-- QuickCheck interface on symbolic booleans.
instance Testable SBool where
property (SBV _ (Left b)) = property (cwToBool b)
property s = error $ "Cannot quick-check in the presence of uninterpreted constants! (" ++ show s ++ ")"
instance Testable (Symbolic SBool) where
property m = QC.whenFail (putStrLn msg) $ QC.monadicIO test
where runOnce g = do (r, Result _ tvals _ _ cs _ _ _ _ _ cstrs _) <- runSymbolic' (Concrete g) m
let cval = fromMaybe (error "Cannot quick-check in the presence of uninterpeted constants!") . (`lookup` cs)
cond = all (cwToBool . cval) cstrs
when (isSymbolic r) $ error $ "Cannot quick-check in the presence of uninterpreted constants! (" ++ show r ++ ")"
if cond then if r `isConcretely` id
then return False
else do putStrLn $ complain tvals
return True
else runOnce g -- cstrs failed, go again
test = do die <- QC.run $ newStdGen >>= runOnce
when die $ fail "Falsifiable"
msg = "*** SBV: See the custom counter example reported above."
complain [] = "*** SBV Counter Example: Predicate contains no universally quantified variables."
complain qcInfo = intercalate "\n" $ "*** SBV Counter Example:" : map ((" " ++) . info) qcInfo
where maxLen = maximum (0:[length s | (s, _) <- qcInfo])
shN s = s ++ replicate (maxLen - length s) ' '
info (n, cw) = shN n ++ " = " ++ show cw
-- | Explicit sharing combinator. The SBV library has internal caching/hash-consing mechanisms
-- built in, based on Andy Gill's type-safe observable sharing technique (see: <http://ittc.ku.edu/~andygill/paper.php?label=DSLExtract09>).
-- However, there might be times where being explicit on the sharing can help, especially in experimental code. The 'slet' combinator
-- ensures that its first argument is computed once and passed on to its continuation, explicitly indicating the intent of sharing. Most
-- use cases of the SBV library should simply use Haskell's @let@ construct for this purpose.
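--
-- As a hypothetical usage sketch (not part of the original documentation):
--
-- > sharedSquare :: SWord8 -> SWord8
-- > sharedSquare y = slet (y * y) (\sq -> sq + sq)
--
-- Here @y * y@ is computed once and both occurrences of @sq@ refer to the
-- shared result.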
slet :: (HasKind a, HasKind b) => SBV a -> (SBV a -> SBV b) -> SBV b
slet x f = SBV k $ Right $ cache r
where k = kindOf (undefined `asTypeOf` f x)
r st = do xsw <- sbvToSW st x
let xsbv = SBV (kindOf x) (Right (cache (const (return xsw))))
res = f xsbv
sbvToSW st res
-- We use 'isVacuous' and 'prove' only for the "test" section in this file, and GHC complains about that. So, this shuts it up.
__unused :: a
__unused = error "__unused" (isVacuous :: SBool -> IO Bool) (prove :: SBool -> IO ThmResult)
{-# ANN module "HLint: ignore Eta reduce" #-}
{-# ANN module "HLint: ignore Reduce duplication" #-}
| TomMD/cryptol | sbv/Data/SBV/BitVectors/Model.hs | bsd-3-clause | 88,082 | 0 | 23 | 25,825 | 25,692 | 13,345 | 12,347 | -1 | -1 |
{-# LANGUAGE UnicodeSyntax #-}
module Typed.Parser where
import Control.Applicative ((<|>))
import qualified Text.Parsec.Language as L
import qualified Text.Parsec.Token as T
import Text.ParserCombinators.Parsec (Parser, alphaNum, chainl1,
letter, oneOf, parse)
import Typed.Semantics (NmTerm (..), Ty (..))
------------
-- LEXING --
------------
lexer ∷ T.TokenParser ()
lexer = T.makeTokenParser
$ L.emptyDef { T.identStart = letter
, T.identLetter = alphaNum
, T.reservedOpNames = ["lambda", ".", ":", "->"]
, T.reservedNames = ["true", "false", "Bool"]
, T.opLetter = oneOf ".:"
}
parens ∷ Parser a → Parser a
parens = T.parens lexer
natural ∷ Parser Integer
natural = T.natural lexer
reserved ∷ String → Parser ()
reserved = T.reserved lexer
reservedOp ∷ String → Parser ()
reservedOp = T.reservedOp lexer
identifier ∷ Parser String
identifier = T.identifier lexer
whiteSpace ∷ Parser ()
whiteSpace = T.whiteSpace lexer
-------------------------------------------------------------------------------
-------------------------------------- PARSING --------------------------------
-------------------------------------------------------------------------------
variable ∷ Parser NmTerm
variable = identifier >>= \x → return $ NmVar x
true ∷ Parser NmTerm
true = reserved "true" >> return NmTrue
false ∷ Parser NmTerm
false = reserved "false" >> return NmFalse
bool ∷ Parser NmTerm
bool = true <|> false
boolTy ∷ Parser Ty
boolTy = reserved "Bool" >> return TyBool
arrTy ∷ Parser Ty
arrTy = let arrTy' = do { reservedOp "->"; return TyArr }
in boolTy `chainl1` arrTy'
anyType ∷ Parser Ty
anyType = arrTy <|> boolTy
abstraction ∷ Parser NmTerm
abstraction = do
reservedOp "lambda"
whiteSpace
x ← identifier
reservedOp ":"
τ ← anyType
reservedOp "."
whiteSpace
body ← expr
return $ NmAbs x τ body
application ∷ Parser NmTerm
application = let f = do { whiteSpace; return NmApp}
in nonApp `chainl1` f
nonApp ∷ Parser NmTerm
nonApp = parens expr
<|> abstraction
<|> variable
<|> true
<|> false
expr ∷ Parser NmTerm
expr = application <|> nonApp
parseExpr ∷ String → NmTerm
parseExpr t = case parse expr "" t of
Left err → error $ show err
Right ast → ast
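-- | Illustrative usage (an assumption about the intended grammar; not taken
-- from this module's documentation): parsing the boolean identity function.
-- If the parsers above behave as sketched, this should be
-- @NmAbs "x" TyBool (NmVar "x")@.
exampleIdentity ∷ NmTerm
exampleIdentity = parseExpr "lambda x:Bool. x"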
| ayberkt/TAPL | src/Typed/Parser.hs | bsd-3-clause | 2,587 | 0 | 11 | 691 | 703 | 373 | 330 | 68 | 2 |
{-# LANGUAGE GeneralizedNewtypeDeriving
, StandaloneDeriving
, TemplateHaskell
, UnicodeSyntax
#-}
module System.FTDI.Properties where
-------------------------------------------------------------------------------
-- Imports
-------------------------------------------------------------------------------
-- base
import Control.Applicative ( liftA2 )
import Control.Arrow ( first )
import Data.Bits ( (.&.) )
import Data.Word ( Word8 )
-- base-unicode
import Data.Eq.Unicode ( (≡) )
import Data.Function.Unicode ( (∘) )
import Data.Ord.Unicode ( (≤) )
import Prelude.Unicode ( (÷) )
-- derive
import Data.Derive.Arbitrary ( makeArbitrary )
import Data.DeriveTH ( derive )
-- ftdi
import System.FTDI ( ModemStatus(..), ChipType(..)
, BaudRate(..), nearestBaudRate
)
import System.FTDI.Internal ( marshalModemStatus
, unmarshalModemStatus
)
-- QuickCheck
import Test.QuickCheck ( Arbitrary, arbitrary, shrink, choose
, arbitraryBoundedIntegral
, shrinkIntegral, frequency
)
-- random
import System.Random ( Random )
-------------------------------------------------------------------------------
-- Properties
-------------------------------------------------------------------------------
prop_marshalModemStatus ∷ ModemStatus → Bool
prop_marshalModemStatus =
isIdentity ( uncurry unmarshalModemStatus
∘ marshalModemStatus
)
prop_unmarshalModemStatus ∷ (Word8, Word8) → Bool
prop_unmarshalModemStatus =
    -- The identity only holds when we ignore the 4 least significant bits.
isIdentityWith (\x → (ignoreBits x ≡))
( marshalModemStatus
∘ uncurry unmarshalModemStatus
∘ ignoreBits
)
where ignoreBits = first (.&. 0xf0)
prop_baudRateError ∷ RealFrac α ⇒ α → (ChipType → BaudRate α → Bool)
prop_baudRateError maxError = \chip baudRate →
let b = nearestBaudRate chip baudRate
e = abs (b - baudRate) ÷ baudRate
in unBaudRate e ≤ maxError
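-- For instance (illustrative only; 'quickCheck' itself is not imported in this
-- module): @quickCheck (prop_baudRateError (0.05 ∷ Double))@ would check that
-- the nearest representable baud rate stays within 5% of the requested one.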
-------------------------------------------------------------------------------
-- Misc
-------------------------------------------------------------------------------
isIdentity ∷ Eq α ⇒ (α → α) → (α → Bool)
isIdentity = isIdentityWith (≡)
isIdentityWith ∷ Eq α ⇒ (α → α → Bool) → (α → α) → (α → Bool)
isIdentityWith eq = liftA2 eq id
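-- Unfolded (illustrative): @isIdentityWith eq f x@ evaluates to @x `eq` f x@,
-- so @isIdentity f x@ simply checks @x ≡ f x@.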
-------------------------------------------------------------------------------
-- Arbitrary instances
-------------------------------------------------------------------------------
instance Arbitrary Word8 where
arbitrary = arbitraryBoundedIntegral
shrink = shrinkIntegral
deriving instance Random α ⇒ Random (BaudRate α)
instance (Random α, Num α, Arbitrary α) ⇒ Arbitrary (BaudRate α) where
arbitrary = frequency [ ( 1500000 - unBaudRate (minBound ∷ BaudRate Int)
, choose (minBound, 1500000)
)
, (1, return 2000000)
, (1, return 3000000)
]
shrink = map BaudRate ∘ shrink ∘ unBaudRate
$( derive makeArbitrary ''ModemStatus )
$( derive makeArbitrary ''ChipType )
| roelvandijk/ftdi | System/FTDI/Properties.hs | bsd-3-clause | 3,569 | 0 | 13 | 970 | 688 | 395 | 293 | 55 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Tinc.Sandbox (
PackageConfig
, Sandbox
, findPackageDb
, initSandbox
, recache
) where
import Prelude ()
import Prelude.Compat
import Control.Monad.Compat
import Control.Monad.IO.Class
import Data.List.Compat
import Data.Maybe
import System.Directory
import System.FilePath
import Tinc.Fail
import Tinc.GhcPkg
import Tinc.Git
import Tinc.Process
import Tinc.Types
data PackageConfig
data Sandbox
currentDirectory :: Path Sandbox
currentDirectory = "."
initSandbox :: (MonadIO m, Fail m, Process m) => [Path CachedGitDependency] -> [Path PackageConfig] -> m (Path PackageDb)
initSandbox gitDependencies packageConfigs = do
deleteSandbox
callProcess "cabal" ["sandbox", "init"]
packageDb <- findPackageDb currentDirectory
registerPackageConfigs packageDb packageConfigs
mapM_ (\ dep -> callProcess "cabal" ["sandbox", "add-source", path dep]) gitDependencies
return packageDb
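-- Usage sketch (illustrative, summarising the steps above): 'initSandbox [] confs'
-- deletes any existing sandbox, runs @cabal sandbox init@, registers @confs@
-- into the fresh package database and returns that database's path; git
-- dependencies, when present, are added via @cabal sandbox add-source@.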
deleteSandbox :: (MonadIO m, Process m) => m ()
deleteSandbox = do
exists <- liftIO $ doesDirectoryExist cabalSandboxDirectory
when exists (callProcess "cabal" ["sandbox", "delete"])
findPackageDb :: (MonadIO m, Fail m) => Path Sandbox -> m (Path PackageDb)
findPackageDb sandbox = do
xs <- liftIO $ getDirectoryContents sandboxDir
case listToMaybe (filter isPackageDb xs) of
Just p -> liftIO $ Path <$> canonicalizePath (sandboxDir </> p)
Nothing -> dieLoc __FILE__ ("No package database found in " ++ show sandboxDir)
where
sandboxDir = path sandbox </> cabalSandboxDirectory
isPackageDb :: FilePath -> Bool
isPackageDb = ("-packages.conf.d" `isSuffixOf`)
cabalSandboxDirectory :: FilePath
cabalSandboxDirectory = ".cabal-sandbox"
registerPackageConfigs :: (MonadIO m, Process m) => Path PackageDb -> [Path PackageConfig] -> m ()
registerPackageConfigs _packageDb [] = return ()
registerPackageConfigs packageDb packages = do
forM_ packages $ \ package ->
liftIO $ copyFile (path package) (path packageDb </> takeFileName (path package))
recache packageDb
recache :: Process m => Path PackageDb -> m ()
recache packageDb = callProcess "ghc-pkg" ["--no-user-package-db", "recache", "--package-db", path packageDb]
| beni55/tinc | src/Tinc/Sandbox.hs | bsd-3-clause | 2,410 | 0 | 15 | 468 | 665 | 344 | 321 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module Language.Eiffel.Position
(Pos (..)
,Line
,Column
,SourcePos
,sourceLine
,sourceColumn
,sourceName
,inheritPos
,attachPos
,attachPosM
,attachEmptyPos
,attachPosBefore
,attachPosHere
,takePos
,position
,contents
) where
import Control.Monad
import Data.DeriveTH
import Data.Binary
import Control.DeepSeq
import Text.Parsec
import Text.Parsec.Pos
import Text.Parsec.ByteString
data Pos a = Pos SourcePos a deriving Ord
instance Eq a => Eq (Pos a) where
(==) p1 p2 = contents p1 == contents p2
instance Show a => Show (Pos a) where
show p = -- show (position p) ++ "> " ++
show (contents p)
instance Functor Pos where
fmap f (Pos s a) = Pos s (f a)
inheritPos :: (Pos a -> b) -> Pos a -> Pos b
inheritPos f a = attachPos (position a) (f a)
takePos :: Pos a -> b -> Pos b
takePos pa b = attachPos (position pa) b
attachEmptyPos = attachPos (initialPos "<no file name>")
attachPos :: SourcePos -> a -> Pos a
attachPos = Pos
attachPosM :: Monad m => m SourcePos -> m a -> m (Pos a)
attachPosM = liftM2 attachPos
attachPosHere :: a -> Parser (Pos a)
attachPosHere a = flip attachPos a `fmap` getPosition
attachPosBefore :: Parser a -> Parser (Pos a)
attachPosBefore = attachPosM getPosition
position :: Pos a -> SourcePos
position (Pos p _) = p
contents :: Pos a -> a
contents (Pos _ a) = a
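-- These equations follow directly from the definitions above (stated here
-- only as an illustration):
--
--   contents (attachPos p x) == x
--   position (attachPos p x) == p
--   takePos pa b             == attachPos (position pa) b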
instance Binary SourcePos where
get = return (newPos "filename lost" 0 0)
put _p = return ()
-- instance Binary SourcePos where
-- get = do (line, col, name) <- get
-- return (newPos name line col)
-- put p = put (sourceLine p, sourceColumn p, sourceName p)
$( derive makeBinary ''Pos )
instance NFData SourcePos where
rnf p = sourceLine p `seq` sourceColumn p `seq` sourceName p `seq` ()
$( derive makeNFData ''Pos )
| scottgw/language-eiffel | Language/Eiffel/Position.hs | bsd-3-clause | 1,915 | 0 | 10 | 475 | 645 | 336 | 309 | 57 | 1 |
module Util.System(tempfile,withTempdir,rmFile,catchIO) where
-- System helper functions.
import Control.Monad (when)
import System.Directory (getTemporaryDirectory
, removeFile
, removeDirectoryRecursive
, createDirectoryIfMissing
)
import System.FilePath ((</>), normalise)
import System.IO
import System.IO.Error
import Control.Exception as CE
catchIO :: IO a -> (IOError -> IO a) -> IO a
catchIO = CE.catch
throwIO :: IOError -> IO a
throwIO = CE.throw
tempfile :: IO (FilePath, Handle)
tempfile = do dir <- getTemporaryDirectory
openTempFile (normalise dir) "idris"
withTempdir :: String -> (FilePath -> IO a) -> IO a
withTempdir subdir callback
= do dir <- getTemporaryDirectory
let tmpDir = (normalise dir) </> subdir
removeLater <- catchIO (createDirectoryIfMissing True tmpDir >> return True)
(\ ioError -> if isAlreadyExistsError ioError then return False
else throw ioError
)
result <- callback tmpDir
when removeLater $ removeDirectoryRecursive tmpDir
return result
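-- Illustrative usage (hypothetical directory name and file contents, not part
-- of the original module):
--
--   withTempdir "idris-demo" $ \dir ->
--     writeFile (dir </> "out.txt") "hello"
--
-- The temporary sub-directory is removed afterwards only if this call created it.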
rmFile :: FilePath -> IO ()
rmFile f = do putStrLn $ "Removing " ++ f
catchIO (removeFile f)
(\ioerr -> putStrLn $ "WARNING: Cannot remove file "
++ f ++ ", Error msg:" ++ show ioerr)
| andyarvanitis/Idris-dev | src/Util/System.hs | bsd-3-clause | 1,488 | 0 | 13 | 493 | 395 | 204 | 191 | 32 | 2 |
{-
%
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
TcGenDeriv: Generating derived instance declarations
This module is nominally ``subordinate'' to @TcDeriv@, which is the
``official'' interface to deriving-related things.
This is where we do all the grimy bindings' generation.
-}
{-# LANGUAGE CPP, ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
module TcGenDeriv (
BagDerivStuff, DerivStuff(..),
hasBuiltinDeriving,
FFoldType(..), functorLikeTraverse,
deepSubtypesContaining, foldDataConArgs,
mkCoerceClassMethEqn,
gen_Newtype_binds,
genAuxBinds,
ordOpTbl, boxConTbl, litConTbl,
mkRdrFunBind
) where
#include "HsVersions.h"
import HsSyn
import RdrName
import BasicTypes
import DataCon
import Name
import Fingerprint
import Encoding
import DynFlags
import PrelInfo
import FamInstEnv( FamInst )
import PrelNames
import THNames
import Module ( moduleName, moduleNameString
, moduleUnitId, unitIdString )
import MkId ( coerceId )
import PrimOp
import SrcLoc
import TyCon
import TcType
import TysPrim
import TysWiredIn
import Type
import Class
import TyCoRep
import VarSet
import VarEnv
import State
import Util
import Var
import Outputable
import Lexeme
import FastString
import Pair
import Bag
import TcEnv (InstInfo)
import StaticFlags( opt_PprStyle_Debug )
import ListSetOps ( assocMaybe )
import Data.List ( partition, intersperse )
type BagDerivStuff = Bag DerivStuff
data AuxBindSpec
= DerivCon2Tag TyCon -- The con2Tag for given TyCon
| DerivTag2Con TyCon -- ...ditto tag2Con
| DerivMaxTag TyCon -- ...and maxTag
deriving( Eq )
-- All these generate ZERO-BASED tag operations
-- I.e first constructor has tag 0
data DerivStuff -- Please add this auxiliary stuff
= DerivAuxBind AuxBindSpec
-- Generics
| DerivFamInst FamInst -- New type family instances
-- New top-level auxiliary bindings
| DerivHsBind (LHsBind RdrName, LSig RdrName) -- Also used for SYB
| DerivInst (InstInfo RdrName) -- New, auxiliary instances
{-
************************************************************************
* *
Class deriving diagnostics
* *
************************************************************************
Only certain blessed classes can be used in a deriving clause. These classes
are listed below in the definition of hasBuiltinDeriving (with the exception
of Generic and Generic1, which are handled separately in TcGenGenerics).
A class might be able to be used in a deriving clause if -XDeriveAnyClass
is willing to support it. The canDeriveAnyClass function checks if this is
the case.
-}
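{- For example (an illustrative sketch, not text from the original), a
declaration such as
     data T = MkT Int Bool deriving( Eq, Ord, Show )
is handled by the gen_Eq_binds, gen_Ord_binds and gen_Show_binds entries of
gen_list below.
-}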
hasBuiltinDeriving :: DynFlags
-> (Name -> Fixity)
-> Class
-> Maybe (SrcSpan
-> TyCon
-> (LHsBinds RdrName, BagDerivStuff))
hasBuiltinDeriving dflags fix_env clas = assocMaybe gen_list (getUnique clas)
where
gen_list :: [(Unique, SrcSpan -> TyCon -> (LHsBinds RdrName, BagDerivStuff))]
gen_list = [ (eqClassKey, gen_Eq_binds)
, (ordClassKey, gen_Ord_binds)
, (enumClassKey, gen_Enum_binds)
, (boundedClassKey, gen_Bounded_binds)
, (ixClassKey, gen_Ix_binds)
, (showClassKey, gen_Show_binds fix_env)
, (readClassKey, gen_Read_binds fix_env)
, (dataClassKey, gen_Data_binds dflags)
, (functorClassKey, gen_Functor_binds)
, (foldableClassKey, gen_Foldable_binds)
, (traversableClassKey, gen_Traversable_binds)
, (liftClassKey, gen_Lift_binds) ]
{-
************************************************************************
* *
Eq instances
* *
************************************************************************
Here are the heuristics for the code we generate for @Eq@. Let's
assume we have a data type with some (possibly zero) nullary data
constructors and some ordinary, non-nullary ones (the rest, also
possibly zero of them). Here's an example, with both \tr{N}ullary and
\tr{O}rdinary data cons.
data Foo ... = N1 | N2 ... | Nn | O1 a b | O2 Int | O3 Double b b | ...
* For the ordinary constructors (if any), we emit clauses to do The
Usual Thing, e.g.,:
(==) (O1 a1 b1) (O1 a2 b2) = a1 == a2 && b1 == b2
(==) (O2 a1) (O2 a2) = a1 == a2
(==) (O3 a1 b1 c1) (O3 a2 b2 c2) = a1 == a2 && b1 == b2 && c1 == c2
Note: if we're comparing unlifted things, e.g., if 'a1' and
'a2' are Float#s, then we have to generate
case (a1 `eqFloat#` a2) of r -> r
for that particular test.
* If there are a lot of (more than ten) nullary constructors, we emit a
catch-all clause of the form:
(==) a b = case (con2tag_Foo a) of { a# ->
case (con2tag_Foo b) of { b# ->
case (a# ==# b#) of {
r -> r }}}
If con2tag gets inlined this leads to join point stuff, so
it's better to use regular pattern matching if there aren't too
many nullary constructors. "Ten" is arbitrary, of course
* If there aren't any nullary constructors, we emit a simpler
catch-all:
(==) a b = False
* For the @(/=)@ method, we normally just use the default method.
If the type is an enumeration type, we could/may/should? generate
special code that calls @con2tag_Foo@, much like for @(==)@ shown
above.
We thought about doing this: If we're also deriving 'Ord' for this
tycon, we generate:
instance ... Eq (Foo ...) where
(==) a b = case (compare a b) of { _LT -> False; _EQ -> True ; _GT -> False}
(/=) a b = case (compare a b) of { _LT -> True ; _EQ -> False; _GT -> True }
However, that requires that (Ord <whatever>) was put in the context
for the instance decl, which it probably wasn't, so the decls
produced don't get through the typechecker.
-}
gen_Eq_binds :: SrcSpan -> TyCon -> (LHsBinds RdrName, BagDerivStuff)
gen_Eq_binds loc tycon
= (method_binds, aux_binds)
where
all_cons = tyConDataCons tycon
(nullary_cons, non_nullary_cons) = partition isNullarySrcDataCon all_cons
-- If there are ten or more (arbitrary number) nullary constructors,
-- use the con2tag stuff. For small types it's better to use
-- ordinary pattern matching.
(tag_match_cons, pat_match_cons)
| nullary_cons `lengthExceeds` 10 = (nullary_cons, non_nullary_cons)
| otherwise = ([], all_cons)
no_tag_match_cons = null tag_match_cons
fall_through_eqn
| no_tag_match_cons -- All constructors have arguments
= case pat_match_cons of
          []  -> []   -- No constructors; no fall-through case
          [_] -> []   -- One constructor; no fall-through case
_ -> -- Two or more constructors; add fall-through of
-- (==) _ _ = False
[([nlWildPat, nlWildPat], false_Expr)]
| otherwise -- One or more tag_match cons; add fall-through of
                  -- extract tags and compare for equality
= [([a_Pat, b_Pat],
untag_Expr tycon [(a_RDR,ah_RDR), (b_RDR,bh_RDR)]
(genPrimOpApp (nlHsVar ah_RDR) eqInt_RDR (nlHsVar bh_RDR)))]
aux_binds | no_tag_match_cons = emptyBag
| otherwise = unitBag $ DerivAuxBind $ DerivCon2Tag tycon
method_binds = listToBag [eq_bind, ne_bind]
eq_bind = mk_FunBind loc eq_RDR (map pats_etc pat_match_cons ++ fall_through_eqn)
ne_bind = mk_easy_FunBind loc ne_RDR [a_Pat, b_Pat] (
nlHsApp (nlHsVar not_RDR) (nlHsPar (nlHsVarApps eq_RDR [a_RDR, b_RDR])))
------------------------------------------------------------------
pats_etc data_con
= let
con1_pat = nlConVarPat data_con_RDR as_needed
con2_pat = nlConVarPat data_con_RDR bs_needed
data_con_RDR = getRdrName data_con
con_arity = length tys_needed
as_needed = take con_arity as_RDRs
bs_needed = take con_arity bs_RDRs
tys_needed = dataConOrigArgTys data_con
in
([con1_pat, con2_pat], nested_eq_expr tys_needed as_needed bs_needed)
where
nested_eq_expr [] [] [] = true_Expr
nested_eq_expr tys as bs
= foldl1 and_Expr (zipWith3Equal "nested_eq" nested_eq tys as bs)
where
nested_eq ty a b = nlHsPar (eq_Expr tycon ty (nlHsVar a) (nlHsVar b))
{-
************************************************************************
* *
Ord instances
* *
************************************************************************
Note [Generating Ord instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose constructors are K1..Kn, and some are nullary.
The general form we generate is:
* Do case on first argument
case a of
K1 ... -> rhs_1
K2 ... -> rhs_2
...
Kn ... -> rhs_n
_ -> nullary_rhs
* To make rhs_i
If i = 1, 2, n-1, n, generate a single case.
rhs_2 case b of
K1 {} -> LT
K2 ... -> ...eq_rhs(K2)...
_ -> GT
Otherwise do a tag compare against the bigger range
(because this is the one most likely to succeed)
rhs_3 case tag b of tb ->
              if 3 <# tb then GT
else case b of
K3 ... -> ...eq_rhs(K3)....
_ -> LT
* To make eq_rhs(K), which knows that
a = K a1 .. av
b = K b1 .. bv
we just want to compare (a1,b1) then (a2,b2) etc.
Take care on the last field to tail-call into comparing av,bv
* To make nullary_rhs generate this
case con2tag a of a# ->
     case con2tag b of b# ->
a# `compare` b#
Several special cases:
* Two or fewer nullary constructors: don't generate nullary_rhs
* Be careful about unlifted comparisons. When comparing unboxed
values we can't call the overloaded functions.
See function unliftedOrdOp
Note [Do not rely on compare]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's a bad idea to define only 'compare', and build the other binary
comparisons on top of it; see Trac #2130, #4019. Reason: we don't
want to laboriously make a three-way comparison, only to extract a
binary result, something like this:
(>) (I# x) (I# y) = case <# x y of
True -> False
False -> case ==# x y of
True -> False
False -> True
So for sufficiently small types (few constructors, or all nullary)
we generate all methods; for large ones we just use 'compare'.
-}
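{- A concrete sketch (illustrative, not the verbatim output of the generator):
for
     data Colour = R | G | B deriving( Ord )
all constructors are nullary, so the tag-comparison path produces roughly
     compare a b = case con2tag_Colour a of a# ->
                   case con2tag_Colour b of b# ->
                   if a# <# b# then LT else if a# ==# b# then EQ else GT
and, because there are only three constructors, (<), (<=), (>=) and (>) are
generated directly as well (see Note [Do not rely on compare]).
-}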
data OrdOp = OrdCompare | OrdLT | OrdLE | OrdGE | OrdGT
------------
ordMethRdr :: OrdOp -> RdrName
ordMethRdr op
= case op of
OrdCompare -> compare_RDR
OrdLT -> lt_RDR
OrdLE -> le_RDR
OrdGE -> ge_RDR
OrdGT -> gt_RDR
------------
ltResult :: OrdOp -> LHsExpr RdrName
-- Knowing a<b, what is the result for a `op` b?
ltResult OrdCompare = ltTag_Expr
ltResult OrdLT = true_Expr
ltResult OrdLE = true_Expr
ltResult OrdGE = false_Expr
ltResult OrdGT = false_Expr
------------
eqResult :: OrdOp -> LHsExpr RdrName
-- Knowing a=b, what is the result for a `op` b?
eqResult OrdCompare = eqTag_Expr
eqResult OrdLT = false_Expr
eqResult OrdLE = true_Expr
eqResult OrdGE = true_Expr
eqResult OrdGT = false_Expr
------------
gtResult :: OrdOp -> LHsExpr RdrName
-- Knowing a>b, what is the result for a `op` b?
gtResult OrdCompare = gtTag_Expr
gtResult OrdLT = false_Expr
gtResult OrdLE = false_Expr
gtResult OrdGE = true_Expr
gtResult OrdGT = true_Expr
------------
gen_Ord_binds :: SrcSpan -> TyCon -> (LHsBinds RdrName, BagDerivStuff)
gen_Ord_binds loc tycon
| null tycon_data_cons -- No data-cons => invoke bale-out case
= (unitBag $ mk_FunBind loc compare_RDR [], emptyBag)
| otherwise
= (unitBag (mkOrdOp OrdCompare) `unionBags` other_ops, aux_binds)
where
aux_binds | single_con_type = emptyBag
| otherwise = unitBag $ DerivAuxBind $ DerivCon2Tag tycon
-- Note [Do not rely on compare]
other_ops | (last_tag - first_tag) <= 2 -- 1-3 constructors
|| null non_nullary_cons -- Or it's an enumeration
= listToBag (map mkOrdOp [OrdLT,OrdLE,OrdGE,OrdGT])
| otherwise
= emptyBag
get_tag con = dataConTag con - fIRST_TAG
-- We want *zero-based* tags, because that's what
-- con2Tag returns (generated by untag_Expr)!
tycon_data_cons = tyConDataCons tycon
single_con_type = isSingleton tycon_data_cons
(first_con : _) = tycon_data_cons
(last_con : _) = reverse tycon_data_cons
first_tag = get_tag first_con
last_tag = get_tag last_con
(nullary_cons, non_nullary_cons) = partition isNullarySrcDataCon tycon_data_cons
mkOrdOp :: OrdOp -> LHsBind RdrName
-- Returns a binding op a b = ... compares a and b according to op ....
mkOrdOp op = mk_easy_FunBind loc (ordMethRdr op) [a_Pat, b_Pat] (mkOrdOpRhs op)
mkOrdOpRhs :: OrdOp -> LHsExpr RdrName
mkOrdOpRhs op -- RHS for comparing 'a' and 'b' according to op
| length nullary_cons <= 2 -- Two nullary or fewer, so use cases
= nlHsCase (nlHsVar a_RDR) $
map (mkOrdOpAlt op) tycon_data_cons
-- i.e. case a of { C1 x y -> case b of C1 x y -> ....compare x,y...
        --                   C2 x   -> case b of C2 x -> ....compare x.... }
| null non_nullary_cons -- All nullary, so go straight to comparing tags
= mkTagCmp op
| otherwise -- Mixed nullary and non-nullary
= nlHsCase (nlHsVar a_RDR) $
(map (mkOrdOpAlt op) non_nullary_cons
++ [mkSimpleHsAlt nlWildPat (mkTagCmp op)])
mkOrdOpAlt :: OrdOp -> DataCon -> LMatch RdrName (LHsExpr RdrName)
-- Make the alternative (Ki a1 a2 .. av ->
mkOrdOpAlt op data_con
= mkSimpleHsAlt (nlConVarPat data_con_RDR as_needed) (mkInnerRhs op data_con)
where
as_needed = take (dataConSourceArity data_con) as_RDRs
data_con_RDR = getRdrName data_con
mkInnerRhs op data_con
| single_con_type
= nlHsCase (nlHsVar b_RDR) [ mkInnerEqAlt op data_con ]
| tag == first_tag
= nlHsCase (nlHsVar b_RDR) [ mkInnerEqAlt op data_con
, mkSimpleHsAlt nlWildPat (ltResult op) ]
| tag == last_tag
= nlHsCase (nlHsVar b_RDR) [ mkInnerEqAlt op data_con
, mkSimpleHsAlt nlWildPat (gtResult op) ]
| tag == first_tag + 1
= nlHsCase (nlHsVar b_RDR) [ mkSimpleHsAlt (nlConWildPat first_con) (gtResult op)
, mkInnerEqAlt op data_con
, mkSimpleHsAlt nlWildPat (ltResult op) ]
| tag == last_tag - 1
= nlHsCase (nlHsVar b_RDR) [ mkSimpleHsAlt (nlConWildPat last_con) (ltResult op)
, mkInnerEqAlt op data_con
, mkSimpleHsAlt nlWildPat (gtResult op) ]
| tag > last_tag `div` 2 -- lower range is larger
= untag_Expr tycon [(b_RDR, bh_RDR)] $
nlHsIf (genPrimOpApp (nlHsVar bh_RDR) ltInt_RDR tag_lit)
(gtResult op) $ -- Definitely GT
nlHsCase (nlHsVar b_RDR) [ mkInnerEqAlt op data_con
, mkSimpleHsAlt nlWildPat (ltResult op) ]
| otherwise -- upper range is larger
= untag_Expr tycon [(b_RDR, bh_RDR)] $
nlHsIf (genPrimOpApp (nlHsVar bh_RDR) gtInt_RDR tag_lit)
(ltResult op) $ -- Definitely LT
nlHsCase (nlHsVar b_RDR) [ mkInnerEqAlt op data_con
, mkSimpleHsAlt nlWildPat (gtResult op) ]
where
tag = get_tag data_con
tag_lit = noLoc (HsLit (HsIntPrim "" (toInteger tag)))
mkInnerEqAlt :: OrdOp -> DataCon -> LMatch RdrName (LHsExpr RdrName)
-- First argument 'a' known to be built with K
-- Returns a case alternative Ki b1 b2 ... bv -> compare (a1,a2,...) with (b1,b2,...)
mkInnerEqAlt op data_con
= mkSimpleHsAlt (nlConVarPat data_con_RDR bs_needed) $
mkCompareFields tycon op (dataConOrigArgTys data_con)
where
data_con_RDR = getRdrName data_con
bs_needed = take (dataConSourceArity data_con) bs_RDRs
mkTagCmp :: OrdOp -> LHsExpr RdrName
-- Both constructors known to be nullary
    -- generates (case con2tag a of a# -> case con2tag b of b# -> a# `op` b#)
mkTagCmp op = untag_Expr tycon [(a_RDR, ah_RDR),(b_RDR, bh_RDR)] $
unliftedOrdOp tycon intPrimTy op ah_RDR bh_RDR
mkCompareFields :: TyCon -> OrdOp -> [Type] -> LHsExpr RdrName
-- Generates nested comparisons for (a1,a2...) against (b1,b2,...)
-- where the ai,bi have the given types
mkCompareFields tycon op tys
= go tys as_RDRs bs_RDRs
where
go [] _ _ = eqResult op
go [ty] (a:_) (b:_)
| isUnliftedType ty = unliftedOrdOp tycon ty op a b
| otherwise = genOpApp (nlHsVar a) (ordMethRdr op) (nlHsVar b)
go (ty:tys) (a:as) (b:bs) = mk_compare ty a b
(ltResult op)
(go tys as bs)
(gtResult op)
go _ _ _ = panic "mkCompareFields"
-- (mk_compare ty a b) generates
    --    (case (compare a b) of { LT -> <lt>; EQ -> <eq>; GT -> <gt> })
    -- but with suitable special cases for unlifted types
mk_compare ty a b lt eq gt
| isUnliftedType ty
= unliftedCompare lt_op eq_op a_expr b_expr lt eq gt
| otherwise
= nlHsCase (nlHsPar (nlHsApp (nlHsApp (nlHsVar compare_RDR) a_expr) b_expr))
[mkSimpleHsAlt (nlNullaryConPat ltTag_RDR) lt,
mkSimpleHsAlt (nlNullaryConPat eqTag_RDR) eq,
mkSimpleHsAlt (nlNullaryConPat gtTag_RDR) gt]
where
a_expr = nlHsVar a
b_expr = nlHsVar b
(lt_op, _, eq_op, _, _) = primOrdOps "Ord" tycon ty
unliftedOrdOp :: TyCon -> Type -> OrdOp -> RdrName -> RdrName -> LHsExpr RdrName
unliftedOrdOp tycon ty op a b
= case op of
OrdCompare -> unliftedCompare lt_op eq_op a_expr b_expr
ltTag_Expr eqTag_Expr gtTag_Expr
OrdLT -> wrap lt_op
OrdLE -> wrap le_op
OrdGE -> wrap ge_op
OrdGT -> wrap gt_op
where
(lt_op, le_op, eq_op, ge_op, gt_op) = primOrdOps "Ord" tycon ty
wrap prim_op = genPrimOpApp a_expr prim_op b_expr
a_expr = nlHsVar a
b_expr = nlHsVar b
unliftedCompare :: RdrName -> RdrName
                -> LHsExpr RdrName -> LHsExpr RdrName -- What to compare
-> LHsExpr RdrName -> LHsExpr RdrName -> LHsExpr RdrName -- Three results
-> LHsExpr RdrName
-- Return (if a < b then lt else if a == b then eq else gt)
unliftedCompare lt_op eq_op a_expr b_expr lt eq gt
= nlHsIf (genPrimOpApp a_expr lt_op b_expr) lt $
-- Test (<) first, not (==), because the latter
-- is true less often, so putting it first would
-- mean more tests (dynamically)
nlHsIf (genPrimOpApp a_expr eq_op b_expr) eq gt
nlConWildPat :: DataCon -> LPat RdrName
-- The pattern (K {})
nlConWildPat con = noLoc (ConPatIn (noLoc (getRdrName con))
(RecCon (HsRecFields { rec_flds = []
, rec_dotdot = Nothing })))
{-
************************************************************************
* *
Enum instances
* *
************************************************************************
@Enum@ can only be derived for enumeration types. For a type
\begin{verbatim}
data Foo ... = N1 | N2 | ... | Nn
\end{verbatim}
we use both @con2tag_Foo@ and @tag2con_Foo@ functions, as well as a
@maxtag_Foo@ variable (all generated by @gen_tag_n_con_binds@).
\begin{verbatim}
instance ... Enum (Foo ...) where
succ x = toEnum (1 + fromEnum x)
pred x = toEnum (fromEnum x - 1)
toEnum i = tag2con_Foo i
enumFrom a = map tag2con_Foo [con2tag_Foo a .. maxtag_Foo]
-- or, really...
enumFrom a
= case con2tag_Foo a of
a# -> map tag2con_Foo (enumFromTo (I# a#) maxtag_Foo)
enumFromThen a b
= map tag2con_Foo [con2tag_Foo a, con2tag_Foo b .. maxtag_Foo]
-- or, really...
enumFromThen a b
= case con2tag_Foo a of { a# ->
case con2tag_Foo b of { b# ->
map tag2con_Foo (enumFromThenTo (I# a#) (I# b#) maxtag_Foo)
}}
\end{verbatim}
For @enumFromTo@ and @enumFromThenTo@, we use the default methods.
-}
gen_Enum_binds :: SrcSpan -> TyCon -> (LHsBinds RdrName, BagDerivStuff)
gen_Enum_binds loc tycon
= (method_binds, aux_binds)
where
method_binds = listToBag [
succ_enum,
pred_enum,
to_enum,
enum_from,
enum_from_then,
from_enum
]
aux_binds = listToBag $ map DerivAuxBind
[DerivCon2Tag tycon, DerivTag2Con tycon, DerivMaxTag tycon]
occ_nm = getOccString tycon
succ_enum
= mk_easy_FunBind loc succ_RDR [a_Pat] $
untag_Expr tycon [(a_RDR, ah_RDR)] $
nlHsIf (nlHsApps eq_RDR [nlHsVar (maxtag_RDR tycon),
nlHsVarApps intDataCon_RDR [ah_RDR]])
(illegal_Expr "succ" occ_nm "tried to take `succ' of last tag in enumeration")
(nlHsApp (nlHsVar (tag2con_RDR tycon))
(nlHsApps plus_RDR [nlHsVarApps intDataCon_RDR [ah_RDR],
nlHsIntLit 1]))
pred_enum
= mk_easy_FunBind loc pred_RDR [a_Pat] $
untag_Expr tycon [(a_RDR, ah_RDR)] $
nlHsIf (nlHsApps eq_RDR [nlHsIntLit 0,
nlHsVarApps intDataCon_RDR [ah_RDR]])
(illegal_Expr "pred" occ_nm "tried to take `pred' of first tag in enumeration")
(nlHsApp (nlHsVar (tag2con_RDR tycon))
(nlHsApps plus_RDR [nlHsVarApps intDataCon_RDR [ah_RDR],
nlHsLit (HsInt "-1" (-1))]))
to_enum
= mk_easy_FunBind loc toEnum_RDR [a_Pat] $
nlHsIf (nlHsApps and_RDR
[nlHsApps ge_RDR [nlHsVar a_RDR, nlHsIntLit 0],
nlHsApps le_RDR [nlHsVar a_RDR, nlHsVar (maxtag_RDR tycon)]])
(nlHsVarApps (tag2con_RDR tycon) [a_RDR])
(illegal_toEnum_tag occ_nm (maxtag_RDR tycon))
enum_from
= mk_easy_FunBind loc enumFrom_RDR [a_Pat] $
untag_Expr tycon [(a_RDR, ah_RDR)] $
nlHsApps map_RDR
[nlHsVar (tag2con_RDR tycon),
nlHsPar (enum_from_to_Expr
(nlHsVarApps intDataCon_RDR [ah_RDR])
(nlHsVar (maxtag_RDR tycon)))]
enum_from_then
= mk_easy_FunBind loc enumFromThen_RDR [a_Pat, b_Pat] $
untag_Expr tycon [(a_RDR, ah_RDR), (b_RDR, bh_RDR)] $
nlHsApp (nlHsVarApps map_RDR [tag2con_RDR tycon]) $
nlHsPar (enum_from_then_to_Expr
(nlHsVarApps intDataCon_RDR [ah_RDR])
(nlHsVarApps intDataCon_RDR [bh_RDR])
(nlHsIf (nlHsApps gt_RDR [nlHsVarApps intDataCon_RDR [ah_RDR],
nlHsVarApps intDataCon_RDR [bh_RDR]])
(nlHsIntLit 0)
(nlHsVar (maxtag_RDR tycon))
))
from_enum
= mk_easy_FunBind loc fromEnum_RDR [a_Pat] $
untag_Expr tycon [(a_RDR, ah_RDR)] $
(nlHsVarApps intDataCon_RDR [ah_RDR])
{-
************************************************************************
* *
Bounded instances
* *
************************************************************************
-}
gen_Bounded_binds :: SrcSpan -> TyCon -> (LHsBinds RdrName, BagDerivStuff)
gen_Bounded_binds loc tycon
| isEnumerationTyCon tycon
= (listToBag [ min_bound_enum, max_bound_enum ], emptyBag)
| otherwise
= ASSERT(isSingleton data_cons)
(listToBag [ min_bound_1con, max_bound_1con ], emptyBag)
where
data_cons = tyConDataCons tycon
----- enum-flavored: ---------------------------
min_bound_enum = mkHsVarBind loc minBound_RDR (nlHsVar data_con_1_RDR)
max_bound_enum = mkHsVarBind loc maxBound_RDR (nlHsVar data_con_N_RDR)
data_con_1 = head data_cons
data_con_N = last data_cons
data_con_1_RDR = getRdrName data_con_1
data_con_N_RDR = getRdrName data_con_N
----- single-constructor-flavored: -------------
arity = dataConSourceArity data_con_1
min_bound_1con = mkHsVarBind loc minBound_RDR $
nlHsVarApps data_con_1_RDR (nOfThem arity minBound_RDR)
max_bound_1con = mkHsVarBind loc maxBound_RDR $
nlHsVarApps data_con_1_RDR (nOfThem arity maxBound_RDR)
{-
************************************************************************
* *
Ix instances
* *
************************************************************************
Deriving @Ix@ is only possible for enumeration types and
single-constructor types. We deal with them in turn.
For an enumeration type, e.g.,
\begin{verbatim}
data Foo ... = N1 | N2 | ... | Nn
\end{verbatim}
things go not too differently from @Enum@:
\begin{verbatim}
instance ... Ix (Foo ...) where
range (a, b)
= map tag2con_Foo [con2tag_Foo a .. con2tag_Foo b]
-- or, really...
range (a, b)
= case (con2tag_Foo a) of { a# ->
case (con2tag_Foo b) of { b# ->
map tag2con_Foo (enumFromTo (I# a#) (I# b#))
}}
-- Generate code for unsafeIndex, because using index leads
-- to lots of redundant range tests
unsafeIndex c@(a, b) d
= case (con2tag_Foo d -# con2tag_Foo a) of
r# -> I# r#
inRange (a, b) c
= let
p_tag = con2tag_Foo c
in
p_tag >= con2tag_Foo a && p_tag <= con2tag_Foo b
-- or, really...
inRange (a, b) c
= case (con2tag_Foo a) of { a_tag ->
case (con2tag_Foo b) of { b_tag ->
case (con2tag_Foo c) of { c_tag ->
if (c_tag >=# a_tag) then
c_tag <=# b_tag
else
False
}}}
\end{verbatim}
(modulo suitable case-ification to handle the unlifted tags)
For a single-constructor type (NB: this includes all tuples), e.g.,
\begin{verbatim}
data Foo ... = MkFoo a b Int Double c c
\end{verbatim}
we follow the scheme given in Figure~19 of the Haskell~1.2 report
(p.~147).
-}
gen_Ix_binds :: SrcSpan -> TyCon -> (LHsBinds RdrName, BagDerivStuff)
gen_Ix_binds loc tycon
| isEnumerationTyCon tycon
= ( enum_ixes
, listToBag $ map DerivAuxBind
[DerivCon2Tag tycon, DerivTag2Con tycon, DerivMaxTag tycon])
| otherwise
= (single_con_ixes, unitBag (DerivAuxBind (DerivCon2Tag tycon)))
where
--------------------------------------------------------------
enum_ixes = listToBag [ enum_range, enum_index, enum_inRange ]
enum_range
= mk_easy_FunBind loc range_RDR [nlTuplePat [a_Pat, b_Pat] Boxed] $
untag_Expr tycon [(a_RDR, ah_RDR)] $
untag_Expr tycon [(b_RDR, bh_RDR)] $
nlHsApp (nlHsVarApps map_RDR [tag2con_RDR tycon]) $
nlHsPar (enum_from_to_Expr
(nlHsVarApps intDataCon_RDR [ah_RDR])
(nlHsVarApps intDataCon_RDR [bh_RDR]))
enum_index
= mk_easy_FunBind loc unsafeIndex_RDR
[noLoc (AsPat (noLoc c_RDR)
(nlTuplePat [a_Pat, nlWildPat] Boxed)),
d_Pat] (
untag_Expr tycon [(a_RDR, ah_RDR)] (
untag_Expr tycon [(d_RDR, dh_RDR)] (
let
rhs = nlHsVarApps intDataCon_RDR [c_RDR]
in
nlHsCase
(genOpApp (nlHsVar dh_RDR) minusInt_RDR (nlHsVar ah_RDR))
[mkSimpleHsAlt (nlVarPat c_RDR) rhs]
))
)
-- This produces something like `(ch >= ah) && (ch <= bh)`
enum_inRange
= mk_easy_FunBind loc inRange_RDR [nlTuplePat [a_Pat, b_Pat] Boxed, c_Pat] $
untag_Expr tycon [(a_RDR, ah_RDR)] (
untag_Expr tycon [(b_RDR, bh_RDR)] (
untag_Expr tycon [(c_RDR, ch_RDR)] (
-- This used to use `if`, which interacts badly with RebindableSyntax.
-- See #11396.
nlHsApps and_RDR
[ genPrimOpApp (nlHsVar ch_RDR) geInt_RDR (nlHsVar ah_RDR)
, genPrimOpApp (nlHsVar ch_RDR) leInt_RDR (nlHsVar bh_RDR)
]
)))
--------------------------------------------------------------
single_con_ixes
= listToBag [single_con_range, single_con_index, single_con_inRange]
data_con
= case tyConSingleDataCon_maybe tycon of -- just checking...
Nothing -> panic "get_Ix_binds"
Just dc -> dc
con_arity = dataConSourceArity data_con
data_con_RDR = getRdrName data_con
as_needed = take con_arity as_RDRs
bs_needed = take con_arity bs_RDRs
cs_needed = take con_arity cs_RDRs
con_pat xs = nlConVarPat data_con_RDR xs
con_expr = nlHsVarApps data_con_RDR cs_needed
--------------------------------------------------------------
single_con_range
= mk_easy_FunBind loc range_RDR
[nlTuplePat [con_pat as_needed, con_pat bs_needed] Boxed] $
noLoc (mkHsComp ListComp stmts con_expr)
where
stmts = zipWith3Equal "single_con_range" mk_qual as_needed bs_needed cs_needed
mk_qual a b c = noLoc $ mkBindStmt (nlVarPat c)
(nlHsApp (nlHsVar range_RDR)
(mkLHsVarTuple [a,b]))
----------------
single_con_index
= mk_easy_FunBind loc unsafeIndex_RDR
[nlTuplePat [con_pat as_needed, con_pat bs_needed] Boxed,
con_pat cs_needed]
-- We need to reverse the order we consider the components in
-- so that
-- range (l,u) !! index (l,u) i == i -- when i is in range
-- (from http://haskell.org/onlinereport/ix.html) holds.
(mk_index (reverse $ zip3 as_needed bs_needed cs_needed))
where
-- index (l1,u1) i1 + rangeSize (l1,u1) * (index (l2,u2) i2 + ...)
mk_index [] = nlHsIntLit 0
mk_index [(l,u,i)] = mk_one l u i
mk_index ((l,u,i) : rest)
= genOpApp (
mk_one l u i
) plus_RDR (
genOpApp (
(nlHsApp (nlHsVar unsafeRangeSize_RDR)
(mkLHsVarTuple [l,u]))
) times_RDR (mk_index rest)
)
mk_one l u i
= nlHsApps unsafeIndex_RDR [mkLHsVarTuple [l,u], nlHsVar i]
------------------
single_con_inRange
= mk_easy_FunBind loc inRange_RDR
[nlTuplePat [con_pat as_needed, con_pat bs_needed] Boxed,
con_pat cs_needed] $
foldl1 and_Expr (zipWith3Equal "single_con_inRange" in_range as_needed bs_needed cs_needed)
where
in_range a b c = nlHsApps inRange_RDR [mkLHsVarTuple [a,b], nlHsVar c]
{-
************************************************************************
* *
Read instances
* *
************************************************************************
Example
infix 4 %%
data T = Int %% Int
| T1 { f1 :: Int }
| T2 T
instance Read T where
readPrec =
parens
( prec 4 (
do x <- ReadP.step Read.readPrec
expectP (Symbol "%%")
y <- ReadP.step Read.readPrec
return (x %% y))
+++
prec (appPrec+1) (
-- Note the "+1" part; "T2 T1 {f1=3}" should parse ok
-- Record construction binds even more tightly than application
do expectP (Ident "T1")
expectP (Punc '{')
expectP (Ident "f1")
expectP (Punc '=')
x <- ReadP.reset Read.readPrec
expectP (Punc '}')
return (T1 { f1 = x }))
+++
prec appPrec (
do expectP (Ident "T2")
x <- ReadP.step Read.readPrec
return (T2 x))
)
readListPrec = readListPrecDefault
readList = readListDefault
Note [Use expectP]
~~~~~~~~~~~~~~~~~~
Note that we use
expectP (Ident "T1")
rather than
Ident "T1" <- lexP
The latter desugars to inline code for matching the Ident and the
string, and this can be very voluminous. The former is much more
compact. Cf Trac #7258, although that also concerned non-linearity in
the occurrence analyser, a separate issue.
Note [Read for empty data types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
What should we get for this? (Trac #7931)
data Emp deriving( Read ) -- No data constructors
Here we want
read "[]" :: [Emp] to succeed, returning []
So we do NOT want
instance Read Emp where
readPrec = error "urk"
Rather we want
instance Read Emp where
    readPrec = pfail   -- Same as choose []
Because 'pfail' allows the parser to backtrack, but 'error' doesn't.
These instances are also useful for Read (Either Int Emp), where
we want to be able to parse (Left 3) just fine.
-}
gen_Read_binds :: (Name -> Fixity) -> SrcSpan -> TyCon -> (LHsBinds RdrName, BagDerivStuff)
gen_Read_binds get_fixity loc tycon
= (listToBag [read_prec, default_readlist, default_readlistprec], emptyBag)
where
-----------------------------------------------------------------------
default_readlist
= mkHsVarBind loc readList_RDR (nlHsVar readListDefault_RDR)
default_readlistprec
= mkHsVarBind loc readListPrec_RDR (nlHsVar readListPrecDefault_RDR)
-----------------------------------------------------------------------
data_cons = tyConDataCons tycon
(nullary_cons, non_nullary_cons) = partition isNullarySrcDataCon data_cons
read_prec = mkHsVarBind loc readPrec_RDR
(nlHsApp (nlHsVar parens_RDR) read_cons)
read_cons | null data_cons = nlHsVar pfail_RDR -- See Note [Read for empty data types]
| otherwise = foldr1 mk_alt (read_nullary_cons ++ read_non_nullary_cons)
read_non_nullary_cons = map read_non_nullary_con non_nullary_cons
read_nullary_cons
= case nullary_cons of
[] -> []
[con] -> [nlHsDo DoExpr (match_con con ++ [noLoc $ mkLastStmt (result_expr con [])])]
_ -> [nlHsApp (nlHsVar choose_RDR)
(nlList (map mk_pair nullary_cons))]
-- NB For operators the parens around (:=:) are matched by the
-- enclosing "parens" call, so here we must match the naked
-- data_con_str con
match_con con | isSym con_str = [symbol_pat con_str]
| otherwise = ident_h_pat con_str
where
con_str = data_con_str con
-- For nullary constructors we must match Ident s for normal constrs
-- and Symbol s for operators
mk_pair con = mkLHsTupleExpr [nlHsLit (mkHsString (data_con_str con)),
result_expr con []]
read_non_nullary_con data_con
| is_infix = mk_parser infix_prec infix_stmts body
| is_record = mk_parser record_prec record_stmts body
-- Using these two lines instead allows the derived
-- read for infix and record bindings to read the prefix form
-- | is_infix = mk_alt prefix_parser (mk_parser infix_prec infix_stmts body)
-- | is_record = mk_alt prefix_parser (mk_parser record_prec record_stmts body)
| otherwise = prefix_parser
where
body = result_expr data_con as_needed
con_str = data_con_str data_con
prefix_parser = mk_parser prefix_prec prefix_stmts body
read_prefix_con
| isSym con_str = [read_punc "(", symbol_pat con_str, read_punc ")"]
| otherwise = ident_h_pat con_str
read_infix_con
| isSym con_str = [symbol_pat con_str]
| otherwise = [read_punc "`"] ++ ident_h_pat con_str ++ [read_punc "`"]
prefix_stmts -- T a b c
= read_prefix_con ++ read_args
infix_stmts -- a %% b, or a `T` b
= [read_a1]
++ read_infix_con
++ [read_a2]
record_stmts -- T { f1 = a, f2 = b }
= read_prefix_con
++ [read_punc "{"]
++ concat (intersperse [read_punc ","] field_stmts)
++ [read_punc "}"]
field_stmts = zipWithEqual "lbl_stmts" read_field labels as_needed
con_arity = dataConSourceArity data_con
labels = map flLabel $ dataConFieldLabels data_con
dc_nm = getName data_con
is_infix = dataConIsInfix data_con
is_record = length labels > 0
as_needed = take con_arity as_RDRs
read_args = zipWithEqual "gen_Read_binds" read_arg as_needed (dataConOrigArgTys data_con)
(read_a1:read_a2:_) = read_args
prefix_prec = appPrecedence
infix_prec = getPrecedence get_fixity dc_nm
record_prec = appPrecedence + 1 -- Record construction binds even more tightly
-- than application; e.g. T2 T1 {x=2} means T2 (T1 {x=2})
------------------------------------------------------------------------
-- Helpers
------------------------------------------------------------------------
mk_alt e1 e2 = genOpApp e1 alt_RDR e2 -- e1 +++ e2
mk_parser p ss b = nlHsApps prec_RDR [nlHsIntLit p -- prec p (do { ss ; b })
, nlHsDo DoExpr (ss ++ [noLoc $ mkLastStmt b])]
con_app con as = nlHsVarApps (getRdrName con) as -- con as
result_expr con as = nlHsApp (nlHsVar returnM_RDR) (con_app con as) -- return (con as)
-- For constructors and field labels ending in '#', we hackily
-- let the lexer generate two tokens, and look for both in sequence
-- Thus [Ident "I"; Symbol "#"]. See Trac #5041
ident_h_pat s | Just (ss, '#') <- snocView s = [ ident_pat ss, symbol_pat "#" ]
| otherwise = [ ident_pat s ]
bindLex pat = noLoc (mkBodyStmt (nlHsApp (nlHsVar expectP_RDR) pat)) -- expectP p
-- See Note [Use expectP]
ident_pat s = bindLex $ nlHsApps ident_RDR [nlHsLit (mkHsString s)] -- expectP (Ident "foo")
symbol_pat s = bindLex $ nlHsApps symbol_RDR [nlHsLit (mkHsString s)] -- expectP (Symbol ">>")
read_punc c = bindLex $ nlHsApps punc_RDR [nlHsLit (mkHsString c)] -- expectP (Punc "<")
data_con_str con = occNameString (getOccName con)
read_arg a ty = ASSERT( not (isUnliftedType ty) )
noLoc (mkBindStmt (nlVarPat a) (nlHsVarApps step_RDR [readPrec_RDR]))
read_field lbl a = read_lbl lbl ++
[read_punc "=",
noLoc (mkBindStmt (nlVarPat a) (nlHsVarApps reset_RDR [readPrec_RDR]))]
-- When reading field labels we might encounter
-- a = 3
-- _a = 3
-- or (#) = 4
-- Note the parens!
read_lbl lbl | isSym lbl_str
= [read_punc "(", symbol_pat lbl_str, read_punc ")"]
| otherwise
= ident_h_pat lbl_str
where
lbl_str = unpackFS lbl
{-
************************************************************************
* *
Show instances
* *
************************************************************************
Example
infixr 5 :^:
data Tree a = Leaf a | Tree a :^: Tree a
instance (Show a) => Show (Tree a) where
showsPrec d (Leaf m) = showParen (d > app_prec) showStr
where
showStr = showString "Leaf " . showsPrec (app_prec+1) m
showsPrec d (u :^: v) = showParen (d > up_prec) showStr
where
showStr = showsPrec (up_prec+1) u .
showString " :^: " .
showsPrec (up_prec+1) v
-- Note: right-associativity of :^: ignored
up_prec = 5 -- Precedence of :^:
app_prec = 10 -- Application has precedence one more than
-- the most tightly-binding operator
-}
gen_Show_binds :: (Name -> Fixity) -> SrcSpan -> TyCon -> (LHsBinds RdrName, BagDerivStuff)
gen_Show_binds get_fixity loc tycon
= (listToBag [shows_prec, show_list], emptyBag)
where
-----------------------------------------------------------------------
show_list = mkHsVarBind loc showList_RDR
(nlHsApp (nlHsVar showList___RDR) (nlHsPar (nlHsApp (nlHsVar showsPrec_RDR) (nlHsIntLit 0))))
-----------------------------------------------------------------------
data_cons = tyConDataCons tycon
shows_prec = mk_FunBind loc showsPrec_RDR (map pats_etc data_cons)
pats_etc data_con
| nullary_con = -- skip the showParen junk...
ASSERT(null bs_needed)
([nlWildPat, con_pat], mk_showString_app op_con_str)
| record_syntax = -- skip showParen (#2530)
([a_Pat, con_pat], nlHsPar (nested_compose_Expr show_thingies))
| otherwise =
([a_Pat, con_pat],
showParen_Expr (genOpApp a_Expr ge_RDR
(nlHsLit (HsInt "" con_prec_plus_one)))
(nlHsPar (nested_compose_Expr show_thingies)))
where
data_con_RDR = getRdrName data_con
con_arity = dataConSourceArity data_con
bs_needed = take con_arity bs_RDRs
arg_tys = dataConOrigArgTys data_con -- Correspond 1-1 with bs_needed
con_pat = nlConVarPat data_con_RDR bs_needed
nullary_con = con_arity == 0
labels = map flLabel $ dataConFieldLabels data_con
lab_fields = length labels
record_syntax = lab_fields > 0
dc_nm = getName data_con
dc_occ_nm = getOccName data_con
con_str = occNameString dc_occ_nm
op_con_str = wrapOpParens con_str
backquote_str = wrapOpBackquotes con_str
show_thingies
| is_infix = [show_arg1, mk_showString_app (" " ++ backquote_str ++ " "), show_arg2]
| record_syntax = mk_showString_app (op_con_str ++ " {") :
show_record_args ++ [mk_showString_app "}"]
| otherwise = mk_showString_app (op_con_str ++ " ") : show_prefix_args
show_label l = mk_showString_app (nm ++ " = ")
-- Note the spaces around the "=" sign. If we
-- don't have them then we get Foo { x=-1 } and
-- the "=-" parses as a single lexeme. Only the
-- space after the '=' is necessary, but it
           -- seems tidier to have them on both sides.
where
nm = wrapOpParens (unpackFS l)
show_args = zipWith show_arg bs_needed arg_tys
(show_arg1:show_arg2:_) = show_args
show_prefix_args = intersperse (nlHsVar showSpace_RDR) show_args
-- Assumption for record syntax: no of fields == no of
-- labelled fields (and in same order)
show_record_args = concat $
intersperse [mk_showString_app ", "] $
[ [show_label lbl, arg]
| (lbl,arg) <- zipEqual "gen_Show_binds"
labels show_args ]
show_arg :: RdrName -> Type -> LHsExpr RdrName
show_arg b arg_ty
| isUnliftedType arg_ty
-- See Note [Deriving and unboxed types] in TcDeriv
= nlHsApps compose_RDR [mk_shows_app boxed_arg,
mk_showString_app postfixMod]
| otherwise
= mk_showsPrec_app arg_prec arg
where
arg = nlHsVar b
boxed_arg = box "Show" tycon arg arg_ty
postfixMod = assoc_ty_id "Show" tycon postfixModTbl arg_ty
-- Fixity stuff
is_infix = dataConIsInfix data_con
con_prec_plus_one = 1 + getPrec is_infix get_fixity dc_nm
arg_prec | record_syntax = 0 -- Record fields don't need parens
| otherwise = con_prec_plus_one
wrapOpParens :: String -> String
wrapOpParens s | isSym s = '(' : s ++ ")"
| otherwise = s
wrapOpBackquotes :: String -> String
wrapOpBackquotes s | isSym s = s
| otherwise = '`' : s ++ "`"
isSym :: String -> Bool
isSym "" = False
isSym (c : _) = startsVarSym c || startsConSym c
-- | showString :: String -> ShowS
mk_showString_app :: String -> LHsExpr RdrName
mk_showString_app str = nlHsApp (nlHsVar showString_RDR) (nlHsLit (mkHsString str))
-- | showsPrec :: Show a => Int -> a -> ShowS
mk_showsPrec_app :: Integer -> LHsExpr RdrName -> LHsExpr RdrName
mk_showsPrec_app p x = nlHsApps showsPrec_RDR [nlHsLit (HsInt "" p), x]
-- | shows :: Show a => a -> ShowS
mk_shows_app :: LHsExpr RdrName -> LHsExpr RdrName
mk_shows_app x = nlHsApp (nlHsVar shows_RDR) x
getPrec :: Bool -> (Name -> Fixity) -> Name -> Integer
getPrec is_infix get_fixity nm
| not is_infix = appPrecedence
| otherwise = getPrecedence get_fixity nm
appPrecedence :: Integer
appPrecedence = fromIntegral maxPrecedence + 1
-- One more than the precedence of the most
-- tightly-binding operator
getPrecedence :: (Name -> Fixity) -> Name -> Integer
getPrecedence get_fixity nm
= case get_fixity nm of
Fixity _ x _assoc -> fromIntegral x
-- NB: the Report says that associativity is not taken
-- into account for either Read or Show; hence we
-- ignore associativity here
{-
************************************************************************
* *
Data instances
* *
************************************************************************
From the data type
data T a b = T1 a b | T2
we generate
$cT1 = mkDataCon $dT "T1" Prefix
$cT2 = mkDataCon $dT "T2" Prefix
  $dT  = mkDataType "Module.T" [] [$cT1, $cT2]
-- the [] is for field labels.
instance (Data a, Data b) => Data (T a b) where
gfoldl k z (T1 a b) = z T `k` a `k` b
gfoldl k z T2 = z T2
-- ToDo: add gmapT,Q,M, gfoldr
gunfold k z c = case conIndex c of
I# 1# -> k (k (z T1))
I# 2# -> z T2
toConstr (T1 _ _) = $cT1
toConstr T2 = $cT2
dataTypeOf _ = $dT
dataCast1 = gcast1 -- If T :: * -> *
dataCast2 = gcast2 -- if T :: * -> * -> *
-}
gen_Data_binds :: DynFlags
-> SrcSpan
-> TyCon -- For data families, this is the
-- *representation* TyCon
-> (LHsBinds RdrName, -- The method bindings
BagDerivStuff) -- Auxiliary bindings
gen_Data_binds dflags loc rep_tc
= (listToBag [gfoldl_bind, gunfold_bind, toCon_bind, dataTypeOf_bind]
`unionBags` gcast_binds,
-- Auxiliary definitions: the data type and constructors
listToBag ( DerivHsBind (genDataTyCon)
: map (DerivHsBind . genDataDataCon) data_cons))
where
data_cons = tyConDataCons rep_tc
n_cons = length data_cons
one_constr = n_cons == 1
genDataTyCon :: (LHsBind RdrName, LSig RdrName)
genDataTyCon -- $dT
= (mkHsVarBind loc rdr_name rhs,
L loc (TypeSig [L loc rdr_name] sig_ty))
where
rdr_name = mk_data_type_name rep_tc
sig_ty = mkLHsSigWcType (nlHsTyVar dataType_RDR)
constrs = [nlHsVar (mk_constr_name con) | con <- tyConDataCons rep_tc]
rhs = nlHsVar mkDataType_RDR
`nlHsApp` nlHsLit (mkHsString (showSDocOneLine dflags (ppr rep_tc)))
`nlHsApp` nlList constrs
genDataDataCon :: DataCon -> (LHsBind RdrName, LSig RdrName)
genDataDataCon dc -- $cT1 etc
= (mkHsVarBind loc rdr_name rhs,
L loc (TypeSig [L loc rdr_name] sig_ty))
where
rdr_name = mk_constr_name dc
sig_ty = mkLHsSigWcType (nlHsTyVar constr_RDR)
rhs = nlHsApps mkConstr_RDR constr_args
constr_args
= [ -- nlHsIntLit (toInteger (dataConTag dc)), -- Tag
nlHsVar (mk_data_type_name (dataConTyCon dc)), -- DataType
nlHsLit (mkHsString (occNameString dc_occ)), -- String name
nlList labels, -- Field labels
nlHsVar fixity] -- Fixity
labels = map (nlHsLit . mkHsString . unpackFS . flLabel)
(dataConFieldLabels dc)
dc_occ = getOccName dc
is_infix = isDataSymOcc dc_occ
fixity | is_infix = infix_RDR
| otherwise = prefix_RDR
------------ gfoldl
gfoldl_bind = mk_HRFunBind 2 loc gfoldl_RDR (map gfoldl_eqn data_cons)
gfoldl_eqn con
= ([nlVarPat k_RDR, nlVarPat z_RDR, nlConVarPat con_name as_needed],
foldl mk_k_app (nlHsVar z_RDR `nlHsApp` nlHsVar con_name) as_needed)
where
con_name :: RdrName
con_name = getRdrName con
as_needed = take (dataConSourceArity con) as_RDRs
mk_k_app e v = nlHsPar (nlHsOpApp e k_RDR (nlHsVar v))
------------ gunfold
gunfold_bind = mk_HRFunBind 2 loc
gunfold_RDR
[([k_Pat, z_Pat, if one_constr then nlWildPat else c_Pat],
gunfold_rhs)]
gunfold_rhs
| one_constr = mk_unfold_rhs (head data_cons) -- No need for case
| otherwise = nlHsCase (nlHsVar conIndex_RDR `nlHsApp` c_Expr)
(map gunfold_alt data_cons)
gunfold_alt dc = mkSimpleHsAlt (mk_unfold_pat dc) (mk_unfold_rhs dc)
mk_unfold_rhs dc = foldr nlHsApp
(nlHsVar z_RDR `nlHsApp` nlHsVar (getRdrName dc))
(replicate (dataConSourceArity dc) (nlHsVar k_RDR))
mk_unfold_pat dc -- Last one is a wild-pat, to avoid
-- redundant test, and annoying warning
| tag-fIRST_TAG == n_cons-1 = nlWildPat -- Last constructor
| otherwise = nlConPat intDataCon_RDR
[nlLitPat (HsIntPrim "" (toInteger tag))]
where
tag = dataConTag dc
------------ toConstr
toCon_bind = mk_FunBind loc toConstr_RDR (map to_con_eqn data_cons)
to_con_eqn dc = ([nlWildConPat dc], nlHsVar (mk_constr_name dc))
------------ dataTypeOf
dataTypeOf_bind = mk_easy_FunBind
loc
dataTypeOf_RDR
[nlWildPat]
(nlHsVar (mk_data_type_name rep_tc))
------------ gcast1/2
-- Make the binding dataCast1 x = gcast1 x -- if T :: * -> *
    --               or dataCast2 x = gcast2 x -- if T :: * -> * -> *
-- (or nothing if T has neither of these two types)
-- But care is needed for data families:
-- If we have data family D a
-- data instance D (a,b,c) = A | B deriving( Data )
-- and we want instance ... => Data (D [(a,b,c)]) where ...
-- then we need dataCast1 x = gcast1 x
-- because D :: * -> *
-- even though rep_tc has kind * -> * -> * -> *
-- Hence looking for the kind of fam_tc not rep_tc
-- See Trac #4896
tycon_kind = case tyConFamInst_maybe rep_tc of
Just (fam_tc, _) -> tyConKind fam_tc
Nothing -> tyConKind rep_tc
gcast_binds | tycon_kind `tcEqKind` kind1 = mk_gcast dataCast1_RDR gcast1_RDR
| tycon_kind `tcEqKind` kind2 = mk_gcast dataCast2_RDR gcast2_RDR
| otherwise = emptyBag
mk_gcast dataCast_RDR gcast_RDR
= unitBag (mk_easy_FunBind loc dataCast_RDR [nlVarPat f_RDR]
(nlHsVar gcast_RDR `nlHsApp` nlHsVar f_RDR))
kind1, kind2 :: Kind
kind1 = liftedTypeKind `mkFunTy` liftedTypeKind
kind2 = liftedTypeKind `mkFunTy` kind1
gfoldl_RDR, gunfold_RDR, toConstr_RDR, dataTypeOf_RDR, mkConstr_RDR,
mkDataType_RDR, conIndex_RDR, prefix_RDR, infix_RDR,
dataCast1_RDR, dataCast2_RDR, gcast1_RDR, gcast2_RDR,
constr_RDR, dataType_RDR,
eqChar_RDR , ltChar_RDR , geChar_RDR , gtChar_RDR , leChar_RDR ,
eqInt_RDR , ltInt_RDR , geInt_RDR , gtInt_RDR , leInt_RDR ,
eqWord_RDR , ltWord_RDR , geWord_RDR , gtWord_RDR , leWord_RDR ,
eqAddr_RDR , ltAddr_RDR , geAddr_RDR , gtAddr_RDR , leAddr_RDR ,
eqFloat_RDR , ltFloat_RDR , geFloat_RDR , gtFloat_RDR , leFloat_RDR ,
eqDouble_RDR, ltDouble_RDR, geDouble_RDR, gtDouble_RDR, leDouble_RDR :: RdrName
gfoldl_RDR = varQual_RDR gENERICS (fsLit "gfoldl")
gunfold_RDR = varQual_RDR gENERICS (fsLit "gunfold")
toConstr_RDR = varQual_RDR gENERICS (fsLit "toConstr")
dataTypeOf_RDR = varQual_RDR gENERICS (fsLit "dataTypeOf")
dataCast1_RDR = varQual_RDR gENERICS (fsLit "dataCast1")
dataCast2_RDR = varQual_RDR gENERICS (fsLit "dataCast2")
gcast1_RDR = varQual_RDR tYPEABLE (fsLit "gcast1")
gcast2_RDR = varQual_RDR tYPEABLE (fsLit "gcast2")
mkConstr_RDR = varQual_RDR gENERICS (fsLit "mkConstr")
constr_RDR = tcQual_RDR gENERICS (fsLit "Constr")
mkDataType_RDR = varQual_RDR gENERICS (fsLit "mkDataType")
dataType_RDR = tcQual_RDR gENERICS (fsLit "DataType")
conIndex_RDR = varQual_RDR gENERICS (fsLit "constrIndex")
prefix_RDR = dataQual_RDR gENERICS (fsLit "Prefix")
infix_RDR = dataQual_RDR gENERICS (fsLit "Infix")
eqChar_RDR = varQual_RDR gHC_PRIM (fsLit "eqChar#")
ltChar_RDR = varQual_RDR gHC_PRIM (fsLit "ltChar#")
leChar_RDR = varQual_RDR gHC_PRIM (fsLit "leChar#")
gtChar_RDR = varQual_RDR gHC_PRIM (fsLit "gtChar#")
geChar_RDR = varQual_RDR gHC_PRIM (fsLit "geChar#")
eqInt_RDR = varQual_RDR gHC_PRIM (fsLit "==#")
ltInt_RDR = varQual_RDR gHC_PRIM (fsLit "<#" )
leInt_RDR = varQual_RDR gHC_PRIM (fsLit "<=#")
gtInt_RDR = varQual_RDR gHC_PRIM (fsLit ">#" )
geInt_RDR = varQual_RDR gHC_PRIM (fsLit ">=#")
eqWord_RDR = varQual_RDR gHC_PRIM (fsLit "eqWord#")
ltWord_RDR = varQual_RDR gHC_PRIM (fsLit "ltWord#")
leWord_RDR = varQual_RDR gHC_PRIM (fsLit "leWord#")
gtWord_RDR = varQual_RDR gHC_PRIM (fsLit "gtWord#")
geWord_RDR = varQual_RDR gHC_PRIM (fsLit "geWord#")
eqAddr_RDR = varQual_RDR gHC_PRIM (fsLit "eqAddr#")
ltAddr_RDR = varQual_RDR gHC_PRIM (fsLit "ltAddr#")
leAddr_RDR = varQual_RDR gHC_PRIM (fsLit "leAddr#")
gtAddr_RDR = varQual_RDR gHC_PRIM (fsLit "gtAddr#")
geAddr_RDR = varQual_RDR gHC_PRIM (fsLit "geAddr#")
eqFloat_RDR = varQual_RDR gHC_PRIM (fsLit "eqFloat#")
ltFloat_RDR = varQual_RDR gHC_PRIM (fsLit "ltFloat#")
leFloat_RDR = varQual_RDR gHC_PRIM (fsLit "leFloat#")
gtFloat_RDR = varQual_RDR gHC_PRIM (fsLit "gtFloat#")
geFloat_RDR = varQual_RDR gHC_PRIM (fsLit "geFloat#")
eqDouble_RDR = varQual_RDR gHC_PRIM (fsLit "==##")
ltDouble_RDR = varQual_RDR gHC_PRIM (fsLit "<##" )
leDouble_RDR = varQual_RDR gHC_PRIM (fsLit "<=##")
gtDouble_RDR = varQual_RDR gHC_PRIM (fsLit ">##" )
geDouble_RDR = varQual_RDR gHC_PRIM (fsLit ">=##")
{-
************************************************************************
* *
Functor instances
see http://www.mail-archive.com/[email protected]/msg02116.html
* *
************************************************************************
For the data type:
data T a = T1 Int a | T2 (T a)
We generate the instance:
instance Functor T where
fmap f (T1 b1 a) = T1 b1 (f a)
fmap f (T2 ta) = T2 (fmap f ta)
Notice that we don't simply apply 'fmap' to the constructor arguments.
Rather
- Do nothing to an argument whose type doesn't mention 'a'
- Apply 'f' to an argument of type 'a'
- Apply 'fmap f' to other arguments
That's why we have to recurse deeply into the constructor argument types,
rather than just one level, as we typically do.
What about types with more than one type parameter? In general, we only
derive Functor for the last position:
data S a b = S1 [b] | S2 (a, T a b)
instance Functor (S a) where
fmap f (S1 bs) = S1 (fmap f bs)
fmap f (S2 (p,q)) = S2 (p, fmap f q)
However, we have special cases for
- tuples
- functions
More formally, we write the derivation of fmap code over type variable
'a for type 'b as ($fmap 'a 'b). In this general notation the derived
instance for T is:
instance Functor T where
fmap f (T1 x1 x2) = T1 ($(fmap 'a 'b1) x1) ($(fmap 'a 'a) x2)
fmap f (T2 x1) = T2 ($(fmap 'a '(T a)) x1)
$(fmap 'a 'b) = \x -> x -- when b does not contain a
$(fmap 'a 'a) = f
$(fmap 'a '(b1,b2)) = \x -> case x of (x1,x2) -> ($(fmap 'a 'b1) x1, $(fmap 'a 'b2) x2)
$(fmap 'a '(T b1 b2)) = fmap $(fmap 'a 'b2) -- when a only occurs in the last parameter, b2
$(fmap 'a '(b -> c)) = \x b -> $(fmap 'a 'c) (x ($(cofmap 'a 'b) b))
For functions, the type parameter 'a can occur in a contravariant position,
which means we need to derive a function like:
cofmap :: (a -> b) -> (f b -> f a)
This is pretty much the same as $fmap, only without the $(cofmap 'a 'a) case:
$(cofmap 'a 'b) = \x -> x -- when b does not contain a
$(cofmap 'a 'a) = error "type variable in contravariant position"
$(cofmap 'a '(b1,b2)) = \x -> case x of (x1,x2) -> ($(cofmap 'a 'b1) x1, $(cofmap 'a 'b2) x2)
$(cofmap 'a '[b]) = map $(cofmap 'a 'b)
$(cofmap 'a '(T b1 b2)) = fmap $(cofmap 'a 'b2) -- when a only occurs in the last parameter, b2
$(cofmap 'a '(b -> c)) = \x b -> $(cofmap 'a 'c) (x ($(fmap 'a 'b) b))
Note that the code produced by $(fmap _ _) is always a higher order function,
with type `(a -> b) -> (g a -> g b)` for some g. When we need to do pattern
matching on the type, this means creating a lambda function (see the (,) case above).
The resulting code for fmap can look a bit weird, for example:
data X a = X (a,Int)
-- generated instance
instance Functor X where
fmap f (X x) = (\y -> case y of (x1,x2) -> X (f x1, (\z -> z) x2)) x
The optimizer should be able to simplify this code by simple inlining.
An older version of the deriving code tried to avoid these applied
lambda functions by producing a meta level function. But the function to
be mapped, `f`, is a function on the code level, not on the meta level,
so it was eta expanded to `\x -> [| f $x |]`. This resulted in too much eta expansion.
It is better to produce too many lambdas than to eta expand, see ticket #7436.
-}
gen_Functor_binds :: SrcSpan -> TyCon -> (LHsBinds RdrName, BagDerivStuff)
gen_Functor_binds loc tycon
= (unitBag fmap_bind, emptyBag)
where
data_cons = tyConDataCons tycon
fmap_bind = mkRdrFunBind (L loc fmap_RDR) eqns
fmap_eqn con = evalState (match_for_con [f_Pat] con =<< parts) bs_RDRs
where
parts = sequence $ foldDataConArgs ft_fmap con
eqns | null data_cons = [mkSimpleMatch [nlWildPat, nlWildPat]
(error_Expr "Void fmap")]
| otherwise = map fmap_eqn data_cons
ft_fmap :: FFoldType (State [RdrName] (LHsExpr RdrName))
ft_fmap = FT { ft_triv = mkSimpleLam $ \x -> return x -- fmap f = \x -> x
, ft_var = return f_Expr -- fmap f = f
, ft_fun = \g h -> do -- fmap f = \x b -> h (x (g b))
gg <- g
hh <- h
mkSimpleLam2 $ \x b -> return $ nlHsApp hh (nlHsApp x (nlHsApp gg b))
, ft_tup = \t gs -> do -- fmap f = \x -> case x of (a1,a2,..) -> (g1 a1,g2 a2,..)
gg <- sequence gs
mkSimpleLam $ mkSimpleTupleCase match_for_con t gg
, ft_ty_app = \_ g -> nlHsApp fmap_Expr <$> g -- fmap f = fmap g
, ft_forall = \_ g -> g
, ft_bad_app = panic "in other argument"
, ft_co_var = panic "contravariant" }
-- Con a1 a2 ... -> Con (f1 a1) (f2 a2) ...
match_for_con :: [LPat RdrName] -> DataCon -> [LHsExpr RdrName]
-> State [RdrName] (LMatch RdrName (LHsExpr RdrName))
match_for_con = mkSimpleConMatch $
\con_name xs -> return $ nlHsApps con_name xs -- Con x1 x2 ..
{-
Utility functions related to Functor deriving.
Since several things use the same pattern of traversal, this is abstracted into functorLikeTraverse.
This function works like a fold: it makes a value of type 'a' in a bottom up way.
-}
-- Generic traversal for Functor deriving
data FFoldType a -- Describes how to fold over a Type in a functor like way
= FT { ft_triv :: a -- Does not contain variable
, ft_var :: a -- The variable itself
, ft_co_var :: a -- The variable itself, contravariantly
, ft_fun :: a -> a -> a -- Function type
, ft_tup :: TyCon -> [a] -> a -- Tuple type
, ft_ty_app :: Type -> a -> a -- Type app, variable only in last argument
, ft_bad_app :: a -- Type app, variable other than in last argument
, ft_forall :: TcTyVar -> a -> a -- Forall type
}
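-- As an illustration only (this helper is not part of the compiler), an
-- FFoldType value is just a bundle of per-shape results, so a hypothetical
-- fold that counts occurrences of the variable could be written as:
--
--   countOccs :: TyVar -> Type -> Int
--   countOccs tv = functorLikeTraverse tv
--     (FT { ft_triv = 0, ft_var = 1, ft_co_var = 1
--         , ft_fun = (+), ft_tup = \_ xs -> sum xs
--         , ft_ty_app = \_ x -> x, ft_bad_app = 0
--         , ft_forall = \_ x -> x })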
functorLikeTraverse :: forall a.
TyVar -- ^ Variable to look for
-> FFoldType a -- ^ How to fold
-> Type -- ^ Type to process
-> a
functorLikeTraverse var (FT { ft_triv = caseTrivial, ft_var = caseVar
, ft_co_var = caseCoVar, ft_fun = caseFun
, ft_tup = caseTuple, ft_ty_app = caseTyApp
, ft_bad_app = caseWrongArg, ft_forall = caseForAll })
ty
= fst (go False ty)
where
go :: Bool -- Covariant or contravariant context
-> Type
-> (a, Bool) -- (result of type a, does type contain var)
go co ty | Just ty' <- coreView ty = go co ty'
go co (TyVarTy v) | v == var = (if co then caseCoVar else caseVar,True)
go co (ForAllTy (Anon x) y) | isPredTy x = go co y
| xc || yc = (caseFun xr yr,True)
where (xr,xc) = go (not co) x
(yr,yc) = go co y
go co (AppTy x y) | xc = (caseWrongArg, True)
| yc = (caseTyApp x yr, True)
where (_, xc) = go co x
(yr,yc) = go co y
go co ty@(TyConApp con args)
| not (or xcs) = (caseTrivial, False) -- Variable does not occur
-- At this point we know that xrs, xcs is not empty,
-- and at least one xc is True
| isTupleTyCon con = (caseTuple con xrs, True)
| or (init xcs) = (caseWrongArg, True) -- T (..var..) ty
| Just (fun_ty, _) <- splitAppTy_maybe ty -- T (..no var..) ty
= (caseTyApp fun_ty (last xrs), True)
| otherwise = (caseWrongArg, True) -- Non-decomposable (eg type function)
where
(xrs,xcs) = unzip (map (go co) args)
go _ (ForAllTy (Named _ Visible) _) = panic "unexpected visible binder"
go co (ForAllTy (Named v _) x) | v /= var && xc = (caseForAll v xr,True)
where (xr,xc) = go co x
go _ _ = (caseTrivial,False)
-- Return all syntactic subterms of ty that contain var somewhere
-- These are the things that should appear in instance constraints
deepSubtypesContaining :: TyVar -> Type -> [TcType]
deepSubtypesContaining tv
= functorLikeTraverse tv
(FT { ft_triv = []
, ft_var = []
, ft_fun = (++)
, ft_tup = \_ xs -> concat xs
, ft_ty_app = (:)
, ft_bad_app = panic "in other argument"
, ft_co_var = panic "contravariant"
, ft_forall = \v xs -> filterOut ((v `elemVarSet`) . tyCoVarsOfType) xs })
foldDataConArgs :: FFoldType a -> DataCon -> [a]
-- Fold over the arguments of the datacon
foldDataConArgs ft con
= map foldArg (dataConOrigArgTys con)
where
foldArg
= case getTyVar_maybe (last (tyConAppArgs (dataConOrigResTy con))) of
Just tv -> functorLikeTraverse tv ft
Nothing -> const (ft_triv ft)
-- If we are deriving Foldable for a GADT, there is a chance that the last
-- type variable in the data type isn't actually a type variable at all.
-- (for example, this can happen if the last type variable is refined to
-- be a concrete type such as Int). If the last type variable is refined
-- to be a specific type, then getTyVar_maybe will return Nothing.
-- See Note [DeriveFoldable with ExistentialQuantification]
--
-- The kind checks have ensured the last type parameter is of kind *.
-- Make a HsLam using a fresh variable from a State monad
mkSimpleLam :: (LHsExpr RdrName -> State [RdrName] (LHsExpr RdrName))
-> State [RdrName] (LHsExpr RdrName)
-- (mkSimpleLam fn) returns (\x. fn(x))
mkSimpleLam lam = do
(n:names) <- get
put names
body <- lam (nlHsVar n)
return (mkHsLam [nlVarPat n] body)
mkSimpleLam2 :: (LHsExpr RdrName -> LHsExpr RdrName
-> State [RdrName] (LHsExpr RdrName))
-> State [RdrName] (LHsExpr RdrName)
mkSimpleLam2 lam = do
(n1:n2:names) <- get
put names
body <- lam (nlHsVar n1) (nlHsVar n2)
return (mkHsLam [nlVarPat n1,nlVarPat n2] body)
-- "Con a1 a2 a3 -> fold [x1 a1, x2 a2, x3 a3]"
mkSimpleConMatch :: Monad m => (RdrName -> [LHsExpr RdrName] -> m (LHsExpr RdrName))
-> [LPat RdrName]
-> DataCon
-> [LHsExpr RdrName]
-> m (LMatch RdrName (LHsExpr RdrName))
mkSimpleConMatch fold extra_pats con insides = do
let con_name = getRdrName con
let vars_needed = takeList insides as_RDRs
let pat = nlConVarPat con_name vars_needed
rhs <- fold con_name (zipWith nlHsApp insides (map nlHsVar vars_needed))
return $ mkMatch (extra_pats ++ [pat]) rhs (noLoc emptyLocalBinds)
-- "case x of (a1,a2,a3) -> fold [x1 a1, x2 a2, x3 a3]"
mkSimpleTupleCase :: Monad m => ([LPat RdrName] -> DataCon -> [a]
-> m (LMatch RdrName (LHsExpr RdrName)))
-> TyCon -> [a] -> LHsExpr RdrName -> m (LHsExpr RdrName)
mkSimpleTupleCase match_for_con tc insides x
= do { let data_con = tyConSingleDataCon tc
; match <- match_for_con [] data_con insides
; return $ nlHsCase x [match] }
{-
************************************************************************
* *
Foldable instances
see http://www.mail-archive.com/[email protected]/msg02116.html
* *
************************************************************************
Deriving Foldable instances works the same way as Functor instances,
except that Foldable instances are not possible for function types at all.
Here the derived instance for the type T above is:
instance Foldable T where
foldr f z (T1 x1 x2 x3) = $(foldr 'a 'b1) x1 ( $(foldr 'a 'a) x2 ( $(foldr 'a 'b2) x3 z ) )
The cases are:
$(foldr 'a 'b) = \x z -> z -- when b does not contain a
$(foldr 'a 'a) = f
$(foldr 'a '(b1,b2)) = \x z -> case x of (x1,x2) -> $(foldr 'a 'b1) x1 ( $(foldr 'a 'b2) x2 z )
$(foldr 'a '(T b1 b2)) = \x z -> foldr $(foldr 'a 'b2) z x -- when a only occurs in the last parameter, b2
Note that the arguments to the real foldr function are the wrong way around,
since (f :: a -> b -> b), while (foldr f :: b -> t a -> b).
Foldable instances differ from Functor and Traversable instances in that
Foldable instances can be derived for data types in which the last type
variable is existentially quantified. In particular, if the last type variable
is refined to a more specific type in a GADT:
data GADT a where
G :: a ~ Int => a -> G Int
then the deriving machinery does not attempt to check that the type a contains
Int, since it is not syntactically equal to a type variable. That is, the
derived Foldable instance for GADT is:
instance Foldable GADT where
foldr _ z (GADT _) = z
See Note [DeriveFoldable with ExistentialQuantification].
-}
gen_Foldable_binds :: SrcSpan -> TyCon -> (LHsBinds RdrName, BagDerivStuff)
gen_Foldable_binds loc tycon
= (listToBag [foldr_bind, foldMap_bind], emptyBag)
where
data_cons = tyConDataCons tycon
foldr_bind = mkRdrFunBind (L loc foldable_foldr_RDR) eqns
eqns = map foldr_eqn data_cons
foldr_eqn con = evalState (match_foldr z_Expr [f_Pat,z_Pat] con =<< parts) bs_RDRs
where
parts = sequence $ foldDataConArgs ft_foldr con
foldMap_bind = mkRdrFunBind (L loc foldMap_RDR) (map foldMap_eqn data_cons)
foldMap_eqn con = evalState (match_foldMap [f_Pat] con =<< parts) bs_RDRs
where
parts = sequence $ foldDataConArgs ft_foldMap con
ft_foldr :: FFoldType (State [RdrName] (LHsExpr RdrName))
ft_foldr = FT { ft_triv = mkSimpleLam2 $ \_ z -> return z -- foldr f = \x z -> z
, ft_var = return f_Expr -- foldr f = f
, ft_tup = \t g -> do gg <- sequence g -- foldr f = (\x z -> case x of ...)
mkSimpleLam2 $ \x z -> mkSimpleTupleCase (match_foldr z) t gg x
, ft_ty_app = \_ g -> do gg <- g -- foldr f = (\x z -> foldr g z x)
mkSimpleLam2 $ \x z -> return $ nlHsApps foldable_foldr_RDR [gg,z,x]
, ft_forall = \_ g -> g
, ft_co_var = panic "contravariant"
, ft_fun = panic "function"
, ft_bad_app = panic "in other argument" }
match_foldr z = mkSimpleConMatch $ \_con_name xs -> return $ foldr nlHsApp z xs -- g1 v1 (g2 v2 (.. z))
ft_foldMap :: FFoldType (State [RdrName] (LHsExpr RdrName))
ft_foldMap = FT { ft_triv = mkSimpleLam $ \_ -> return mempty_Expr -- foldMap f = \x -> mempty
, ft_var = return f_Expr -- foldMap f = f
, ft_tup = \t g -> do gg <- sequence g -- foldMap f = \x -> case x of (..,)
mkSimpleLam $ mkSimpleTupleCase match_foldMap t gg
, ft_ty_app = \_ g -> nlHsApp foldMap_Expr <$> g -- foldMap f = foldMap g
, ft_forall = \_ g -> g
, ft_co_var = panic "contravariant"
, ft_fun = panic "function"
, ft_bad_app = panic "in other argument" }
match_foldMap = mkSimpleConMatch $ \_con_name xs -> return $
case xs of
[] -> mempty_Expr
xs -> foldr1 (\x y -> nlHsApps mappend_RDR [x,y]) xs
{-
************************************************************************
* *
Traversable instances
see http://www.mail-archive.com/[email protected]/msg02116.html
* *
************************************************************************
Again, Traversable is much like Functor and Foldable.
The cases are:
$(traverse 'a 'b) = pure -- when b does not contain a
$(traverse 'a 'a) = f
$(traverse 'a '(b1,b2)) = \x -> case x of (x1,x2) -> (,) <$> $(traverse 'a 'b1) x1 <*> $(traverse 'a 'b2) x2
$(traverse 'a '(T b1 b2)) = traverse $(traverse 'a 'b2) -- when a only occurs in the last parameter, b2
Note that the generated code is not as efficient as it could be. For instance:
data T a = T Int a deriving Traversable
gives the function: traverse f (T x y) = T <$> pure x <*> f y
instead of: traverse f (T x y) = T x <$> f y
-}
gen_Traversable_binds :: SrcSpan -> TyCon -> (LHsBinds RdrName, BagDerivStuff)
gen_Traversable_binds loc tycon
= (unitBag traverse_bind, emptyBag)
where
data_cons = tyConDataCons tycon
traverse_bind = mkRdrFunBind (L loc traverse_RDR) eqns
eqns = map traverse_eqn data_cons
traverse_eqn con = evalState (match_for_con [f_Pat] con =<< parts) bs_RDRs
where
parts = sequence $ foldDataConArgs ft_trav con
ft_trav :: FFoldType (State [RdrName] (LHsExpr RdrName))
ft_trav = FT { ft_triv = return pure_Expr -- traverse f = pure x
, ft_var = return f_Expr -- traverse f = f x
, ft_tup = \t gs -> do -- traverse f = \x -> case x of (a1,a2,..) ->
gg <- sequence gs -- (,,) <$> g1 a1 <*> g2 a2 <*> ..
mkSimpleLam $ mkSimpleTupleCase match_for_con t gg
, ft_ty_app = \_ g -> nlHsApp traverse_Expr <$> g -- traverse f = traverse g
, ft_forall = \_ g -> g
, ft_co_var = panic "contravariant"
, ft_fun = panic "function"
, ft_bad_app = panic "in other argument" }
-- Con a1 a2 ... -> Con <$> g1 a1 <*> g2 a2 <*> ...
match_for_con = mkSimpleConMatch $
\con_name xs -> return $ mkApCon (nlHsVar con_name) xs
-- ((Con <$> x1) <*> x2) <*> ..
mkApCon con [] = nlHsApps pure_RDR [con]
mkApCon con (x:xs) = foldl appAp (nlHsApps fmap_RDR [con,x]) xs
where appAp x y = nlHsApps ap_RDR [x,y]
{-
************************************************************************
* *
Lift instances
* *
************************************************************************
Example:
data Foo a = Foo a | a :^: a deriving Lift
==>
instance (Lift a) => Lift (Foo a) where
lift (Foo a)
= appE
(conE
(mkNameG_d "package-name" "ModuleName" "Foo"))
(lift a)
lift (u :^: v)
= infixApp
(lift u)
(conE
(mkNameG_d "package-name" "ModuleName" ":^:"))
(lift v)
Note that (mkNameG_d "package-name" "ModuleName" "Foo") is equivalent to what
'Foo would be when using the -XTemplateHaskell extension. To make sure that
-XDeriveLift can be used on stage-1 compilers, however, we explicitly invoke
mkNameG_d.
-}
gen_Lift_binds :: SrcSpan -> TyCon -> (LHsBinds RdrName, BagDerivStuff)
gen_Lift_binds loc tycon
| null data_cons = (unitBag (L loc $ mkFunBind (L loc lift_RDR)
[mkMatch [nlWildPat] errorMsg_Expr
(noLoc emptyLocalBinds)])
, emptyBag)
| otherwise = (unitBag lift_bind, emptyBag)
where
errorMsg_Expr = nlHsVar error_RDR `nlHsApp` nlHsLit
(mkHsString $ "Can't lift value of empty datatype " ++ tycon_str)
lift_bind = mk_FunBind loc lift_RDR (map pats_etc data_cons)
data_cons = tyConDataCons tycon
tycon_str = occNameString . nameOccName . tyConName $ tycon
pats_etc data_con
= ([con_pat], lift_Expr)
where
con_pat = nlConVarPat data_con_RDR as_needed
data_con_RDR = getRdrName data_con
con_arity = dataConSourceArity data_con
as_needed = take con_arity as_RDRs
lifted_as = zipWithEqual "mk_lift_app" mk_lift_app
tys_needed as_needed
tycon_name = tyConName tycon
is_infix = dataConIsInfix data_con
tys_needed = dataConOrigArgTys data_con
mk_lift_app ty a
| not (isUnliftedType ty) = nlHsApp (nlHsVar lift_RDR)
(nlHsVar a)
| otherwise = nlHsApp (nlHsVar litE_RDR)
(primLitOp (mkBoxExp (nlHsVar a)))
where (primLitOp, mkBoxExp) = primLitOps "Lift" tycon ty
pkg_name = unitIdString . moduleUnitId
. nameModule $ tycon_name
mod_name = moduleNameString . moduleName . nameModule $ tycon_name
con_name = occNameString . nameOccName . dataConName $ data_con
conE_Expr = nlHsApp (nlHsVar conE_RDR)
(nlHsApps mkNameG_dRDR
(map (nlHsLit . mkHsString)
[pkg_name, mod_name, con_name]))
lift_Expr
| is_infix = nlHsApps infixApp_RDR [a1, conE_Expr, a2]
| otherwise = foldl mk_appE_app conE_Expr lifted_as
(a1:a2:_) = lifted_as
mk_appE_app :: LHsExpr RdrName -> LHsExpr RdrName -> LHsExpr RdrName
mk_appE_app a b = nlHsApps appE_RDR [a, b]
{-
************************************************************************
* *
Newtype-deriving instances
* *
************************************************************************
We take every method in the original instance and `coerce` it to fit
into the derived instance. We need a type annotation on the argument
to `coerce` to make it obvious what instantiation of the method we're
coercing from.
See #8503 for more discussion.
-}
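{-
As an illustrative sketch (not literal output of this module), with
-XGeneralizedNewtypeDeriving

  newtype Age = MkAge Int deriving Num

roughly produces

  instance Num Age where
    (+) = coerce ((+) :: Int -> Int -> Int) :: Age -> Age -> Age
    ...

The inner signature pins down which instantiation of the method we coerce
from; the outer one gives the type at which it is used in the derived
instance.
-}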
mkCoerceClassMethEqn :: Class -- the class being derived
-> [TyVar] -- the tvs in the instance head
-> [Type] -- instance head parameters (incl. newtype)
-> Type -- the representation type (already eta-reduced)
-> Id -- the method to look at
-> Pair Type
mkCoerceClassMethEqn cls inst_tvs cls_tys rhs_ty id
= Pair (substTy rhs_subst user_meth_ty) (substTy lhs_subst user_meth_ty)
where
cls_tvs = classTyVars cls
in_scope = mkInScopeSet $ mkVarSet inst_tvs
lhs_subst = mkTCvSubst in_scope (zipTyEnv cls_tvs cls_tys, emptyCvSubstEnv)
rhs_subst = mkTCvSubst in_scope
( zipTyEnv cls_tvs (changeLast cls_tys rhs_ty)
, emptyCvSubstEnv )
(_class_tvs, _class_constraint, user_meth_ty)
= tcSplitSigmaTy (varType id)
changeLast :: [a] -> a -> [a]
changeLast [] _ = panic "changeLast"
changeLast [_] x = [x]
changeLast (x:xs) x' = x : changeLast xs x'
gen_Newtype_binds :: SrcSpan
-> Class -- the class being derived
-> [TyVar] -- the tvs in the instance head
-> [Type] -- instance head parameters (incl. newtype)
-> Type -- the representation type (already eta-reduced)
-> LHsBinds RdrName
gen_Newtype_binds loc cls inst_tvs cls_tys rhs_ty
= listToBag $ zipWith mk_bind
(classMethods cls)
(map (mkCoerceClassMethEqn cls inst_tvs cls_tys rhs_ty) (classMethods cls))
where
coerce_RDR = getRdrName coerceId
mk_bind :: Id -> Pair Type -> LHsBind RdrName
mk_bind id (Pair tau_ty user_ty)
= mkRdrFunBind (L loc meth_RDR) [mkSimpleMatch [] rhs_expr]
where
meth_RDR = getRdrName id
rhs_expr
= ( nlHsVar coerce_RDR
`nlHsApp`
(nlHsVar meth_RDR `nlExprWithTySig` toLHsSigWcType tau_ty'))
`nlExprWithTySig` toLHsSigWcType user_ty
-- Open the representation type here, so that its forall'ed type
-- variables refer to the ones bound in the user_ty
(_, _, tau_ty') = tcSplitSigmaTy tau_ty
nlExprWithTySig :: LHsExpr RdrName -> LHsSigWcType RdrName -> LHsExpr RdrName
nlExprWithTySig e s = noLoc (ExprWithTySig e s)
{-
************************************************************************
* *
\subsection{Generating extra binds (@con2tag@ and @tag2con@)}
* *
************************************************************************
\begin{verbatim}
data Foo ... = ...
con2tag_Foo :: Foo ... -> Int#
tag2con_Foo :: Int -> Foo ... -- easier if Int, not Int#
maxtag_Foo :: Int -- ditto (NB: not unlifted)
\end{verbatim}
The `tags' here start at zero, hence the @fIRST_TAG@ (currently one)
fiddling around.
-}
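{-
For a concrete (hypothetical) example,

  data Colour = Red | Green | Blue

gives rise to, roughly,

  con2tag_Colour :: Colour -> Int#
  con2tag_Colour Red = 0#
  con2tag_Colour Green = 1#
  con2tag_Colour Blue = 2#

  tag2con_Colour :: Int -> Colour
  maxtag_Colour :: Int -- here 2
-}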
genAuxBindSpec :: SrcSpan -> AuxBindSpec -> (LHsBind RdrName, LSig RdrName)
genAuxBindSpec loc (DerivCon2Tag tycon)
= (mk_FunBind loc rdr_name eqns,
L loc (TypeSig [L loc rdr_name] sig_ty))
where
rdr_name = con2tag_RDR tycon
sig_ty = mkLHsSigWcType $ L loc $ HsCoreTy $
mkSpecSigmaTy (tyConTyVars tycon) (tyConStupidTheta tycon) $
mkParentType tycon `mkFunTy` intPrimTy
lots_of_constructors = tyConFamilySize tycon > 8
-- was: mAX_FAMILY_SIZE_FOR_VEC_RETURNS
-- but we don't do vectored returns any more.
eqns | lots_of_constructors = [get_tag_eqn]
| otherwise = map mk_eqn (tyConDataCons tycon)
get_tag_eqn = ([nlVarPat a_RDR], nlHsApp (nlHsVar getTag_RDR) a_Expr)
mk_eqn :: DataCon -> ([LPat RdrName], LHsExpr RdrName)
mk_eqn con = ([nlWildConPat con],
nlHsLit (HsIntPrim ""
(toInteger ((dataConTag con) - fIRST_TAG))))
genAuxBindSpec loc (DerivTag2Con tycon)
= (mk_FunBind loc rdr_name
[([nlConVarPat intDataCon_RDR [a_RDR]],
nlHsApp (nlHsVar tagToEnum_RDR) a_Expr)],
L loc (TypeSig [L loc rdr_name] sig_ty))
where
sig_ty = mkLHsSigWcType $ L loc $
HsCoreTy $ mkSpecForAllTys (tyConTyVars tycon) $
intTy `mkFunTy` mkParentType tycon
rdr_name = tag2con_RDR tycon
genAuxBindSpec loc (DerivMaxTag tycon)
= (mkHsVarBind loc rdr_name rhs,
L loc (TypeSig [L loc rdr_name] sig_ty))
where
rdr_name = maxtag_RDR tycon
sig_ty = mkLHsSigWcType (L loc (HsCoreTy intTy))
rhs = nlHsApp (nlHsVar intDataCon_RDR) (nlHsLit (HsIntPrim "" max_tag))
max_tag = case (tyConDataCons tycon) of
data_cons -> toInteger ((length data_cons) - fIRST_TAG)
type SeparateBagsDerivStuff = -- AuxBinds and SYB bindings
( Bag (LHsBind RdrName, LSig RdrName)
-- Extra bindings (used by Generic only)
, Bag (FamInst) -- Extra family instances
, Bag (InstInfo RdrName)) -- Extra instances
genAuxBinds :: SrcSpan -> BagDerivStuff -> SeparateBagsDerivStuff
genAuxBinds loc b = genAuxBinds' b2 where
(b1,b2) = partitionBagWith splitDerivAuxBind b
splitDerivAuxBind (DerivAuxBind x) = Left x
splitDerivAuxBind x = Right x
rm_dups = foldrBag dup_check emptyBag
dup_check a b = if anyBag (== a) b then b else consBag a b
genAuxBinds' :: BagDerivStuff -> SeparateBagsDerivStuff
genAuxBinds' = foldrBag f ( mapBag (genAuxBindSpec loc) (rm_dups b1)
, emptyBag, emptyBag)
f :: DerivStuff -> SeparateBagsDerivStuff -> SeparateBagsDerivStuff
f (DerivAuxBind _) = panic "genAuxBinds'" -- We have removed these before
f (DerivHsBind b) = add1 b
f (DerivFamInst t) = add2 t
f (DerivInst i) = add3 i
add1 x (a,b,c) = (x `consBag` a,b,c)
add2 x (a,b,c) = (a,x `consBag` b,c)
add3 x (a,b,c) = (a,b,x `consBag` c)
mk_data_type_name :: TyCon -> RdrName -- "$tT"
mk_data_type_name tycon = mkAuxBinderName (tyConName tycon) mkDataTOcc
mk_constr_name :: DataCon -> RdrName -- "$cC"
mk_constr_name con = mkAuxBinderName (dataConName con) mkDataCOcc
mkParentType :: TyCon -> Type
-- Turn the representation tycon of a family into
-- a use of its family constructor
mkParentType tc
= case tyConFamInst_maybe tc of
Nothing -> mkTyConApp tc (mkTyVarTys (tyConTyVars tc))
Just (fam_tc,tys) -> mkTyConApp fam_tc tys
{-
************************************************************************
* *
\subsection{Utility bits for generating bindings}
* *
************************************************************************
-}
mk_FunBind :: SrcSpan -> RdrName
-> [([LPat RdrName], LHsExpr RdrName)]
-> LHsBind RdrName
mk_FunBind = mk_HRFunBind 0 -- by using mk_FunBind and not mk_HRFunBind,
-- the caller says that the Void case needs no
-- patterns
-- | This variant of 'mk_FunBind' puts an 'Arity' number of wildcards before
-- the "=" in the empty-data-decl case. This is necessary if the function
-- has a higher-rank type, like foldl. (See deriving/should_compile/T4302)
mk_HRFunBind :: Arity -> SrcSpan -> RdrName
-> [([LPat RdrName], LHsExpr RdrName)]
-> LHsBind RdrName
mk_HRFunBind arity loc fun pats_and_exprs
= mkHRRdrFunBind arity (L loc fun) matches
where
matches = [mkMatch p e (noLoc emptyLocalBinds) | (p,e) <-pats_and_exprs]
mkRdrFunBind :: Located RdrName -> [LMatch RdrName (LHsExpr RdrName)] -> LHsBind RdrName
mkRdrFunBind = mkHRRdrFunBind 0
mkHRRdrFunBind :: Arity -> Located RdrName -> [LMatch RdrName (LHsExpr RdrName)] -> LHsBind RdrName
mkHRRdrFunBind arity fun@(L loc fun_rdr) matches = L loc (mkFunBind fun matches')
where
-- Catch-all eqn looks like
-- fmap = error "Void fmap"
-- It's needed if there are no data cons at all,
-- which can happen with -XEmptyDataDecls
-- See Trac #4302
matches' = if null matches
then [mkMatch (replicate arity nlWildPat)
(error_Expr str) (noLoc emptyLocalBinds)]
else matches
str = "Void " ++ occNameString (rdrNameOcc fun_rdr)
box :: String -- The class involved
-> TyCon -- The tycon involved
-> LHsExpr RdrName -- The argument
-> Type -- The argument type
-> LHsExpr RdrName -- Boxed version of the arg
-- See Note [Deriving and unboxed types] in TcDeriv
box cls_str tycon arg arg_ty = nlHsApp (nlHsVar box_con) arg
where
box_con = assoc_ty_id cls_str tycon boxConTbl arg_ty
---------------------
primOrdOps :: String -- The class involved
-> TyCon -- The tycon involved
-> Type -- The type
-> (RdrName, RdrName, RdrName, RdrName, RdrName) -- (lt,le,eq,ge,gt)
-- See Note [Deriving and unboxed types] in TcDeriv
primOrdOps str tycon ty = assoc_ty_id str tycon ordOpTbl ty
primLitOps :: String -- The class involved
-> TyCon -- The tycon involved
-> Type -- The type
-> ( LHsExpr RdrName -> LHsExpr RdrName -- Constructs a Q Exp value
, LHsExpr RdrName -> LHsExpr RdrName -- Constructs a boxed value
)
primLitOps str tycon ty = ( assoc_ty_id str tycon litConTbl ty
, \v -> nlHsVar boxRDR `nlHsApp` v
)
where
boxRDR
| ty `eqType` addrPrimTy = unpackCString_RDR
| otherwise = assoc_ty_id str tycon boxConTbl ty
ordOpTbl :: [(Type, (RdrName, RdrName, RdrName, RdrName, RdrName))]
ordOpTbl
= [(charPrimTy , (ltChar_RDR , leChar_RDR , eqChar_RDR , geChar_RDR , gtChar_RDR ))
,(intPrimTy , (ltInt_RDR , leInt_RDR , eqInt_RDR , geInt_RDR , gtInt_RDR ))
,(wordPrimTy , (ltWord_RDR , leWord_RDR , eqWord_RDR , geWord_RDR , gtWord_RDR ))
,(addrPrimTy , (ltAddr_RDR , leAddr_RDR , eqAddr_RDR , geAddr_RDR , gtAddr_RDR ))
,(floatPrimTy , (ltFloat_RDR , leFloat_RDR , eqFloat_RDR , geFloat_RDR , gtFloat_RDR ))
,(doublePrimTy, (ltDouble_RDR, leDouble_RDR, eqDouble_RDR, geDouble_RDR, gtDouble_RDR)) ]
boxConTbl :: [(Type, RdrName)]
boxConTbl
= [(charPrimTy , getRdrName charDataCon )
,(intPrimTy , getRdrName intDataCon )
,(wordPrimTy , getRdrName wordDataCon )
,(floatPrimTy , getRdrName floatDataCon )
,(doublePrimTy, getRdrName doubleDataCon)
]
-- | A table of postfix modifiers for unboxed values.
postfixModTbl :: [(Type, String)]
postfixModTbl
= [(charPrimTy , "#" )
,(intPrimTy , "#" )
,(wordPrimTy , "##")
,(floatPrimTy , "#" )
,(doublePrimTy, "##")
]
litConTbl :: [(Type, LHsExpr RdrName -> LHsExpr RdrName)]
litConTbl
= [(charPrimTy , nlHsApp (nlHsVar charPrimL_RDR))
,(intPrimTy , nlHsApp (nlHsVar intPrimL_RDR)
. nlHsApp (nlHsVar toInteger_RDR))
,(wordPrimTy , nlHsApp (nlHsVar wordPrimL_RDR)
. nlHsApp (nlHsVar toInteger_RDR))
,(addrPrimTy , nlHsApp (nlHsVar stringPrimL_RDR)
. nlHsApp (nlHsApp
(nlHsVar map_RDR)
(compose_RDR `nlHsApps`
[ nlHsVar fromIntegral_RDR
, nlHsVar fromEnum_RDR
])))
,(floatPrimTy , nlHsApp (nlHsVar floatPrimL_RDR)
. nlHsApp (nlHsVar toRational_RDR))
,(doublePrimTy, nlHsApp (nlHsVar doublePrimL_RDR)
. nlHsApp (nlHsVar toRational_RDR))
]
-- | Lookup `Type` in an association list.
assoc_ty_id :: String -- The class involved
-> TyCon -- The tycon involved
-> [(Type,a)] -- The table
-> Type -- The type
-> a -- The result of the lookup
assoc_ty_id cls_str _ tbl ty
| null res = pprPanic "Error in deriving:" (text "Can't derive" <+> text cls_str <+>
text "for primitive type" <+> ppr ty)
| otherwise = head res
where
res = [id | (ty',id) <- tbl, ty `eqType` ty']
-----------------------------------------------------------------------
and_Expr :: LHsExpr RdrName -> LHsExpr RdrName -> LHsExpr RdrName
and_Expr a b = genOpApp a and_RDR b
-----------------------------------------------------------------------
eq_Expr :: TyCon -> Type -> LHsExpr RdrName -> LHsExpr RdrName -> LHsExpr RdrName
eq_Expr tycon ty a b
| not (isUnliftedType ty) = genOpApp a eq_RDR b
| otherwise = genPrimOpApp a prim_eq b
where
(_, _, prim_eq, _, _) = primOrdOps "Eq" tycon ty
untag_Expr :: TyCon -> [( RdrName, RdrName)] -> LHsExpr RdrName -> LHsExpr RdrName
untag_Expr _ [] expr = expr
untag_Expr tycon ((untag_this, put_tag_here) : more) expr
= nlHsCase (nlHsPar (nlHsVarApps (con2tag_RDR tycon) [untag_this])) {-of-}
[mkSimpleHsAlt (nlVarPat put_tag_here) (untag_Expr tycon more expr)]
enum_from_to_Expr
:: LHsExpr RdrName -> LHsExpr RdrName
-> LHsExpr RdrName
enum_from_then_to_Expr
:: LHsExpr RdrName -> LHsExpr RdrName -> LHsExpr RdrName
-> LHsExpr RdrName
enum_from_to_Expr f t2 = nlHsApp (nlHsApp (nlHsVar enumFromTo_RDR) f) t2
enum_from_then_to_Expr f t t2 = nlHsApp (nlHsApp (nlHsApp (nlHsVar enumFromThenTo_RDR) f) t) t2
showParen_Expr
:: LHsExpr RdrName -> LHsExpr RdrName
-> LHsExpr RdrName
showParen_Expr e1 e2 = nlHsApp (nlHsApp (nlHsVar showParen_RDR) e1) e2
nested_compose_Expr :: [LHsExpr RdrName] -> LHsExpr RdrName
nested_compose_Expr [] = panic "nested_compose_expr" -- Arg is always non-empty
nested_compose_Expr [e] = parenify e
nested_compose_Expr (e:es)
= nlHsApp (nlHsApp (nlHsVar compose_RDR) (parenify e)) (nested_compose_Expr es)
-- error_Expr is used in case RHSs that should never happen.
-- We generate these to keep the desugarer from complaining that they *might* happen!
error_Expr :: String -> LHsExpr RdrName
error_Expr string = nlHsApp (nlHsVar error_RDR) (nlHsLit (mkHsString string))
-- illegal_Expr is used when signalling error conditions in the RHS of a derived
-- method. It is currently only used by Enum.{succ,pred}
illegal_Expr :: String -> String -> String -> LHsExpr RdrName
illegal_Expr meth tp msg =
nlHsApp (nlHsVar error_RDR) (nlHsLit (mkHsString (meth ++ '{':tp ++ "}: " ++ msg)))
-- illegal_toEnum_tag is an extended version of illegal_Expr, which also allows you
-- to include the value of a_RDR in the error string.
illegal_toEnum_tag :: String -> RdrName -> LHsExpr RdrName
illegal_toEnum_tag tp maxtag =
nlHsApp (nlHsVar error_RDR)
(nlHsApp (nlHsApp (nlHsVar append_RDR)
(nlHsLit (mkHsString ("toEnum{" ++ tp ++ "}: tag ("))))
(nlHsApp (nlHsApp (nlHsApp
(nlHsVar showsPrec_RDR)
(nlHsIntLit 0))
(nlHsVar a_RDR))
(nlHsApp (nlHsApp
(nlHsVar append_RDR)
(nlHsLit (mkHsString ") is outside of enumeration's range (0,")))
(nlHsApp (nlHsApp (nlHsApp
(nlHsVar showsPrec_RDR)
(nlHsIntLit 0))
(nlHsVar maxtag))
(nlHsLit (mkHsString ")"))))))
parenify :: LHsExpr RdrName -> LHsExpr RdrName
parenify e@(L _ (HsVar _)) = e
parenify e = mkHsPar e
-- genOpApp wraps brackets round the operator application, so that the
-- renamer won't subsequently try to re-associate it.
genOpApp :: LHsExpr RdrName -> RdrName -> LHsExpr RdrName -> LHsExpr RdrName
genOpApp e1 op e2 = nlHsPar (nlHsOpApp e1 op e2)
genPrimOpApp :: LHsExpr RdrName -> RdrName -> LHsExpr RdrName -> LHsExpr RdrName
genPrimOpApp e1 op e2 = nlHsPar (nlHsApp (nlHsVar tagToEnum_RDR) (nlHsOpApp e1 op e2))
a_RDR, b_RDR, c_RDR, d_RDR, f_RDR, k_RDR, z_RDR, ah_RDR, bh_RDR, ch_RDR, dh_RDR
:: RdrName
a_RDR = mkVarUnqual (fsLit "a")
b_RDR = mkVarUnqual (fsLit "b")
c_RDR = mkVarUnqual (fsLit "c")
d_RDR = mkVarUnqual (fsLit "d")
f_RDR = mkVarUnqual (fsLit "f")
k_RDR = mkVarUnqual (fsLit "k")
z_RDR = mkVarUnqual (fsLit "z")
ah_RDR = mkVarUnqual (fsLit "a#")
bh_RDR = mkVarUnqual (fsLit "b#")
ch_RDR = mkVarUnqual (fsLit "c#")
dh_RDR = mkVarUnqual (fsLit "d#")
as_RDRs, bs_RDRs, cs_RDRs :: [RdrName]
as_RDRs = [ mkVarUnqual (mkFastString ("a"++show i)) | i <- [(1::Int) .. ] ]
bs_RDRs = [ mkVarUnqual (mkFastString ("b"++show i)) | i <- [(1::Int) .. ] ]
cs_RDRs = [ mkVarUnqual (mkFastString ("c"++show i)) | i <- [(1::Int) .. ] ]
a_Expr, c_Expr, f_Expr, z_Expr, ltTag_Expr, eqTag_Expr, gtTag_Expr,
false_Expr, true_Expr, fmap_Expr, pure_Expr, mempty_Expr, foldMap_Expr, traverse_Expr :: LHsExpr RdrName
a_Expr = nlHsVar a_RDR
-- b_Expr = nlHsVar b_RDR
c_Expr = nlHsVar c_RDR
f_Expr = nlHsVar f_RDR
z_Expr = nlHsVar z_RDR
ltTag_Expr = nlHsVar ltTag_RDR
eqTag_Expr = nlHsVar eqTag_RDR
gtTag_Expr = nlHsVar gtTag_RDR
false_Expr = nlHsVar false_RDR
true_Expr = nlHsVar true_RDR
fmap_Expr = nlHsVar fmap_RDR
pure_Expr = nlHsVar pure_RDR
mempty_Expr = nlHsVar mempty_RDR
foldMap_Expr = nlHsVar foldMap_RDR
traverse_Expr = nlHsVar traverse_RDR
a_Pat, b_Pat, c_Pat, d_Pat, f_Pat, k_Pat, z_Pat :: LPat RdrName
a_Pat = nlVarPat a_RDR
b_Pat = nlVarPat b_RDR
c_Pat = nlVarPat c_RDR
d_Pat = nlVarPat d_RDR
f_Pat = nlVarPat f_RDR
k_Pat = nlVarPat k_RDR
z_Pat = nlVarPat z_RDR
minusInt_RDR, tagToEnum_RDR :: RdrName
minusInt_RDR = getRdrName (primOpId IntSubOp )
tagToEnum_RDR = getRdrName (primOpId TagToEnumOp)
con2tag_RDR, tag2con_RDR, maxtag_RDR :: TyCon -> RdrName
-- Generates Orig s RdrName, for the binding positions
con2tag_RDR tycon = mk_tc_deriv_name tycon mkCon2TagOcc
tag2con_RDR tycon = mk_tc_deriv_name tycon mkTag2ConOcc
maxtag_RDR tycon = mk_tc_deriv_name tycon mkMaxTagOcc
mk_tc_deriv_name :: TyCon -> (OccName -> OccName) -> RdrName
mk_tc_deriv_name tycon occ_fun = mkAuxBinderName (tyConName tycon) occ_fun
mkAuxBinderName :: Name -> (OccName -> OccName) -> RdrName
-- ^ Make a top-level binder name for an auxiliary binding for a parent name
-- See Note [Auxiliary binders]
mkAuxBinderName parent occ_fun
= mkRdrUnqual (occ_fun stable_parent_occ)
where
stable_parent_occ = mkOccName (occNameSpace parent_occ) stable_string
stable_string
| opt_PprStyle_Debug = parent_stable
| otherwise = parent_stable_hash
parent_stable = nameStableString parent
parent_stable_hash =
let Fingerprint high low = fingerprintString parent_stable
in toBase62 high ++ toBase62Padded low
-- See Note [Base 62 encoding 128-bit integers]
parent_occ = nameOccName parent
{-
Note [Auxiliary binders]
~~~~~~~~~~~~~~~~~~~~~~~~
We often want to make a top-level auxiliary binding. E.g. for comparison we have
instance Ord T where
compare a b = $con2tag a `compare` $con2tag b
$con2tag :: T -> Int
$con2tag = ...code....
Of course these top-level bindings should all have distinct names, and we are
generating RdrNames here. We can't just use the TyCon or DataCon to distinguish
because with standalone deriving two imported TyCons might both be called T!
(See Trac #7947.)
So we use package name, module name and the name of the parent
(T in this example) as part of the OccName we generate for the new binding.
To make the symbol names short we take a base62 hash of the full name.
In the past we used the *unique* from the parent, but that's not stable across
recompilations as uniques are nondeterministic.
Note [DeriveFoldable with ExistentialQuantification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Functor and Traversable instances can only be derived for data types whose
last type parameter is truly universally polymorphic. For example:
data T a b where
T1 :: b -> T a b -- YES, b is unconstrained
T2 :: Ord b => b -> T a b -- NO, b is constrained by (Ord b)
T3 :: b ~ Int => b -> T a b -- NO, b is constrained by (b ~ Int)
T4 :: Int -> T a Int -- NO, this is just like T3
T5 :: Ord a => a -> b -> T a b -- YES, b is unconstrained, even
-- though a is existential
T6 :: Int -> T Int b -- YES, b is unconstrained
For Foldable instances, however, we can completely lift the constraint that
the last type parameter be truly universally polymorphic. This means that T
(as defined above) can have a derived Foldable instance:
instance Foldable (T a) where
foldr f z (T1 b) = f b z
foldr f z (T2 b) = f b z
foldr f z (T3 b) = f b z
foldr f z (T4 b) = z
foldr f z (T5 a b) = f b z
foldr f z (T6 a) = z
foldMap f (T1 b) = f b
foldMap f (T2 b) = f b
foldMap f (T3 b) = f b
foldMap f (T4 b) = mempty
foldMap f (T5 a b) = f b
foldMap f (T6 a) = mempty
In a Foldable instance, it is safe to fold over an occurrence of the last type
parameter that is not truly universally polymorphic. However, there is a bit
of subtlety in determining what is actually an occurrence of a type parameter.
T3 and T4, as defined above, provide one example:
data T a b where
...
T3 :: b ~ Int => b -> T a b
T4 :: Int -> T a Int
...
instance Foldable (T a) where
...
foldr f z (T3 b) = f b z
foldr f z (T4 b) = z
...
foldMap f (T3 b) = f b
foldMap f (T4 b) = mempty
...
Notice that the argument of T3 is folded over, whereas the argument of T4 is
not. This is because we only fold over constructor arguments that
syntactically mention the universally quantified type parameter of that
particular data constructor. See foldDataConArgs for how this is implemented.
As another example, consider the following data type. The argument of each
constructor has the same type as the last type parameter:
data E a where
E1 :: (a ~ Int) => a -> E a
E2 :: Int -> E Int
E3 :: (a ~ Int) => a -> E Int
E4 :: (a ~ Int) => Int -> E a
Only E1's argument is an occurrence of a universally quantified type variable
that is syntactically equivalent to the last type parameter, so only E1's
argument will be folded over in a derived Foldable instance.
See Trac #10447 for the original discussion on this feature. Also see
https://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/DeriveFunctor
for a more in-depth explanation.
-}
| nushio3/ghc | compiler/typecheck/TcGenDeriv.hs | bsd-3-clause | 103,881 | 0 | 19 | 33,003 | 18,330 | 9,639 | 8,691 | 1,290 | 9 |
{-# Language TypeFamilies #-}
module Data.Source.String.Offset where
import Data.Source.Class
data Src
= Src
{ loc :: Int
, str :: [Char]
} deriving (Eq,Ord,Read,Show)
instance Source Src where
type Location Src = Int
type Element Src = Char
type Token Src = [Char]
type Error Src = () -- errors are impossible here, so this is a dummy
uncons (Src i (x:xs)) = Right $ Just (x,Src (i+1) xs)
uncons _ = Right $ Nothing
view src _ emh nxh = case src of
Src i (x:xs) -> nxh x $ Src (i+1) xs
_ -> emh
location = loc
token (Src il h) (Src ir _) = take (ir - il) h
offset = loc
mkSrc :: String -> Src
mkSrc = Src 0
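-- A quick usage example (illustrative only):
--
-- >>> uncons (mkSrc "ab")
-- Right (Just ('a',Src {loc = 1, str = "b"}))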
| permeakra/source | Data/Source/String/Offset.hs | bsd-3-clause | 761 | 0 | 12 | 279 | 294 | 159 | 135 | 23 | 1 |
module Algebra.Structures.Field
( module Algebra.Structures.IntegralDomain
, Field(..)
, (</>)
) where
import Algebra.Structures.IntegralDomain
class IntegralDomain a => Field a where
inv :: a -> a
(</>) :: Field a => a -> a -> a
x </> y = x <*> inv y
infixl 7 </>
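-- Example (assuming a @Field Rational@ instance and Data.Ratio's @%@ are in scope):
--
-- > (3 % 4) </> (1 % 2) == 3 % 2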
| Alex128/abstract-math | src/Algebra/Structures/Field.hs | bsd-3-clause | 292 | 0 | 7 | 73 | 104 | 59 | 45 | 10 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Todo.Changelog (
-- * The Log type
Log (..),
logHead,
-- * Log entries
LogEntry (..),
entryHash,
entryVersion,
entryParent,
entryTime,
entryAction,
EntryHash (..),
EntryVersion (..),
ParentLogEntry (..),
-- * Log actions
EntryAction (..),
newLog,
mkLog,
addLogEntry,
addLogAction,
logToTextList,
validateEntryHistory,
validateLogHistory,
rehashEntryHistory,
rehashLogHistory,
rebuildTaskStat
) where
import Codec.Utils (Octet)
import Control.Lens
import Control.Monad
import Data.Binary.Put
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.Digest.SHA256 as Sha256
import Data.Int
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Data.Time
import Data.UUID (UUID)
import qualified Data.UUID as U
import qualified Data.UUID.V4 as UuidV4
import Todo.Todo
{-| Store the logs and some meta data. -}
data Log = Log {
-- | Holds the newest entry, which links back to all previous entries.
_logHead :: ParentLogEntry
} deriving (Show, Read)
data EntryHash = EmptyHash
| DblSha256Hash [Octet]
deriving (Show, Read, Eq)
data EntryVersion = Version0_1_0
deriving (Show, Read, Eq, Enum)
currentVersion :: EntryVersion
currentVersion = Version0_1_0
data EntryAction =
-- | New active action is added to the list
NewActiveAction ActiveTask
-- | An active action is completed
| CompletedActiveAction ActiveTask
-- | A booled action was added
| NewPooledAction PooledTask
-- | New pooled tasks were activated
| ActivationAction [ActiveTask]
-- | Cleanup was performed on the list
| CleanupAction [ActiveTask]
deriving (Show, Read)
{-| One log entry which recursively holds the previous entry.
The log entry contains a hash value which combines the other
attributes. The hash should be the outcome of these values chained:
- Version string
- Parent hash
- Systime
- Action stringified
-}
data LogEntry = LogEntry {
_entryHash :: EntryHash,
_entryVersion :: EntryVersion,
_entryParent :: ParentLogEntry,
_entryTime :: ZonedTime,
_entryAction :: EntryAction
} deriving (Show, Read)
data ParentLogEntry =
PrevLogEntry LogEntry
| StartLogEntry UUID
| EmptyLogEntry
deriving (Show, Read)
makeLenses ''LogEntry
makeLenses ''Log
enumToInt16 :: Enum a => a -> Int16
enumToInt16 enum = fromInteger $ toInteger $ fromEnum enum
startParentIdFlag :: Int8
startParentIdFlag = 0
prevParentIdFlag :: Int8
prevParentIdFlag = 1
putHash :: EntryHash -> Put
putHash (DblSha256Hash hash) = do
putInt8 0
forM_ hash $ \x -> putWord8 x
putParentUuid :: ParentLogEntry -> Put
putParentUuid (StartLogEntry uuid) = do
putInt8 startParentIdFlag
putLazyByteString $ U.toByteString uuid
putParentUuid (PrevLogEntry entry) = do
putInt8 prevParentIdFlag
putHash $ entry ^. entryHash
timeToInt32 :: ZonedTime -> Int32
timeToInt32 t =
let tAsUnix = formatTime defaultTimeLocale "%s" utcT
utcT = zonedTimeToUTC t
unixTimeInt = read tAsUnix
unixTimeInt32 = fromInteger $ toInteger unixTimeInt
in unixTimeInt32
putTime :: ZonedTime -> Put
putTime t = do
let tStr = show t
tText = T.pack tStr
tBs = TE.encodeUtf8 tText
putInt8 $ fromInteger $ toInteger $ B.length tBs
putByteString tBs
putLogEntry :: LogEntry -> Put
putLogEntry logEntry = do
putInt16le $ enumToInt16 $ logEntry ^. entryVersion
putParentUuid $ logEntry ^. entryParent
putTime $ logEntry ^. entryTime
putEntryAction $ logEntry ^. entryAction
putEntryAction :: EntryAction -> Put
putEntryAction (NewActiveAction aTask) = do
putInt8 1
putActiveTask aTask
putEntryAction (CompletedActiveAction aTask) = do
putInt8 2
putActiveTask aTask
putEntryAction (NewPooledAction pTask) = do
putInt8 3
putPooledTask pTask
putEntryAction (ActivationAction aTasks) = do
putInt8 4
putInt16le $ fromInteger $ toInteger $ length aTasks
forM_ aTasks $ \aTask -> putActiveTask aTask
putEntryAction (CleanupAction aTasks) = do
putInt8 5
putInt16le $ fromInteger $ toInteger $ length aTasks
forM_ aTasks $ \aTask -> putActiveTask aTask
logEntryToDblSha256 :: LogEntry -> EntryHash
logEntryToDblSha256 logEntry =
let byteString = runPut (putLogEntry logEntry)
octets = BL.unpack byteString
sha256 = Sha256.hash octets
dblSha256 = Sha256.hash sha256
in DblSha256Hash dblSha256
-- | Create a complete new log
newLog :: IO Log
newLog = do
uuid <- UuidV4.nextRandom
return $ mkLog uuid
-- | Create a new log with the given UUID
mkLog :: UUID -> Log
mkLog uuid = Log $ StartLogEntry uuid
addLogEntry :: LogEntry -> Log -> Log
addLogEntry entry = over logHead $ \head ->
let entry' = set entryParent head entry
hash = logEntryToDblSha256 entry'
entry'' = set entryHash hash entry'
in PrevLogEntry entry''
addLogAction :: EntryAction -> ZonedTime -> Log -> Log
addLogAction action time =
let logEntry = LogEntry EmptyHash currentVersion EmptyLogEntry time action
in addLogEntry logEntry
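-- A minimal usage sketch (@somePooledTask@ is a hypothetical 'PooledTask'):
--
-- > example = do
-- >   log <- newLog
-- >   time <- getZonedTime
-- >   let log' = addLogAction (NewPooledAction somePooledTask) time log
-- >   mapM_ print (logToTextList log')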
logToList :: Log -> [LogEntry]
logToList log =
parentEntryToList $ log ^. logHead
parentEntryToList :: ParentLogEntry -> [LogEntry]
parentEntryToList (StartLogEntry _) = []
parentEntryToList EmptyLogEntry = []
parentEntryToList (PrevLogEntry entry) = entry : parentEntryToList prevEntry
where prevEntry = entry ^. entryParent
logToTextList :: Log -> [Text]
logToTextList log =
map logEntryToText $ parentEntryToList (log ^. logHead)
logEntryToText :: LogEntry -> Text
logEntryToText entry =
(T.pack $ show $ entry^.entryTime)
`T.append` ": " `T.append` (entryActionToText (entry ^. entryAction))
activeTaskToText :: ActiveTask -> Text
activeTaskToText aTask =
(aTask ^. atTask . tTitle) `T.append`
", due " `T.append` (T.pack $ show (aTask ^. atDue))
mergeTextNewln :: Text -> Text -> Text
mergeTextNewln t1 t2 = t1 `T.append` "\n" `T.append` t2
entryActionToText :: EntryAction -> Text
entryActionToText (NewActiveAction aTask) =
"Add Task: " `T.append` (activeTaskToText aTask)
entryActionToText (NewPooledAction pTask) =
"Add Pool: " `T.append` (pTask ^. ptTask . tTitle)
entryActionToText (CompletedActiveAction aTask) =
"Completed Task: " `T.append` (activeTaskToText aTask)
entryActionToText (ActivationAction aTasks) =
"Many tasks activated: " `T.append`
(foldl mergeTextNewln "" $ map (\x -> " - " `T.append` activeTaskToText x) aTasks)
validateEntryHistory :: Maybe EntryHash -> LogEntry -> (Bool, [Text])
validateEntryHistory untilHash logEntry =
let verificationHash = logEntryToDblSha256 logEntry
logText = logEntryToText logEntry
in if (Just $ logEntry ^. entryHash) == untilHash
then (True, [logText `T.append` " (checked)"])
else if verificationHash == (logEntry ^. entryHash)
then case logEntry ^. entryParent of
StartLogEntry _ -> (True, [logText `T.append` " (checked)"])
PrevLogEntry prevLogEntry ->
let (res, log)
= validateEntryHistory untilHash prevLogEntry
log' = (logText `T.append` " (checked)") : log
in (res, log')
else (False, [logText `T.append` " (failed)"])
validateLogHistory :: Maybe EntryHash -> Log -> (Bool, [Text])
validateLogHistory untilHash log =
case log ^. logHead of
StartLogEntry _ -> (True, ["Empty log passed"])
PrevLogEntry logEntry -> validateEntryHistory untilHash logEntry
rehashEntryHistory :: LogEntry -> LogEntry
rehashEntryHistory logEntry =
let rehashedParent = case logEntry ^. entryParent of
StartLogEntry _ -> logEntry ^. entryParent
PrevLogEntry prevLogEntry ->
PrevLogEntry $ rehashEntryHistory prevLogEntry
logEntry' = set entryParent rehashedParent logEntry
newHash = logEntryToDblSha256 logEntry'
in set entryHash newHash logEntry'
rehashLogHistory :: Log -> Log
rehashLogHistory log =
case log ^. logHead of
StartLogEntry _ -> log
PrevLogEntry logEntry ->
set logHead (PrevLogEntry $ rehashEntryHistory logEntry) log
{-| Applies a LogEntry to a TaskStat and returns the result. -}
statStep :: (LogEntry, TaskStat) -> TaskStat
statStep (logEntry, stat) = case logEntry ^. entryAction of
NewActiveAction aTask -> addActiveTaskType aTask stat
NewPooledAction pTask -> addPooledTaskType pTask stat
CompletedActiveAction aTask -> markDone (aTask ^. atTask . tTitle) stat
ActivationAction aTasks ->
foldr addActiveTaskType stat aTasks
{-| Rebuild the whole TaskStat from a Log -}
rebuildTaskStat :: Log -> TaskStat
rebuildTaskStat log =
let entries = logToEntryList log
in rebuildTaskStatFromList entries
applyLogEntriesToTaskStat :: [LogEntry] -> TaskStat -> TaskStat
applyLogEntriesToTaskStat entries taskStat = foldr aux taskStat entries
where aux entry stat = statStep (entry, stat)
rebuildTaskStatFromList :: [LogEntry] -> TaskStat
rebuildTaskStatFromList entries =
applyLogEntriesToTaskStat entries emptyTaskStat
{-| Turns the Log to a list of its LogEntries.
Starting with the most recent LogEntry. -}
logToEntryList :: Log -> [LogEntry]
logToEntryList log =
let parentEntry = log ^. logHead
in parentEntryToEntryList parentEntry
{-| Turns a LogEntry and its parent to a List.
Starting with the most recent LogEntry. -}
parentEntryToEntryList :: ParentLogEntry -> [LogEntry]
parentEntryToEntryList (StartLogEntry _) = []
parentEntryToEntryList EmptyLogEntry = []
parentEntryToEntryList (PrevLogEntry e) =
e : (parentEntryToEntryList $ e ^. entryParent)
| neosam/haskelltodo | src/Todo/Changelog.hs | bsd-3-clause | 10,426 | 0 | 18 | 2,668 | 2,511 | 1,329 | 1,182 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
-- |
-- Copyright : (c) 2010-2012 Benedikt Schmidt
-- License : GPL v3 (see LICENSE)
--
-- Maintainer : Benedikt Schmidt <[email protected]>
--
-- Equational signatures for Maude.
module Term.Maude.Signature (
-- * Maude signatures
MaudeSig
, enableDH
, enableBP
, enableMSet
, enableDiff
, enableXor
, stFunSyms
, stRules
, funSyms
, irreducibleFunSyms
, rrulesForMaudeSig
, noEqFunSyms
-- * predefined maude signatures
, dhMaudeSig
, pairMaudeSig
, asymEncMaudeSig
, symEncMaudeSig
, signatureMaudeSig
, revealSignatureMaudeSig
, hashMaudeSig
, msetMaudeSig
, bpMaudeSig
, xorMaudeSig
, minimalMaudeSig
, enableDiffMaudeSig
-- * extend maude signatures
, addFunSym
, addCtxtStRule
-- * pretty printing
, prettyMaudeSig
) where
import Term.Term
import Term.LTerm
import Term.Builtin.Rules
import Term.SubtermRule
import Control.Monad.Fresh
-- import Control.Applicative
import Control.DeepSeq
import GHC.Generics (Generic)
import Data.Binary
import Data.Foldable (asum)
-- import Data.Monoid
import Data.Set (Set)
import qualified Data.Set as S
import qualified Data.ByteString.Char8 as BC
import qualified Text.PrettyPrint.Highlight as P
------------------------------------------------------------------------------
-- Maude Signatures
----------------------------------------------------------------------
-- | The required information to define a @Maude functional module@.
data MaudeSig = MaudeSig
{ enableDH :: Bool
, enableBP :: Bool
, enableMSet :: Bool
, enableXor :: Bool
, enableDiff :: Bool
, stFunSyms :: S.Set NoEqSym -- ^ function signature for subterm theory
, stRules :: S.Set CtxtStRule -- ^ rewriting rules for subterm theory
, funSyms :: FunSig -- ^ function signature including the
-- function symbols for DH, BP, and Multiset
-- can be computed from enableX and stFunSyms
, irreducibleFunSyms :: FunSig -- ^ irreducible function symbols (can be computed)
}
deriving (Ord, Show, Eq, Generic, NFData, Binary)
-- | Smart constructor for maude signatures. Computes funSyms and irreducibleFunSyms.
maudeSig :: MaudeSig -> MaudeSig
maudeSig msig@(MaudeSig {enableDH,enableBP,enableMSet,enableXor,enableDiff=_,stFunSyms,stRules}) =
msig {enableDH=enableDH||enableBP, funSyms=allfuns, irreducibleFunSyms=irreduciblefuns}
where
allfuns = (S.map NoEq stFunSyms)
`S.union` (if enableDH || enableBP then dhFunSig else S.empty)
`S.union` (if enableBP then bpFunSig else S.empty)
`S.union` (if enableMSet then msetFunSig else S.empty)
`S.union` (if enableXor then xorFunSig else S.empty)
irreduciblefuns = allfuns `S.difference` reducible
reducible =
S.fromList [ o | CtxtStRule (viewTerm -> FApp o _) _ <- S.toList stRules ]
`S.union` dhReducibleFunSig `S.union` bpReducibleFunSig `S.union` xorReducibleFunSig
-- | A monoid instance to combine maude signatures.
instance Semigroup MaudeSig where
MaudeSig dh1 bp1 mset1 xor1 diff1 stFunSyms1 stRules1 _ _ <>
MaudeSig dh2 bp2 mset2 xor2 diff2 stFunSyms2 stRules2 _ _ =
maudeSig (mempty {enableDH=dh1||dh2
,enableBP=bp1||bp2
,enableMSet=mset1||mset2
,enableXor=xor1||xor2
,enableDiff=diff1||diff2
,stFunSyms=S.union stFunSyms1 stFunSyms2
,stRules=S.union stRules1 stRules2})
instance Monoid MaudeSig where
mempty = MaudeSig False False False False False S.empty S.empty S.empty S.empty
-- | Non-AC function symbols.
noEqFunSyms :: MaudeSig -> NoEqFunSig
noEqFunSyms msig = S.fromList [ o | NoEq o <- S.toList (funSyms msig) ]
-- | Add function symbol to given maude signature.
addFunSym :: NoEqSym -> MaudeSig -> MaudeSig
addFunSym funsym msig =
msig `mappend` mempty {stFunSyms=S.fromList [funsym]}
-- | Add subterm rule to given maude signature.
addCtxtStRule :: CtxtStRule -> MaudeSig -> MaudeSig
addCtxtStRule str msig =
msig `mappend` mempty {stRules=S.fromList [str]}
-- | Returns all rewriting rules including the rules
-- for DH, BP, and multiset.
rrulesForMaudeSig :: MaudeSig -> Set (RRule LNTerm)
rrulesForMaudeSig (MaudeSig {enableDH, enableBP, enableMSet, enableXor, stRules}) =
(S.map ctxtStRuleToRRule stRules)
`S.union` (if enableDH then dhRules else S.empty)
`S.union` (if enableBP then bpRules else S.empty)
`S.union` (if enableMSet then msetRules else S.empty)
`S.union` (if enableXor then xorRules else S.empty)
------------------------------------------------------------------------------
-- Builtin maude signatures
------------------------------------------------------------------------------
-- | Maude signatures for the AC symbols.
dhMaudeSig, bpMaudeSig, msetMaudeSig, xorMaudeSig :: MaudeSig
dhMaudeSig = maudeSig $ mempty {enableDH=True}
bpMaudeSig = maudeSig $ mempty {enableBP=True}
msetMaudeSig = maudeSig $ mempty {enableMSet=True}
xorMaudeSig = maudeSig $ mempty {enableXor=True}
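-- These Maude signatures compose via the 'Semigroup'/'Monoid' instance above,
-- e.g. (illustratively)
--
-- > dhMaudeSig <> pairMaudeSig <> symEncMaudeSig
--
-- yields a signature with Diffie-Hellman, pairs, and symmetric encryption
-- enabled.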
-- | Maude signatures for the default subterm symbols.
--pairMaudeSig :: Bool -> MaudeSig
--pairMaudeSig flag = maudeSig $ mempty {stFunSyms=pairFunSig,stRules=pairRules,enableDiff=flag}
pairMaudeSig, symEncMaudeSig, asymEncMaudeSig, signatureMaudeSig, revealSignatureMaudeSig, hashMaudeSig :: MaudeSig
pairMaudeSig = maudeSig $ mempty {stFunSyms=pairFunSig,stRules=pairRules}
symEncMaudeSig = maudeSig $ mempty {stFunSyms=symEncFunSig,stRules=symEncRules}
asymEncMaudeSig = maudeSig $ mempty {stFunSyms=asymEncFunSig,stRules=asymEncRules}
signatureMaudeSig = maudeSig $ mempty {stFunSyms=signatureFunSig,stRules=signatureRules}
revealSignatureMaudeSig = maudeSig $ mempty {stFunSyms=revealSignatureFunSig,stRules=revealSignatureRules}
hashMaudeSig = maudeSig $ mempty {stFunSyms=hashFunSig}
-- | The minimal maude signature.
minimalMaudeSig :: Bool -> MaudeSig
minimalMaudeSig flag = maudeSig $ mempty {enableDiff=flag,stFunSyms=pairFunSig,stRules=pairRules}
-- essentially pairMaudeSig, but with the enableDiff flag set according to "flag"
-- -- MaudeSig False False False flag pairFunSig pairRules S.empty S.empty
-- | Signature with enableDiff set to True
enableDiffMaudeSig :: MaudeSig
enableDiffMaudeSig = maudeSig $ mempty {enableDiff=True}
------------------------------------------------------------------------------
-- Pretty Printing
------------------------------------------------------------------------------
prettyMaudeSig :: P.HighlightDocument d => MaudeSig -> d
prettyMaudeSig sig = P.vcat
[ ppNonEmptyList' "builtins:" P.text builtIns
, ppNonEmptyList' "functions:" ppFunSymb $ S.toList (stFunSyms sig)
, ppNonEmptyList
(\ds -> P.sep (P.keyword_ "equations:" : map (P.nest 2) ds))
prettyCtxtStRule $ S.toList (stRules sig)
]
where
ppNonEmptyList' name = ppNonEmptyList ((P.keyword_ name P.<->) . P.fsep)
ppNonEmptyList _ _ [] = P.emptyDoc
ppNonEmptyList hdr pp xs = hdr $ P.punctuate P.comma $ map pp xs
builtIns = asum $ map (\(f, x) -> guard (f sig) *> pure x)
[ (enableDH, "diffie-hellman")
, (enableBP, "bilinear-pairing")
, (enableMSet, "multiset")
, (enableXor, "xor")
]
ppFunSymb (f,(k,priv)) = P.text $ BC.unpack f ++ "/" ++ show k ++ showPriv priv
where showPriv Private = " [private]"
showPriv Public = ""
| rsasse/tamarin-prover | lib/term/src/Term/Maude/Signature.hs | gpl-3.0 | 7,924 | 0 | 17 | 1,718 | 1,694 | 994 | 700 | 130 | 5 |
{-# LANGUAGE Rank2Types, NoMonomorphismRestriction, ScopedTypeVariables #-}
module Database.Design.Ampersand.Test.Parser.ParserTest (
parseReparse, parseScripts, showErrors
) where
import Prelude hiding (readFile)
import Database.Design.Ampersand.ADL1.PrettyPrinters(prettyPrint)
import Database.Design.Ampersand.Core.ParseTree
import Database.Design.Ampersand.Input.ADL1.CtxError (Guarded(..),whenChecked,CtxError)
import Database.Design.Ampersand.Input.ADL1.Parser
import Database.Design.Ampersand.Input.Parsing
import Database.Design.Ampersand.Misc.Options(Options)
import System.IO (hPutStrLn, stderr)
-- Tries to parse all the given files
parseScripts :: Options -> [FilePath] -> IO Bool
parseScripts _ [] = return True
parseScripts opts (f:fs) =
do parsed <- parseADL opts (Left f)
case parsed of
Checked _ -> do { putStrLn ("Parsed: " ++ f); parseScripts opts fs }
Errors e -> do { putStrLn ("Cannot parse: " ++ f); showErrors e; return False }
printErrLn :: Show a => a -> IO ()
printErrLn a = hPutStrLn stderr (show a)
showErrors :: [CtxError] -> IO ()
showErrors [] = return ()
showErrors (e:es) = do { printErrLn e; showErrors es }
parse :: FilePath -> String -> Guarded P_Context
parse file txt = whenChecked (runParser pContext file txt) (Checked . fst)
parseReparse :: FilePath -> String -> Guarded P_Context
parseReparse file txt = whenChecked (parse file txt) reparse
where reparse p = parse (file ++ "**pretty") (prettyPrint p)
| guoy34/ampersand | src/Database/Design/Ampersand/Test/Parser/ParserTest.hs | gpl-3.0 | 1,511 | 0 | 14 | 251 | 484 | 263 | 221 | 28 | 2 |
module Propellor.Property.Tor where
import Propellor
import qualified Propellor.Property.File as File
import qualified Propellor.Property.Apt as Apt
isBridge :: Property
isBridge = setup `requires` Apt.installed ["tor"]
`describe` "tor bridge"
where
setup = "/etc/tor/torrc" `File.hasContent`
[ "SocksPort 0"
, "ORPort 443"
, "BridgeRelay 1"
, "Exitpolicy reject *:*"
] `onChange` restartTor
restartTor :: Property
restartTor = cmdProperty "service" ["tor", "restart"]
| abailly/propellor-test2 | src/Propellor/Property/Tor.hs | bsd-2-clause | 486 | 6 | 9 | 74 | 118 | 75 | 43 | 15 | 1 |
-- |
-- Module : Crypto.Number.Serialize.Internal.LE
-- License : BSD-style
-- Maintainer : Vincent Hanquez <[email protected]>
-- Stability : experimental
-- Portability : Good
--
-- Fast serialization primitives for integer using raw pointers (little endian)
{-# LANGUAGE BangPatterns #-}
module Crypto.Number.Serialize.Internal.LE
( i2osp
, i2ospOf
, os2ip
) where
import Crypto.Number.Compat
import Crypto.Number.Basic
import Data.Bits
import Data.Memory.PtrMethods
import Data.Word (Word8)
import Foreign.Ptr
import Foreign.Storable
-- | Fill a pointer with the little endian binary representation of an integer
--
-- If the room available @ptrSz@ is less than the number of bytes needed,
-- 0 is returned. Likewise if a parameter is invalid, 0 is returned.
--
-- Returns the number of bytes written
i2osp :: Integer -> Ptr Word8 -> Int -> IO Int
i2osp m ptr ptrSz
| ptrSz <= 0 = return 0
| m < 0 = return 0
| m == 0 = pokeByteOff ptr 0 (0 :: Word8) >> return 1
| ptrSz < sz = return 0
| otherwise = fillPtr ptr sz m >> return sz
where
!sz = numBytes m
-- | Similar to 'i2osp', except it will pad any remaining space with zero.
i2ospOf :: Integer -> Ptr Word8 -> Int -> IO Int
i2ospOf m ptr ptrSz
| ptrSz <= 0 = return 0
| m < 0 = return 0
| ptrSz < sz = return 0
| otherwise = do
memSet ptr 0 ptrSz
fillPtr ptr sz m
return ptrSz
where
!sz = numBytes m
fillPtr :: Ptr Word8 -> Int -> Integer -> IO ()
fillPtr p sz m = gmpExportIntegerLE m p `onGmpUnsupported` export 0 m
where
export ofs i
| ofs >= sz = return ()
| otherwise = do
let (i', b) = i `divMod` 256
pokeByteOff p ofs (fromIntegral b :: Word8)
export (ofs+1) i'
-- | Transform a little endian binary integer representation, pointed to by a
-- pointer together with its size, into an integer
os2ip :: Ptr Word8 -> Int -> IO Integer
os2ip ptr ptrSz
| ptrSz <= 0 = return 0
| otherwise = gmpImportIntegerLE ptrSz ptr `onGmpUnsupported` loop 0 (ptrSz-1) ptr
where
loop :: Integer -> Int -> Ptr Word8 -> IO Integer
loop !acc i !p
| i < 0 = return acc
| otherwise = do
w <- peekByteOff p i :: IO Word8
loop ((acc `shiftL` 8) .|. fromIntegral w) (i-1) p
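-- A minimal round-trip sketch (illustrative only, not part of this module's API);
-- it assumes 'allocaBytes' from "Foreign.Marshal.Alloc" in base:
--
-- > import Foreign.Marshal.Alloc (allocaBytes)
-- >
-- > roundTrip :: Integer -> IO Integer
-- > roundTrip n = allocaBytes 16 $ \ptr -> do
-- >     written <- i2osp n ptr 16   -- number of bytes actually written (0 on failure)
-- >     os2ip ptr written           -- read them back, little endian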
| vincenthz/cryptonite | Crypto/Number/Serialize/Internal/LE.hs | bsd-3-clause | 2,415 | 0 | 14 | 718 | 692 | 341 | 351 | 48 | 1 |
{-# LANGUAGE RecordWildCards #-}
record = 1 | bitemyapp/apply-refact | tests/examples/Extensions13.hs | bsd-3-clause | 44 | 0 | 4 | 7 | 7 | 4 | 3 | 2 | 1 |
{-# language MagicHash #-}
{-# language UnboxedTuples #-}
module CopyArray
( smallCopy
) where
import GHC.Exts
import GHC.IO
data ByteArray = ByteArray ByteArray#
-- Does an 8 byte copy with sub-word (2 bytes) alignment
-- Should be unrolled into 4 aligned stores (MOVWs)
smallCopy :: ByteArray -> IO ByteArray
smallCopy (ByteArray ba) = IO $ \s0 -> case newByteArray# 8# s0 of
(# s1, mut #) -> case copyByteArray# ba 2# mut 0# 8# s1 of
s2 -> case unsafeFreezeByteArray# mut s2 of
(# s3, frozen #) -> (# s3, ByteArray frozen #)
| sdiehl/ghc | testsuite/tests/codeGen/should_gen_asm/bytearray-memcpy-unroll.hs | bsd-3-clause | 552 | 0 | 17 | 117 | 138 | 74 | 64 | 12 | 1 |
module GuardsIn1 where
f :: [Int] -> Int
f g
| g == [1] = 42
| otherwise = head g | kmate/HaRe | old/testing/introCase/GuardsIn1.hs | bsd-3-clause | 94 | 0 | 9 | 33 | 49 | 25 | 24 | 5 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fil-PH">
<title>Passive Scan Rules | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | ccgreen13/zap-extensions | src/org/zaproxy/zap/extension/pscanrules/resources/help_fil_PH/helpset_fil_PH.hs | apache-2.0 | 981 | 80 | 66 | 161 | 417 | 211 | 206 | -1 | -1 |
module Utility where
isDots :: String -> Bool
isDots s = (s == ".") || (s == "..")
isManaged :: FilePath -> Bool
isManaged s = (s /= ".towhead.db") && (not $ isDots s) && (s /= ".dat")
| wangell/towhead | Utility.hs | mit | 187 | 0 | 9 | 40 | 88 | 48 | 40 | 5 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module CNFSet (tests) where
import BooleanLaws
import NegableLaws
import SimplifyLaws
import BooleanModelLaws
import FreeBoolean
import Test.Tasty
import Test.QuickCheck
import Control.Applicative
import Data.Monoid
import Data.Function (on)
import Data.Algebra.Boolean.Negable (Negable)
import Data.Algebra.Boolean.CNF.Set
instance (Ord a, Negable a, Arbitrary a) => Arbitrary (CNF a) where
arbitrary = fromFreeBoolean <$> arbitrary
tests :: TestTree
tests = testGroup "CNF set implementation"
[ monotoneLaws eq
, nonMonotoneLaws eq
, negableLaws eq
, simplifyLaws (undefined :: CNF (Either Bool Bool))
, booleanModelLaws (undefined :: CNF (Either Bool Bool))
]
eq :: CNF (Maybe (Sum Int)) -> CNF (Maybe (Sum Int)) -> Bool
eq = (==) `on` toBool
| phadej/boolean-normal-forms | tests/CNFSet.hs | mit | 805 | 0 | 11 | 126 | 254 | 143 | 111 | 25 | 1 |
{-# LANGUAGE RecordWildCards #-}
-- import Control.Monad.State
import Data.Monoid
import Data.Maybe()
import Text.Read()
import System.Exit
type Point = Int
newtype Region = Region (Point, Point) deriving (Eq, Show)
instance Monoid Region where
mempty = Region (0, 0)
mappend (Region (begin, _)) (Region(_, end)) = Region (begin, end)
--Sourced
data Sourced a = Sourced {
val :: a,
region :: Region
}
instance Monoid a => Monoid (Sourced a) where
mempty = Sourced {val = mempty, region=mempty}
mappend source1 source2 = Sourced { val=val source1 <> val source2, region=region source1 <> region source2}
appendSourced :: Sourced [a] -> Sourced a -> Sourced [a]
appendSourced list atom = Sourced { val= val list ++ [val atom], region=region list <> region atom }
instance Show a => Show (Sourced a) where
show Sourced {val=val, region=Region (begin, end)} = "(" ++ show begin ++ ":" ++ show end ++ ")" ++ show val
instance Functor Sourced where
fmap f sourced = Sourced { val=f $ val sourced, region=region sourced}
--Token
data Token = TokenOpenBracket | TokenCloseBracket | TokenIdentifier String | TokenString String | TokenBool Bool
instance Show Token where
show TokenOpenBracket = "("
show TokenCloseBracket = ")"
show (TokenIdentifier s) = "id-" ++ s
show (TokenString s) = "\"" ++ s ++ "\""
show (TokenBool b) = "bool-" ++ show b
--Cursor
class StreamError e where
emptyError :: e
data Cursor a e = Cursor {
stream :: [a],
point :: Point
}
cursorEmpty :: Cursor a e -> Bool
cursorEmpty Cursor{..} = null stream
cursorTake :: StreamError e => Int -> Cursor a e -> Either e (Sourced [a], Cursor a e)
cursorTake move Cursor{..} =
if length stream < move
then Left emptyError
else Right (sourcedValue, cursor')
where
sourcedValue = Sourced { val=value, region=Region(point, point') }
value = take move stream
point' = point + move
cursor' = Cursor { stream=drop move stream, point=point' }
cursorAdvance :: StreamError e => Cursor a e -> Either e (Sourced a, Cursor a e)
cursorAdvance cursor = do
(sourcedListVal, cursor') <- cursorTake 1 cursor
let value = head $ val sourcedListVal
return (Sourced {val=value, region=region sourcedListVal}, cursor')
cursorAdvanceUnsourced :: StreamError e => Cursor a e -> Either e (a, Cursor a e)
cursorAdvanceUnsourced cursor = do
(sourced, cursor') <- cursorAdvance cursor
return (val sourced, cursor')
cursorPeek :: StreamError e => Int -> Cursor a e -> Either e a
cursorPeek index Cursor{..} =
if length stream <= index
then Left emptyError
else Right $ stream !! index
cursorTakeWhileAccum :: StreamError e => Sourced [a] -> (a -> Bool) -> Cursor a e -> Either e (Sourced [a], Cursor a e)
cursorTakeWhileAccum accum taker cursor =
if cursorEmpty cursor
then
return (accum, cursor)
else
do
peek <- cursorPeek 0 cursor
if taker peek
then do
(current, cursor') <- cursorAdvance cursor
cursorTakeWhileAccum (accum `appendSourced` current) taker cursor'
else Right (accum, cursor)
cursorTakeWhile :: StreamError e => (a -> Bool) -> Cursor a e -> Either e (Sourced [a], Cursor a e)
cursorTakeWhile taker cursor = cursorTakeWhileAccum emptyaccum taker cursor where
emptyaccum = Sourced {val=[], region=Region(point cursor, point cursor)}
{-
cursorTakeWhileUnsourced :: StreamError e => (a -> Bool) -> Cursor a e -> Either e ([a], Cursor a e)
cursorTakeWhileUnsourced taker cursor = do
(sourced, cursor') <- (cursorTakeWhile taker cursor)
return (val sourced, cursor')
-}
--Tokenizer
data TokenizerError = UnexpectedEOF | UnclosedString (Sourced String) deriving (Show)
instance StreamError TokenizerError where
emptyError = UnexpectedEOF
tokenizeOpenBracket :: [Sourced Token] -> Cursor Char TokenizerError -> Either TokenizerError [Sourced Token]
tokenizeOpenBracket accum cursor = do
(sourcedStr, cursor') <- cursorAdvance cursor
let sourcedTok = fmap (const TokenOpenBracket) sourcedStr
tokenizeAccum (accum ++ [sourcedTok]) cursor'
tokenizeCloseBracket :: [Sourced Token] -> Cursor Char TokenizerError -> Either TokenizerError [Sourced Token]
tokenizeCloseBracket accum cursor = do
(sourcedStr, cursor') <- cursorAdvance cursor
let sourcedTok = fmap (const TokenCloseBracket) sourcedStr
tokenizeAccum (accum ++ [sourcedTok]) cursor'
tokenizeIdentifier :: [Sourced Token] -> Cursor Char TokenizerError -> Either TokenizerError [Sourced Token]
tokenizeIdentifier accum cursor =
do
(sourcedStr, cursor') <- cursorTakeWhile (`notElem` "() \n\t") cursor
let sourcedTok = --check for keywords
case val sourcedStr of
"true" -> fmap (const (TokenBool True)) sourcedStr
"false" -> fmap (const (TokenBool False)) sourcedStr
_ -> fmap TokenIdentifier sourcedStr
tokenizeAccum (accum ++ [sourcedTok]) cursor'
tokenizeString :: [Sourced Token] -> Cursor Char TokenizerError -> Either TokenizerError [Sourced Token]
tokenizeString accum cursor =
do
(_, cursorNext) <- cursorAdvance cursor --consume the "
(sourcedStr, cursor') <- cursorTakeWhile (/= '\"') cursorNext
if cursorEmpty cursor'
then
Left (UnclosedString sourcedStr)
else do
(_, cursorAbsorbed) <- cursorAdvance cursor' --absorb the '\"'
let sourcedTok = fmap TokenString sourcedStr
tokenizeAccum (accum ++ [sourcedTok]) cursorAbsorbed
tokenizeAccum :: [Sourced Token] -> Cursor Char TokenizerError -> Either TokenizerError [Sourced Token]
tokenizeAccum accum cursor =
do
    (_, cursor') <- cursorTakeWhile (`elem` " \n\t") cursor --cleanup whitespace
if cursorEmpty cursor'
then return accum
else do
peek <- cursorPeek 0 cursor'
case peek of
'(' -> tokenizeOpenBracket accum cursor'
')' -> tokenizeCloseBracket accum cursor'
'\"' -> tokenizeString accum cursor'
_ -> tokenizeIdentifier accum cursor'
tokenize :: String -> Either TokenizerError [Sourced Token]
tokenize src = tokenizeAccum [] cursor where
cursor = Cursor {stream=src, point=0}
--Parser
data ParseError = UnbalancedParantheses (Sourced Token) | UnexpectedToken (Sourced Token) | UnexpectedEndOfTokens deriving(Show)
instance StreamError ParseError where
emptyError = UnexpectedEndOfTokens
data AST = ASTList [Sourced AST] | AtomId String | AtomInt Int | AtomFloat Double
instance Show AST where
show (ASTList l) = "(" ++ foldl (\a b -> a ++ " " ++ b) "" (map (show . val) l) ++ ")"
show (AtomId str) = "" ++ str
show (AtomInt int) = show int
show (AtomFloat float) = show float
type ParseCursor = Cursor (Sourced Token) ParseError
parseListAccum :: Sourced Token -> [Sourced AST] -> ParseCursor -> Either ParseError ([Sourced AST], ParseCursor)
parseListAccum openBracketToken accum cursor =
if cursorEmpty cursor then
Left $ UnbalancedParantheses openBracketToken
else do
Sourced{val=peek} <- cursorPeek 0 cursor
case peek of
TokenCloseBracket -> return (accum, cursor)
_ -> do
(ast, cursor') <- parseSingle cursor
parseListAccum openBracketToken (accum ++ [ast]) cursor'
parseList :: ParseCursor -> Either ParseError (Sourced AST, ParseCursor)
parseList cursor = do
(tokenBegin, cursorBegin) <- cursorAdvanceUnsourced cursor
(sourcedList, cursorAtCloseBracket) <- parseListAccum tokenBegin [] cursorBegin
(tokenEnd, cursorEnd) <- cursorAdvanceUnsourced cursorAtCloseBracket
let totalRegion = region tokenBegin <> region tokenEnd
return (Sourced{val=ASTList sourcedList, region=totalRegion}, cursorEnd)
--parseAccum (accum ++ list) cursorEnd
parseIdentifier :: Cursor (Sourced Token) ParseError -> Either ParseError (Sourced AST, ParseCursor)
parseIdentifier cursor = do
(idSourcedToken, cursor') <- cursorAdvanceUnsourced cursor
case val idSourcedToken of
TokenIdentifier idStr -> return (atomId, cursor')
where atomId = Sourced {val = AtomId idStr, region=region idSourcedToken}
_ -> Left (UnexpectedToken idSourcedToken)
parseSingle :: Cursor (Sourced Token) ParseError -> Either ParseError (Sourced AST, ParseCursor)
parseSingle cursor =
if cursorEmpty cursor
then Left UnexpectedEndOfTokens
else do
sourcedPeek @ Sourced{val=peek} <- cursorPeek 0 cursor
case peek of
TokenOpenBracket -> parseList cursor
TokenIdentifier _ -> parseIdentifier cursor
TokenCloseBracket -> Left (UnbalancedParantheses sourcedPeek)
_ -> undefined
parseAccum :: [Sourced AST] -> ParseCursor -> Either ParseError [Sourced AST]
parseAccum accum cursor =
if cursorEmpty cursor
then return accum
else do
(ast, cursor') <- parseSingle cursor
parseAccum (accum ++ [ast]) cursor'
parse :: [Sourced Token] -> Either ParseError [Sourced AST]
parse tokens = parseAccum [] cursor where
cursor = Cursor { stream=tokens, point=0}
--evaluation
data EvalError = MismatchedType | EmptyList deriving(Show)
eval :: AST -> Either EvalError AST
eval (ASTList []) = Left EmptyList
main :: IO ()
main = do
input <- getLine
let tokensResult = tokenize input
case tokensResult of
Right tokens -> do
print tokens
let parseResult = parse tokens
case parseResult of
Right astList ->
putStrLn $ "parse:\n" ++ show (map val astList)
Left parseError ->
putStrLn $ "parse error:\n" ++ show parseError
Left tokenizationError -> putStrLn ("tokenization error:\n" ++ (show tokenizationError)) >> exitFailure
--print tokensResult
--let parseResult =
-- case tokensResult of
-- Right tokens -> parse tokens
-- Left _ -> Right []
-- print parseResult
| bollu/hask-lisp-interp | src/Main.hs | mit | 9,947 | 1 | 18 | 2,195 | 3,199 | 1,632 | 1,567 | 196 | 5 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.QuickTimePluginReplacement
(postEvent, getMovieSize, getTimedMetaData, getAccessLog,
getErrorLog, QuickTimePluginReplacement(..),
gTypeQuickTimePluginReplacement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/QuickTimePluginReplacement.postEvent Mozilla QuickTimePluginReplacement.postEvent documentation>
postEvent ::
(MonadDOM m, ToJSString eventName) =>
QuickTimePluginReplacement -> eventName -> m ()
postEvent self eventName
= liftDOM (void (self ^. jsf "postEvent" [toJSVal eventName]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/QuickTimePluginReplacement.movieSize Mozilla QuickTimePluginReplacement.movieSize documentation>
getMovieSize ::
(MonadDOM m) => QuickTimePluginReplacement -> m Word64
getMovieSize self
= liftDOM (round <$> ((self ^. js "movieSize") >>= valToNumber))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/QuickTimePluginReplacement.timedMetaData Mozilla QuickTimePluginReplacement.timedMetaData documentation>
getTimedMetaData ::
(MonadDOM m) => QuickTimePluginReplacement -> m JSVal
getTimedMetaData self
= liftDOM ((self ^. js "timedMetaData") >>= toJSVal)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/QuickTimePluginReplacement.accessLog Mozilla QuickTimePluginReplacement.accessLog documentation>
getAccessLog ::
(MonadDOM m) => QuickTimePluginReplacement -> m JSVal
getAccessLog self = liftDOM ((self ^. js "accessLog") >>= toJSVal)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/QuickTimePluginReplacement.errorLog Mozilla QuickTimePluginReplacement.errorLog documentation>
getErrorLog ::
(MonadDOM m) => QuickTimePluginReplacement -> m JSVal
getErrorLog self = liftDOM ((self ^. js "errorLog") >>= toJSVal)
| ghcjs/jsaddle-dom | src/JSDOM/Generated/QuickTimePluginReplacement.hs | mit | 2,756 | 0 | 12 | 362 | 585 | 350 | 235 | 39 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module ReactHaskell.Handler where
import Application
import ReactHaskell.Persistence
import Snap.Core
import Snap.Extras.CoreUtils
import Snap.Extras.JSON
import Snap.Snaplet.Heist
import Snap.Snaplet.PostgresqlSimple
index :: AppHandler ()
index = render "index"
getTodos :: AppHandler ()
getTodos = liftPG listTodos >>= writeJSON
postTodo :: AppHandler ()
postTodo = do
todo <- reqJSON
liftPG (createTodo todo)
liftPG listTodos >>= writeJSON
patchTodo :: AppHandler ()
patchTodo = do
id <- getId
todo <- reqJSON
liftPG (updateTodo id todo)
liftPG listTodos >>= writeJSON
deleteTodo :: AppHandler ()
deleteTodo = do
id <- getId
liftPG (destroyTodo id)
liftPG listTodos >>= writeJSON
getId :: AppHandler Integer
getId = maybeBadReq "Missing required param \"id\"" (readParam "id")
| sestrella/react-haskell | src/ReactHaskell/Handler.hs | mit | 846 | 0 | 9 | 137 | 249 | 125 | 124 | 31 | 1 |
module Network.CryptoNote.Types where
import Network.CryptoNote.Crypto.Types (PublicKey, SecretKey)
import Data.Binary (Binary (..))
-- cryptonote_core/cryptonote_basic.h
data AccountPublicAddress = AccountPublicAddress {
spendPubKey :: PublicKey,
viewPubKey :: PublicKey
} deriving (Eq, Show)
data KeyPair = KeyPair {
public :: PublicKey,
secret :: SecretKey
} deriving (Eq, Show)
-- cryptonote_core/account.h
-- CryptoNote.h
data AccountKeys = AccountKeys {
address :: AccountPublicAddress,
spendSecKey :: SecretKey,
viewSecKey :: SecretKey
} deriving (Eq, Show)
-- common/varint.h
newtype VarInt = VarInt Integer
toVarInt :: Integral a => a -> VarInt
toVarInt = undefined
instance Binary VarInt where
put = undefined
get = undefined
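-- A possible encoding sketch, assuming CryptoNote's common/varint.h uses the
-- usual base-128 (LEB128-style) varint: 7 payload bits per byte, with the high
-- bit set on every byte except the last. Illustrative only (for non-negative
-- values) and not wired into 'toVarInt' or the 'Binary' instance above:
--
-- > import Data.Bits ((.&.), (.|.), shiftR)
-- > import Data.Word (Word8)
-- >
-- > encodeVarInt :: Integer -> [Word8]
-- > encodeVarInt n
-- >   | n < 0x80  = [fromIntegral n]
-- >   | otherwise = (fromIntegral (n .&. 0x7f) .|. 0x80) : encodeVarInt (n `shiftR` 7)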
| nvmd/hs-cryptonote | src/Network/CryptoNote/Types.hs | mit | 774 | 0 | 8 | 131 | 189 | 115 | 74 | 22 | 1 |
{-# LANGUAGE RankNTypes, FlexibleInstances, FlexibleContexts, MultiWayIf #-}
{-# LANGUAGE LiberalTypeSynonyms, ImpredicativeTypes #-}
import Haste
import Haste.DOM
import Haste.Events
import Haste.Foreign hiding (get)
import Haste.Graphics.Canvas
import Haste.Graphics.AnimationFrame
import Control.Applicative
import Control.Arrow
import Control.Monad
import Control.Monad.State
import qualified Data.IntMap as IM
-- import qualified JSArray as IM
import qualified Data.Map as M
import Data.List
import Data.Ord
import Data.IORef
import Lens.Family2
import Lens.Family2.Unchecked
import Lens.Family2.State.Lazy
data V2 a = V2 !a !a deriving (Eq, Ord, Show)
type Vec2 = V2 Double
fromV2 :: V2 a -> (a,a)
fromV2 (V2 x y) = (x,y)
toV2 :: (a,a) -> V2 a
toV2 (x,y) = V2 x y
_x :: Lens' (V2 a) a
_x = lens (\(V2 x _) -> x) (\(V2 _ y) x -> V2 x y)
_y :: Lens' (V2 a) a
_y = lens (\(V2 _ y) -> y) (\(V2 x _) y -> V2 x y)
instance Functor V2 where
fmap f (V2 x y) = V2 (f x) (f y)
instance Applicative V2 where
pure a = V2 a a
V2 a b <*> V2 d e = V2 (a d) (b e)
instance Monad V2 where
return a = V2 a a
V2 a b >>= f = V2 a' b' where
V2 a' _ = f a
V2 _ b' = f b
instance Num a => Num (V2 a) where
(+) = liftA2 (+)
(-) = liftA2 (-)
(*) = liftA2 (*)
negate = fmap negate
abs = fmap abs
signum = fmap signum
fromInteger = pure . fromInteger
instance (Random a) => Random (V2 a) where
randomR (x,y) = first toV2 . randomR (fromV2 x, fromV2 y)
norm :: Vec2 -> Double
norm (V2 x y) = sqrt $ x^2 + y^2
normalize :: Vec2 -> Vec2
normalize v = fmap (/ norm v) v
scaleV2 :: Double -> Vec2 -> Vec2
scaleV2 d = fmap (* d) . normalize
intersection :: V2 Vec2 -> V2 Vec2 -> V2 Vec2
intersection (V2 lt1 rb1) (V2 lt2 rb2) = V2 (liftA2 max lt1 lt2) (liftA2 min rb1 rb2)
windowSize :: Vec2
windowSize = V2 640 480
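-- Round the first argument down to a multiple of the second (used below to snap movement angles to 15-degree steps)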
approx :: (RealFrac a) => a -> a -> a
approx p a = let q = fromInteger $ floor $ p / a in a * q
distance :: Vec2 -> Vec2 -> Double
distance v v' = norm $ v - v'
ix :: (ToAny b, FromAny b) => Int -> Lens' (IM.IntMap b) b
ix n = lens (IM.! n) (\l x -> IM.insert n x l)
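-- Insert a value at the first free key (the first gap in the key sequence, or one past the last key), returning that key together with the updated map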
consMap :: (ToAny a, FromAny a) => a -> IM.IntMap a -> (Int, IM.IntMap a)
consMap x m = (n, m & ix n .~ x) where
ks = filter (\(i,j) -> i /= j) $ zip (IM.keys m) [0..]
n | IM.size m == 0 = 0
| ks == [] = last (IM.keys m) + 1
| otherwise = snd $ head $ ks
consMap' :: (ToAny a, FromAny a) => a -> IM.IntMap a -> IM.IntMap a
consMap' x m = snd $ consMap x m
randomRIO :: (Random a, MonadIO m) => (a,a) -> m a
randomRIO ix = liftIO $ do
sd <- newSeed
return $ fst $ randomR ix sd
instance (Random a) => Random (a,a) where
randomR ((a,b), (c,d)) gen =
let (x,gen1) = randomR (a,c) gen
(y,gen2) = randomR (b,d) gen1
in ((x,y), gen2)
data Creature = Plant | Herbivore | Carnivore deriving (Eq, Ord, Enum, Show)
data Condition = Idle | Hunting | Dead deriving (Eq, Show)
data FieldType = Land | Forest deriving (Eq, Ord, Enum, Show)
eatBy :: Creature -> [Creature]
eatBy Plant = []
eatBy Herbivore = [Plant]
eatBy Carnivore = [Herbivore]
data Alife = Alife {
-- on canvas
_pos :: Vec2,
_arg :: Double,
-- trait
_strength :: Int,
_agility :: Int,
_creature :: Creature,
-- state
_counter :: Int,
_destination :: Vec2,
_condition :: Condition,
_life :: Double,
_viewRate :: Double,
_speedRate :: Double
} deriving (Eq, Show)
instance ToAny Alife where
toAny = toAny . toOpaque
instance FromAny Alife where
fromAny = fmap fromOpaque . fromAny
pos :: Lens' Alife Vec2; pos = lens _pos (\a x -> a { _pos = x })
arg :: Lens' Alife Double; arg = lens _arg (\a x -> a { _arg = x })
strength :: Lens' Alife Int; strength = lens _strength (\a x -> a { _strength = x })
agility :: Lens' Alife Int; agility = lens _agility (\a x -> a { _agility = x })
creature :: Lens' Alife Creature; creature = lens _creature (\a x -> a { _creature = x })
counter :: Lens' Alife Int; counter = lens _counter (\a x -> a { _counter = x })
destination :: Lens' Alife Vec2; destination = lens _destination (\a x -> a { _destination = x })
condition :: Lens' Alife Condition; condition = lens _condition (\a x -> a { _condition = x })
life :: Lens' Alife Double; life = lens _life (\a x -> a { _life = x })
viewRate :: Lens' Alife Double; viewRate = lens _viewRate (\a x -> a { _viewRate = x })
speedRate :: Lens' Alife Double; speedRate = lens _speedRate (\a x -> a { _speedRate = x })
data World = World {
_lives :: !(IM.IntMap Alife),
_cursor :: Maybe Int,
_spratio :: [(Int,Int,Int)],
_globalCounter :: Int,
_running :: Bool,
_timeStamp :: HRTimeStamp
}
lives :: Lens' World (IM.IntMap Alife); lives = lens _lives (\a x -> a { _lives = x })
cursor :: Lens' World (Maybe Int); cursor = lens _cursor (\a x -> a { _cursor = x })
spratio :: Lens' World [(Int, Int, Int)]; spratio = lens _spratio (\a x -> a { _spratio = x })
globalCounter :: Lens' World Int; globalCounter = lens _globalCounter (\a x -> a { _globalCounter = x })
running :: Lens' World Bool; running = lens _running (\a x -> a { _running = x })
timeStamp :: Lens' World HRTimeStamp; timeStamp = lens _timeStamp (\a x -> a { _timeStamp = x })
completeLoadBitmaps :: [Bitmap] -> IO () -> IO ()
completeLoadBitmaps bs cont = foldr (\b m -> void $ onEvent (elemOf b) Load $ const m) cont bs
getInside :: Vec2 -> Vec2
getInside (V2 x y)
| x < 0 = getInside $ V2 5 y
| x > (windowSize^._x) = getInside $ V2 (windowSize^._x - 5) y
| y < 0 = getInside $ V2 x 5
| y > (windowSize^._y) = getInside $ V2 x (windowSize^._y - 5)
| otherwise = V2 x y
newLife :: Creature -> Alife
newLife u = case u of
Plant -> plain & creature .~ Plant & strength .~ 8 & agility .~ 20
Herbivore -> plain & creature .~ Herbivore & strength .~ 50 & agility .~ 60
Carnivore -> plain & creature .~ Carnivore & strength .~ 90 & agility .~ 60
where
plain = Alife {
_pos = fmap (/2) windowSize, _arg = 0,
_strength = 0, _agility = 0, _creature = undefined,
_counter = 0, _destination = 0, _condition = Idle, _life = 100,
_viewRate = 1.0, _speedRate = 1.0
}
spawn :: Alife -> StateT World IO ()
spawn ai = do
ps <- IM.elems `fmap` use lives
case ai^.creature of
Plant -> when ((< 1000) $ length $ filter (\a -> a^.creature == Plant) ps) $ lives %= consMap' ai
Herbivore -> when ((< 200) $ length $ filter (\a -> a^.creature == Herbivore) ps) $ lives %= consMap' ai
Carnivore -> when ((< 50) $ length $ filter (\a -> a^.creature == Carnivore) ps) $ lives %= consMap' ai
destruct :: Int -> StateT World IO ()
destruct j = do
x <- use $ lives . ix j
destructor' j (x^.creature)
where
plantAround :: Vec2 -> Double -> StateT World IO ()
plantAround x d = do
let V2 c1 c2 = V2 (x - pure d) (x + pure d) `intersection` V2 0 windowSize
p <- randomRIO (c1, c2)
spawn (newLife Plant & pos .~ p & destination .~ p)
destructor' i Plant = return ()
destructor' i Herbivore = do
x <- use $ lives . ix j
let view = (x^.viewRate) * 80
replicateM_ (floor $ fromIntegral (x^.strength) / 10) $ plantAround (x^.pos) view
destructor' i Carnivore = do
x <- use $ lives . ix j
let view = (x^.viewRate) * 80
replicateM_ (floor $ fromIntegral (x^.strength) / 10) $ plantAround (x^.pos) view
evolve :: Int -> StateT World IO ()
evolve j = do
ai <- use (lives . ix j)
zoom (lives . ix j) $ runAI
eat j
evolve' j (ai^.creature)
where
getAI :: Int -> StateT World IO Alife
getAI i = use (lives . ix i)
runAI :: StateT Alife IO ()
runAI = do
ai <- get
V2 px py <- (-) <$> use destination <*> use pos
arg .= (atan2 py px) `approx` ((2 * pi) * 15 / 360)
spR <- use speedRate
when (ai^.agility > 20 && norm (V2 px py) > 10) $ do
let f = \x -> x / 300 + 1
let q = \x -> sqrt x / 150 + 1
let vel = fromIntegral (ai^.agility) / 20 * f (100 - ai^.life) / 2 / q (fromIntegral $ ai^.counter)
pos += scaleV2 (vel * spR) (V2 (cos $ ai^.arg) (sin $ ai^.arg))
when (ai^.life < 0) $ condition .= Dead
life -= (fromIntegral (ai^.strength) / 1000 + fromIntegral (ai^.agility) / 1000) * spR
counter += 1
eat :: Int -> StateT World IO Bool
eat i = do
x <- getAI i
xs <- filter (\(_,z) -> z^.life > 0) <$> searchIn i 10 (eatBy $ x^.creature)
when (xs /= []) $ do
let (iy,y) = head xs
lives . ix i . life += (fromIntegral $ y^.strength)^2 / 200
lives . ix i . life %= min 100
lives . ix iy . life -= (fromIntegral $ x^.strength)^3 / 100000
-- canvas %= cons (color (V4 1 0.5 0 1) $ line [y^.pos, x^.pos])
return $ xs == []
searchIn :: Int -> Double -> [Creature] -> StateT World IO [(Int, Alife)]
searchIn i d targets = do
x <- getAI i
ls <- use lives
return $
sortBy (comparing (\(_,y) -> distance (y^.pos) (x^.pos))) $
IM.assocs $
IM.filter (\a -> distance (a^.pos) (x^.pos) < d && a^.creature `elem` targets) $ ls
randomWalk :: Int -> StateT World IO ()
randomWalk i = do
x <- getAI i
when ((x^.counter) `mod` 500 == 0 || distance (x^.pos) (x^.destination) < 10) $ do
p <- randomRIO (getInside $ x^.pos - 150, getInside $ x^.pos + 150)
lives . ix i . destination .= p
runAwayFrom :: Int -> Double -> [Creature] -> StateT World IO ()
runAwayFrom i d es = do
x <- getAI i
es' <- fmap snd <$> searchIn i d es
when (es' /= []) $ do
let tvec = sum $ fmap (\e -> let v = x^.pos - e^.pos in scaleV2 (d - norm v) v) es'
lives . ix i . destination .= (getInside $ tvec)
plantAround :: Vec2 -> Double -> StateT World IO ()
plantAround x d = do
let V2 c1 c2 = V2 (x - pure d) (x + pure d) `intersection` V2 0 windowSize
p <- randomRIO (c1, c2)
spawn (newLife Plant & pos .~ p & destination .~ p)
evolve' i Plant = do
x <- getAI i
plants <- IM.filter (\a -> a ^. creature == Plant && distance (a^.pos) (x^.pos) < 100) <$> use lives
when (x ^. counter `mod` 150 == 0 && IM.size plants < 10) $ replicateM_ 3 $ plantAround (x^.pos) 30
evolve' i Herbivore = do
x <- getAI i
let view = (x^.viewRate) * 80
eat i >>= \b -> when b $ do
if
| x^.condition == Idle || x^.condition == Hunting -> do
xs <- fmap snd <$> searchIn i view (eatBy $ x^.creature)
ys <- fmap snd <$> searchIn i 20 (eatBy $ x^.creature)
zs <- fmap snd <$> searchIn i 5 (eatBy $ x^.creature)
unless (zs /= []) $ do
if
| ys /= [] -> lives . ix i . destination .= (head ys ^. pos)
| x^.life < 50 && xs /= [] -> lives . ix i . destination .= (head xs ^. pos)
| x^.life > 80
&& (200 < x^.counter)
&& x^.counter `mod` 500 == 0 -> spawn (newLife (x^.creature) & pos .~ (x^.pos))
| otherwise -> do
randomWalk i
runAwayFrom i (view / 2) [Carnivore]
| x^.condition == Dead -> return ()
evolve' i Carnivore = do
x <- getAI i
let view = (x^.viewRate) * 40
-- canvas %= cons (translate (x^.pos) $ color (V4 0.4 0.3 1 0.5) $ circleOutline $ if x^.life < 70 then double2Float view else 20)
eat i >>= \b -> when b $ do
if
| x^.condition == Idle || x^.condition == Hunting -> do
xs <- fmap snd <$> searchIn i view (eatBy $ x^.creature)
ys <- fmap snd <$> searchIn i 20 (eatBy $ x^.creature)
zs <- fmap snd <$> searchIn i 5 (eatBy $ x^.creature)
unless (zs /= []) $ do
if
| ys /= [] -> lives . ix i . destination .= (head ys ^. pos)
| x^.life < 70 && xs /= [] -> do
lives . ix i . condition .= Hunting
lives . ix i . destination .= (head xs ^. pos)
| x^.life > 80
&& (200 < x^.counter)
&& x^.counter `mod` 500 == 0 -> do
lives . ix i . condition .= Idle
spawn (newLife (x^.creature) & pos .~ (x^.pos))
| otherwise -> do
lives . ix i . condition .= Idle
randomWalk i
| x^.condition == Dead -> return ()
mainloop :: IORef World -> [Bitmap] -> Canvas -> IO ()
mainloop ref bmps cv = void $ do
render cv . stroke $ circle (0,0) 0
forM_ [1..400] $ \i -> do
k <- randomRIO (0,600)
renderOnTop cv $ do
draw (bmps !! 2) (k,i)
-- stroke $ circle (k,i) 10
onceStateT ref $ do
ls <- use lives
forM_ (IM.keys ls) $ evolve
-- onceStateT ref $ do
-- globalCounter += 1
--
-- r <- use running
-- when r $ do
-- withElem "alife-num" $ \e -> do
-- s <- IM.size <$> use lives
-- setProp e "innerText" $ show s
-- withElem "alife-all-num" $ \e -> do
-- s <- last . IM.keys <$> use lives
-- setProp e "innerText" $ show s
--
-- ls <- use lives
--
-- forM_ (IM.keys ls) $ evolve
-- forM_ (IM.assocs ls) $ \(i,x) -> do
-- when (x^.condition == Dead) $ do
-- destruct i
-- lives %= IM.delete i
--
-- render cv $ do
-- forM_ (IM.assocs ls) $ \(i,x) -> do
-- let ps = M.fromList $ zip [Plant, Herbivore, Carnivore] [0..]
-- draw (bmps !! (ps M.! (x^.creature))) $ fromV2 $ x^.pos
requestAnimationFrame $ \p -> do
onceStateT ref $ do
t <- use timeStamp
r <- use running
when r $ do
withElem "fps" $ \e -> do
setProp e "innerText" $ show $ floor $ 1000 / (p - t)
timeStamp .= p
mainloop ref bmps cv
main :: IO ()
main = do
Just cv <- getCanvasById "hakoniwa-canvas"
bmps <- mapM loadBitmap ["img/creature0.png", "img/creature1.png", "img/creature2.png"]
completeLoadBitmaps bmps $ do
ref <- newIORef $ World IM.empty Nothing [] 0 True 0
replicateM_ 50 $ onceStateT ref $ do
p <- liftIO $ randomRIO (pure 0, windowSize)
spawn (newLife Plant & pos .~ p & destination .~ p)
replicateM_ 20 $ onceStateT ref $ do
p <- liftIO $ randomRIO (pure 0, windowSize)
spawn (newLife Herbivore & pos .~ p & destination .~ p)
replicateM_ 3 $ onceStateT ref $ do
p <- liftIO $ randomRIO (pure 0, windowSize)
spawn (newLife Carnivore & pos .~ p & destination .~ p)
withElem "game-run" $ \e -> do
onEvent e Click $ \_ -> do
onceStateT ref $ running .= True
withElem "game-stop" $ \e -> do
onEvent e Click $ \_ -> do
onceStateT ref $ running .= False
mainloop ref bmps cv
onceStateT :: IORef s -> StateT s IO a -> IO a
onceStateT ref m = do
x <- readIORef ref
(a,x') <- runStateT m x
writeIORef ref $! x'
return a
| myuon/hakoniwa | src/main.hs | mit | 14,889 | 0 | 32 | 4,337 | 6,743 | 3,428 | 3,315 | -1 | -1 |
{-# LANGUAGE ForeignFunctionInterface, JavaScriptFFI, UnliftedFFITypes,
GHCForeignImportPrim, DeriveDataTypeable, GHCForeignImportPrim #-}
module GHCJS.Foreign.Callback
( Callback
, OnBlocked(..)
, releaseCallback
-- * asynchronous callbacks
, asyncCallback
, asyncCallback1
, asyncCallback2
, asyncCallback3
-- * synchronous callbacks
, syncCallback
, syncCallback1
, syncCallback2
, syncCallback3
-- * synchronous callbacks that return a value
, syncCallback'
, syncCallback1'
, syncCallback2'
, syncCallback3'
) where
import GHCJS.Concurrent
import GHCJS.Marshal
import GHCJS.Marshal.Pure
import GHCJS.Foreign.Callback.Internal
import GHCJS.Prim
import GHCJS.Types
import qualified GHC.Exts as Exts
import Data.Typeable
import Unsafe.Coerce
{- |
When you create a callback, the Haskell runtime stores a reference to
the exported IO action or function. This means that all data referenced by the
exported value stays in memory, even if nothing outside the Haskell runtime
holds a reference to the callback.
Use 'releaseCallback' to free the reference. Subsequent calls from JavaScript
to the callback will result in an exception.
-}
releaseCallback :: Callback a -> IO ()
releaseCallback x = js_release x
{- | Make a callback (JavaScript function) that runs the supplied IO action in a synchronous
thread when called.
Call 'releaseCallback' when done with the callback, freeing memory referenced
by the IO action.
-}
syncCallback :: OnBlocked -- ^ what to do when the thread blocks
-> IO () -- ^ the Haskell action
-> IO (Callback (IO ())) -- ^ the callback
syncCallback onBlocked x = js_syncCallback (onBlocked == ContinueAsync) (unsafeCoerce x)
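-- An illustrative usage sketch; how the callback reaches JavaScript (e.g. a
-- foreign import taking a @Callback (IO ())@) is application specific and not
-- shown by this module:
--
-- > do cb <- syncCallback ContinueAsync (putStrLn "called from JavaScript")
-- >    -- hand 'cb' to JavaScript here, then free it once it is no longer used
-- >    releaseCallback cb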
{- | Make a callback (JavaScript function) that runs the supplied IO function in a synchronous
thread when called. The callback takes one argument that it passes as a JSVal value to
the Haskell function.
Call 'releaseCallback' when done with the callback, freeing data referenced
by the function.
-}
syncCallback1 :: OnBlocked -- ^ what to do when the thread blocks
-> (JSVal -> IO ()) -- ^ the Haskell function
-> IO (Callback (JSVal -> IO ())) -- ^ the callback
syncCallback1 onBlocked x = js_syncCallbackApply (onBlocked == ContinueAsync) 1 (unsafeCoerce x)
{- | Make a callback (JavaScript function) that runs the supplied IO function in a synchronous
thread when called. The callback takes two arguments that it passes as JSVal values to
the Haskell function.
Call 'releaseCallback' when done with the callback, freeing data referenced
by the function.
-}
syncCallback2 :: OnBlocked -- ^ what to do when the thread blocks
-> (JSVal -> JSVal -> IO ()) -- ^ the Haskell function
-> IO (Callback (JSVal -> JSVal -> IO ())) -- ^ the callback
syncCallback2 onBlocked x = js_syncCallbackApply (onBlocked == ContinueAsync) 2 (unsafeCoerce x)
{- | Make a callback (JavaScript function) that runs the supplied IO function in a synchronous
thread when called. The callback takes three arguments that it passes as JSVal values to
the Haskell function.
Call 'releaseCallback' when done with the callback, freeing data referenced
by the function.
-}
syncCallback3 :: OnBlocked -- ^ what to do when the thread blocks
-> (JSVal -> JSVal -> JSVal -> IO ()) -- ^ the Haskell function
-> IO (Callback (JSVal -> JSVal -> JSVal -> IO ())) -- ^ the callback
syncCallback3 onBlocked x = js_syncCallbackApply (onBlocked == ContinueAsync) 3 (unsafeCoerce x)
{- | Make a callback (JavaScript function) that runs the supplied IO action in a synchronous
thread when called, returning the action's result to the JavaScript caller.
Call 'releaseCallback' when done with the callback, freeing memory referenced
by the IO action.
-}
syncCallback' :: IO JSVal
-> IO (Callback (IO JSVal))
syncCallback' x = js_syncCallbackReturn (unsafeCoerce x)
syncCallback1' :: (JSVal -> IO JSVal)
-> IO (Callback (JSVal -> IO JSVal))
syncCallback1' x = js_syncCallbackApplyReturn 1 (unsafeCoerce x)
syncCallback2' :: (JSVal -> JSVal -> IO JSVal)
-> IO (Callback (JSVal -> JSVal -> IO JSVal))
syncCallback2' x = js_syncCallbackApplyReturn 2 (unsafeCoerce x)
syncCallback3' :: (JSVal -> JSVal -> JSVal -> IO JSVal)
-> IO (Callback (JSVal -> JSVal -> JSVal -> IO JSVal))
syncCallback3' x = js_syncCallbackApplyReturn 3 (unsafeCoerce x)
{- | Make a callback (JavaScript function) that runs the supplied IO action in an asynchronous
thread when called.
Call 'releaseCallback' when done with the callback, freeing data referenced
by the IO action.
-}
asyncCallback :: IO () -- ^ the action that the callback runs
-> IO (Callback (IO ())) -- ^ the callback
asyncCallback x = js_asyncCallback (unsafeCoerce x)
asyncCallback1 :: (JSVal -> IO ()) -- ^ the function that the callback calls
               -> IO (Callback (JSVal -> IO ())) -- ^ the callback
asyncCallback1 x = js_asyncCallbackApply 1 (unsafeCoerce x)
asyncCallback2 :: (JSVal -> JSVal -> IO ()) -- ^ the Haskell function that the callback calls
-> IO (Callback (JSVal -> JSVal -> IO ())) -- ^ the callback
asyncCallback2 x = js_asyncCallbackApply 2 (unsafeCoerce x)
asyncCallback3 :: (JSVal -> JSVal -> JSVal -> IO ()) -- ^ the Haskell function that the callback calls
-> IO (Callback (JSVal -> JSVal -> JSVal -> IO ())) -- ^ the callback
asyncCallback3 x = js_asyncCallbackApply 3 (unsafeCoerce x)
-- ----------------------------------------------------------------------------
foreign import javascript unsafe "h$makeCallback(h$runSync, [$1], $2)"
js_syncCallback :: Bool -> Exts.Any -> IO (Callback (IO b))
foreign import javascript unsafe "h$makeCallback(h$run, [], $1)"
js_asyncCallback :: Exts.Any -> IO (Callback (IO b))
foreign import javascript unsafe "h$makeCallback(h$runSyncReturn, [false], $1)"
js_syncCallbackReturn :: Exts.Any -> IO (Callback (IO JSVal))
foreign import javascript unsafe "h$makeCallbackApply($2, h$runSync, [$1], $3)"
js_syncCallbackApply :: Bool -> Int -> Exts.Any -> IO (Callback b)
foreign import javascript unsafe "h$makeCallbackApply($1, h$run, [], $2)"
js_asyncCallbackApply :: Int -> Exts.Any -> IO (Callback b)
foreign import javascript unsafe
"h$makeCallbackApply($1, h$runSyncReturn, [false], $2)"
js_syncCallbackApplyReturn :: Int -> Exts.Any -> IO (Callback b)
foreign import javascript unsafe "h$release"
js_release :: Callback a -> IO ()
| ghcjs/ghcjs-base | GHCJS/Foreign/Callback.hs | mit | 7,041 | 21 | 16 | 1,792 | 1,222 | 638 | 584 | 83 | 1 |
module Timeline.Parser
( parseGraphs
) where
import Data.Text (Text)
import qualified Data.Text as T
import Text.Megaparsec
import Text.Megaparsec.Text
import Timeline.Parser.Aggregate
import Timeline.Parser.Graph
import Timeline.Parser.Internal
import Timeline.Types
parseGraphs :: Text -> Either String Graphs
parseGraphs = parseOnly (graphsParser <* eof) . T.strip
graphsParser :: Parser Graphs
graphsParser = do
graphResults <- concat <$> graphAndAggregatesParser `sepBy1` newline
let badListLengths = differentListLengths graphLength graphResults
noPoints = missingPoints graphLength graphResults
case (badListLengths, noPoints) of
(True, _) -> fail $ "Not all graphs had the same length: " ++ show (map graphLength graphResults)
(_, True) -> fail "No points were provided"
(_, _) -> return $ Graphs graphResults
where
missingPoints f = elem 0 . map f
graphAndAggregatesParser :: Parser [Graph]
graphAndAggregatesParser = do
initial <- graphParser
additional <- many $ choice [smaParser initial, semaParser initial, demaParser initial]
return (initial : additional)
graphParser :: Parser Graph
graphParser = Graph
<$> nameParser
<*> choice [barParser, lineParser, stackedBarParser, scatterPlotParser, boxPlotParser]
nameParser :: Parser (Maybe Text)
nameParser = fmap T.pack <$> optional (char '"' *> manyTill anyChar (char '"') <* char ':') <* space
| joshuaclayton/timeline | src/Timeline/Parser.hs | mit | 1,513 | 0 | 13 | 333 | 420 | 221 | 199 | 33 | 3 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.HTMLDivElement
(js_setAlign, setAlign, js_getAlign, getAlign, HTMLDivElement,
castToHTMLDivElement, gTypeHTMLDivElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
foreign import javascript unsafe "$1[\"align\"] = $2;" js_setAlign
:: JSRef HTMLDivElement -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLDivElement.align Mozilla HTMLDivElement.align documentation>
setAlign ::
(MonadIO m, ToJSString val) => HTMLDivElement -> val -> m ()
setAlign self val
= liftIO (js_setAlign (unHTMLDivElement self) (toJSString val))
foreign import javascript unsafe "$1[\"align\"]" js_getAlign ::
JSRef HTMLDivElement -> IO JSString
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLDivElement.align Mozilla HTMLDivElement.align documentation>
getAlign ::
(MonadIO m, FromJSString result) => HTMLDivElement -> m result
getAlign self
= liftIO (fromJSString <$> (js_getAlign (unHTMLDivElement self))) | plow-technologies/ghcjs-dom | src/GHCJS/DOM/JSFFI/Generated/HTMLDivElement.hs | mit | 1,804 | 14 | 11 | 235 | 474 | 288 | 186 | 30 | 1 |
-----------------------------------------------------------------------------
-- $Id: HsPretty.hs,v 1.10 2001/12/17 03:38:54 bjpop Exp $
--
-- (c) The GHC Team, Noel Winstanley 1997-2000
--
-- Pretty printer for Haskell.
--
-----------------------------------------------------------------------------
module FrontEnd.HsPretty (PPLayout(..),PPHsMode(..),
render,
ppHsModule,
ppHsDecl,
ppHsDecls,
ppHsExp,
ppHsStmt,
ppHsPat,
ppHsAlt,
ppGAlt,
ppHsGuardedRhs
) where
import Data.Char
import qualified Text.PrettyPrint.HughesPJ as P
import Doc.DocLike(TextLike(..),DocLike(..))
import Doc.PPrint(pprint)
import FlagDump as FD
import FrontEnd.HsSyn
import FrontEnd.Rename(unRename)
import FrontEnd.SrcLoc(Located(..))
import Name.Name
import Name.Names
import Options
import qualified Doc.DocLike as DL
import qualified Doc.PPrint as P
infixl 5 $$$
-----------------------------------------------------------------------------
-- pretty printing monad
data PPLayout = PPOffsideRule -- classical layout
| PPSemiColon -- classical layout made explicit
| PPInLine -- inline decls, \n between them
| PPNoLayout -- everything on a single line
deriving Eq
type Indent = Int
data PPHsMode = PPHsMode {
classIndent, -- class, instance
doIndent,
caseIndent,
letIndent,
whereIndent :: Indent,
onsideIndent :: Indent,
spacing :: Bool, -- blank lines between statements?
layout :: PPLayout, -- to do
comments :: Bool -- to come later
}
defaultMode = PPHsMode{
classIndent = 8,
doIndent = 3,
caseIndent = 4,
letIndent = 4,
whereIndent = 6,
onsideIndent = 2,
spacing = True,
layout = PPOffsideRule,
comments = True
}
newtype DocM s a = DocM (s -> a)
instance Functor (DocM s) where
fmap f xs = do x <- xs; return (f x)
instance Monad (DocM s) where
(>>=) = thenDocM
(>>) = then_DocM
return = retDocM
{-# INLINE thenDocM #-}
{-# INLINE then_DocM #-}
{-# INLINE retDocM #-}
{-# INLINE unDocM #-}
{-# INLINE getPPEnv #-}
thenDocM m k = DocM $ (\s -> case unDocM m $ s of a -> unDocM (k a) $ s)
then_DocM m k = DocM $ (\s -> case unDocM m $ s of a -> unDocM k $ s)
retDocM a = DocM (\s -> a)
unDocM :: DocM s a -> (s -> a)
unDocM (DocM f) = f
-- all this extra stuff, just for this one function..
getPPEnv :: DocM s s
getPPEnv = DocM id
-- So that pp code still looks the same
-- this means we lose some generality though
type Doc = DocM PPHsMode P.Doc
-- The pretty printing combinators
nest :: Int -> Doc -> Doc
nest i m = m >>= return . P.nest i
dropAs (HsAsPat _ e) = e
dropAs e = e
-- Literals
instance DL.TextLike Doc where
empty = return P.empty
text = return . P.text
char = return . P.char
int :: Int -> Doc
int = return . P.int
integer :: Integer -> Doc
integer = return . P.integer
float :: Float -> Doc
float = return . P.float
double :: Double -> Doc
double = return . P.double
-- Simple Combining Forms
parens, brackets, braces :: Doc -> Doc
parens d = d >>= return . P.parens
parenszh d = d >>= \d' -> return $ P.text "(# " P.<> d' P.<> P.text " #)"
brackets d = d >>= return . P.brackets
braces d = d >>= return . P.braces
-- Constants
semi,comma,equals :: Doc
semi = return P.semi
comma = return P.comma
equals = return P.equals
-- Combinators
--
instance DocLike Doc where
aM <> bM = do{a<-aM;b<-bM;return (a P.<> b)}
aM <+> bM = do{a<-aM;b<-bM;return (a P.<+> b)}
aM <$> bM = do{a<-aM;b<-bM;return (a P.$$ b)}
hcat dl = sequence dl >>= return . P.hcat
hsep dl = sequence dl >>= return . P.hsep
vcat dl = sequence dl >>= return . P.vcat
($$) :: Doc -> Doc -> Doc
aM $$ bM = do{a<-aM;b<-bM;return (a P.$$ b)}
fsep :: [Doc] -> Doc
fsep dl = sequence dl >>= return . P.fsep
-- Yuk, had to cut-n-paste this one from Pretty.hs
punctuate :: Doc -> [Doc] -> [Doc]
punctuate p [] = []
punctuate p (d:ds) = go d ds
where
go d [] = [d]
go d (e:es) = (d <> p) : go e es
-- this is the equivalent of runM now.
renderWithMode :: PPHsMode -> Doc -> String
renderWithMode ppMode d = P.render . unDocM d $ ppMode
render :: Doc -> String
render = renderWithMode defaultMode
------------------------- Pretty-Print a Module --------------------
ppHsModule :: HsModule -> Doc
ppHsModule (HsModule mod _ mbExports imp decls _ _) =
topLevel (ppHsModuleHeader mod mbExports)
(map ppHsImportDecl imp ++ map ppHsDecl decls)
ppHsDecls :: [HsDecl] -> Doc
ppHsDecls ds = vcat $ map ppHsDecl ds
-------------------------- Module Header ------------------------------
ppHsModuleHeader :: Module -> Maybe [HsExportSpec] -> Doc
ppHsModuleHeader modName mbExportList = mySep [
text "module",
text $ show modName,
maybePP (parenList . map ppHsExportSpec) mbExportList,
text "where"]
ppHsExportSpec :: HsExportSpec -> Doc
ppHsExportSpec e = f e where
f (HsEVar name) = ppHsQNameParen name
f (HsEAbs name) = ppHsQName name
f (HsEThingAll name) = ppHsQName name <> text"(..)"
f (HsEThingWith name nameList) = ppHsQName name <> (parenList . map ppHsQNameParen $ nameList)
f (HsEModuleContents (show -> name)) = text "module" <+> text name
f (HsEQualified ClassName e) = text "class" <+> ppHsExportSpec e
f (HsEQualified SortName e) = text "kind" <+> ppHsExportSpec e
f (HsEQualified TypeConstructor e) = text "type" <+> ppHsExportSpec e
f (HsEQualified DataConstructor e) = text "data" <+> ppHsExportSpec e
f (HsEQualified n e) = tshow n <+> ppHsExportSpec e
tshow x = text (show x)
ppHsImportDecl (HsImportDecl pos (show -> mod) bool mbName mbSpecs) =
mySep [text "import",
if bool then text "qualified" else empty,
text mod,
maybePP (\(show -> n) -> text "as" <+> text n) mbName,
maybePP exports mbSpecs]
where
exports (b,specList)
| b = text "hiding" <+> (parenList . map ppHsExportSpec $ specList)
| otherwise = parenList . map ppHsExportSpec $ specList
ppHsTName (n,Nothing) = ppHsName n
ppHsTName (n,Just t) = parens (ppHsName n <+> text "::" <+> ppHsType t)
------------------------- Declarations ------------------------------
ppHsRule prules@HsRule {} = text (show (hsRuleString prules)) <+> text "forall" <+> vars <+> text "." $$ nest 4 rest where
vars = hsep (map ppHsTName $ hsRuleFreeVars prules)
rest = ppHsExp (hsRuleLeftExpr prules) <+> text "=" <+> ppHsExp (hsRuleRightExpr prules)
ppClassHead :: HsClassHead -> Doc
ppClassHead (HsClassHead c n ts) = ans c where
ans [] = f n ts
ans c = ppHsContext c <+> text "=>" <+> f n ts
f n ts = ppHsType (foldl HsTyApp (HsTyCon n) ts)
ppHsDecl :: HsDecl -> Doc
ppHsDecl (HsActionDecl _ p e) = ppHsPat p <+> text "<-" <+> ppHsExp e
ppHsDecl (HsDeclDeriving _ e) = text "derive instance" <+> ppClassHead e
ppHsDecl (HsPragmaRules rs@(HsRule { hsRuleIsMeta = False }:_)) = text "{-# RULES" $$ nest 4 (myVcat (map ppHsRule rs)) $$ text "#-}"
ppHsDecl (HsPragmaRules rs@(HsRule { hsRuleIsMeta = True }:_)) = text "{-# METARULES" $$ nest 4 (myVcat (map ppHsRule rs)) $$ text "#-}"
--ppHsDecl prules@HsPragmaRules {} = text ("{-# RULES " ++ show (hsDeclString prules)) <+> text "forall" <+> vars <+> text "." $$ nest 4 rest $$ text "#-}" where
-- vars = hsep (map ppHsTName $ hsDeclFreeVars prules)
-- rest = ppHsExp (hsDeclLeftExpr prules) <+> text "=" <+> ppHsExp (hsDeclRightExpr prules)
ppHsDecl prules@HsPragmaSpecialize {} = text "{-# SPECIALIZE ... #-}" -- ++ show (hsDeclString prules)) <+> text "forall" <+> vars <+> text "." $$ nest 4 rest $$ text "#-}" where
-- vars = hsep (map ppHsTName $ hsDeclFreeVars prules)
-- rest = ppHsExp (hsDeclLeftExpr prules) <+> text "=" <+> ppHsExp (hsDeclRightExpr prules)
ppHsDecl fd@(HsForeignDecl _ _ n qt) = text "ForeignDecl" <+> ppHsName n <+> ppHsQualType qt <+> text (show fd)
ppHsDecl fd@(HsForeignExport _ _ n qt) = text "ForeignExport" <+> ppHsName n <+> ppHsQualType qt <+> text (show fd)
ppHsDecl (HsTypeDecl loc name nameList htype) =
--blankline $
mySep ( [text "type",ppHsName name]
++ map ppHsType nameList
++ [equals, ppHsType htype])
ppHsDecl HsDataDecl { .. } = ans where
ans = mySep ([declType, ppHsContext hsDeclContext, ppHsName hsDeclName]
++ map ppHsName hsDeclArgs)
<+> (myVcat (zipWith (<+>) (equals : repeat (char '|'))
(map ppHsConstr hsDeclCons))
$$$ ppHsDeriving hsDeclDerives)
declType = case hsDeclDeclType of
DeclTypeKind -> text "data kind"
DeclTypeData -> text "data"
DeclTypeNewtype -> text "newtype"
-- special case for empty class declaration
ppHsDecl (HsClassDecl pos qualType []) =
--blankline $
mySep [text "class", ppClassHead qualType]
ppHsDecl (HsClassDecl pos qualType declList) =
--blankline $
mySep [text "class", ppClassHead qualType, text "where"]
$$$ body classIndent (map ppHsDecl declList)
ppHsDecl (HsClassAliasDecl pos name args context classes declList) =
--blankline $
mySep ([text "class alias", ppHsName name] ++ map ppHsType args
++ [equals, ppHsContext context, text "=>", ppHsContext classes, text "where"])
$$$ body classIndent (map ppHsDecl declList)
-- m{spacing=False}
-- special case for empty instance declaration
ppHsDecl (HsInstDecl pos qualType []) =
--blankline $
mySep [text "instance", ppClassHead qualType]
ppHsDecl (HsInstDecl pos qualType declList) =
--blankline $
mySep [text "instance", ppClassHead qualType, text "where"]
$$$ body classIndent (map ppHsDecl declList)
ppHsDecl (HsDefaultDecl pos htype) =
--blankline $
text "default" <+> ppHsType htype
ppHsDecl (HsTypeSig pos nameList qualType) =
--blankline $
mySep ((punctuate comma . map ppHsNameParen $ nameList)
++ [text "::", ppHsQualType qualType])
{-
ppHsDecl (HsFunBind pos matches)
= foldr ($$$) empty (map ppMatch matches)
-}
ppHsDecl (HsFunBind matches)
= foldr ($$$) empty (map ppMatch matches)
ppHsDecl (HsPatBind pos pat rhs whereDecls)
= myFsep [ppHsPatOrOp pat, ppHsRhs rhs] $$$ ppWhere whereDecls
where
-- special case for single operators
ppHsPatOrOp (HsPVar n) = ppHsNameParen n
ppHsPatOrOp p = ppHsPat p
ppHsDecl (HsInfixDecl pos assoc prec nameList) =
--blankline $
mySep ([ppAssoc assoc, int prec]
++ (punctuate comma . map ppHsNameInfix $ nameList))
where
ppAssoc HsAssocNone = text "infix"
ppAssoc HsAssocLeft = text "infixl"
ppAssoc HsAssocRight = text "infixr"
ppAssoc HsAssocPrefix = text "prefix"
ppAssoc HsAssocPrefixy = text "prefixy"
ppHsDecl (HsPragmaProps _ w ns) = text "{-# " <> text w <+> mySep (punctuate comma . map ppHsNameParen $ ns) <+> text "#-}"
ppHsDecl _ = error "ppHsDecl: unknown construct"
ppMatch (HsMatch pos f ps rhs whereDecls)
= myFsep (ppHsQNameParen f : map parenPrec ps ++ [ppHsRhs rhs])
$$$ ppWhere whereDecls
ppWhere [] = empty
ppWhere l = nest 2 (text "where" $$$ body whereIndent (map ppHsDecl l))
------------------------- Data & Newtype Bodies -------------------------
mprintExists :: HsConDecl -> Doc
mprintExists hcd = case hsConDeclExists hcd of
[] -> empty
vs -> text "exists" <+> hsep (map (return . pprint) vs) <+> char '.'
ppHsConstr :: HsConDecl -> Doc
ppHsConstr cd@HsRecDecl { hsConDeclName = name, hsConDeclRecArg = fieldList } =
mprintExists cd <+> ppHsName name
<> (braceList . map ppField $ fieldList)
ppHsConstr cd@HsConDecl { hsConDeclName = name, hsConDeclConArg = typeList}
| isSymbolName name && length typeList == 2 =
let [l, r] = typeList in
mprintExists cd <+> myFsep [ppHsBangType l, ppHsName name, ppHsBangType r]
| otherwise = mprintExists cd <+> (mySep $ (ppHsName name) :
map ppHsBangType typeList)
ppField :: ([HsName],HsBangType) -> Doc
ppField (names, ty) = myFsepSimple $ (punctuate comma . map ppHsName $ names) ++
[text "::", ppHsBangType ty]
ppHsBangType :: HsBangType -> Doc
ppHsBangType (HsBangedTy ty) = char '!' <> ppHsTypeArg ty
ppHsBangType (HsUnBangedTy ty) = ppHsTypeArg ty
ppHsDeriving :: [HsName] -> Doc
ppHsDeriving [] = empty
ppHsDeriving [d] = text "deriving" <+> ppHsQName d
ppHsDeriving ds = text "deriving" <+> parenList (map ppHsQName ds)
------------------------- Types -------------------------
ppHsQualType :: HsQualType -> Doc
ppHsQualType (HsQualType [] htype) = ppHsType htype
ppHsQualType (HsQualType context htype) = -- if it's HsQualType, context is never empty
myFsep [ ppHsContext context, text "=>", ppHsType htype]
parensIf :: Bool -> Doc -> Doc
parensIf True = parens
parensIf False = id
instance P.PPrint Doc HsType where
pprint = ppHsType
ppHsType :: HsType -> Doc
ppHsType = ppHsTypePrec 0
ppHsTypeArg :: HsType -> Doc
ppHsTypeArg = ppHsTypePrec 2
-- precedences:
-- 0: top level
-- 1: left argument of ->
-- 2: argument of constructor
ppHsTypePrec :: Int -> HsType -> Doc
ppHsTypePrec p (HsTyFun a b) =
parensIf (p > 0) $
myFsep [ppHsTypePrec 1 a, text "->", ppHsType b]
ppHsTypePrec p (HsTyAssoc) = text "<assoc>"
ppHsTypePrec p (HsTyEq a b) =
parensIf (p > 0) $ myFsep [ppHsType a, text "=", ppHsType b]
ppHsTypePrec p (HsTyTuple l) = parenList . map ppHsType $ l
ppHsTypePrec p (HsTyUnboxedTuple l) = parenListzh . map ppHsType $ l
-- special case
ppHsTypePrec p (HsTyApp (HsTyCon lcons) b ) | lcons == nameName tc_List = brackets $ ppHsType b
ppHsTypePrec p (HsTyApp a b) =
parensIf (p > 1) $ myFsep[ppHsType a, ppHsTypeArg b]
ppHsTypePrec p (HsTyVar name) = ppHsName name
-- special case
ppHsTypePrec p (HsTyCon name) = ppHsQName name
ppHsTypePrec p HsTyForall { hsTypeVars = vs, hsTypeType = qt } = parensIf (p > 1) $ do
pp <- ppHsQualType qt
return $ DL.text "forall" DL.<+> DL.hsep (map pprint vs) DL.<+> DL.char '.' DL.<+> pp
ppHsTypePrec p HsTyExists { hsTypeVars = vs, hsTypeType = qt } = parensIf (p > 1) $ do
pp <- ppHsQualType qt
return $ DL.text "exists" DL.<+> DL.hsep (map pprint vs) DL.<+> DL.char '.' DL.<+> pp
ppHsTypePrec _ HsTyExpKind { hsTyLType = Located _ t, hsTyKind = k } = do
t <- ppHsType t
return $ DL.parens ( t DL.<+> DL.text "::" DL.<+> pprint k)
ppHsTypePrec _ _ = error "HsPretty.ppHsTypePrec: bad."
instance DL.DocLike d => P.PPrint d HsKind where
pprint (HsKind k) = pprint k
pprint (HsKindFn (HsKind k) t) = pprint k DL.<+> DL.text "->" DL.<+> pprint t
pprint (HsKindFn a b) = DL.parens (pprint a) DL.<+> DL.text "->" DL.<+> pprint b
------------------------- Expressions -------------------------
ppHsRhs :: HsRhs -> Doc
ppHsRhs (HsUnGuardedRhs exp) = equals <+> ppHsExp exp
ppHsRhs (HsGuardedRhss guardList) =
myVcat . map (ppHsGuardedRhs equals) $ guardList
ppHsGuardedRhs :: Doc -> HsComp -> Doc
ppHsGuardedRhs equals (HsComp pos guard body) =
myFsep [ char '|',
hsep $ punctuate comma $ map ppHsStmt guard,
equals,
ppHsExp body]
{-# NOINLINE ppHsLit #-}
ppHsLit :: HsLiteral -> Doc
ppHsLit (HsInt i) = integer i
ppHsLit (HsChar c) = text (show c)
ppHsLit (HsString s) = text (show s)
ppHsLit (HsFrac r) = double (fromRational r)
-- GHC unboxed literals:
ppHsLit (HsCharPrim c) = text (show c) <> char '#'
ppHsLit (HsStringPrim s) = text (show s) <> char '#'
ppHsLit (HsIntPrim i) = integer i <> char '#'
ppHsLit (HsFloatPrim r) = float (fromRational r) <> char '#'
ppHsLit (HsDoublePrim r) = double (fromRational r) <> text "##"
-- GHC extension:
ppHsLit (HsLitLit s) = text "''" <> text s <> text "''"
{-# NOINLINE ppHsExp #-}
ppHsExp :: HsExp -> Doc
ppHsExp (HsLit l) = ppHsLit l
-- lambda stuff
ppHsExp (HsInfixApp a op b) = myFsep[mpifx a, ppInfix op, mpifx b]
where
mpifx x@HsInfixApp {} = ppHsExp $ HsParen x
mpifx x = ppHsExp x
ppInfix (HsAsPat as (HsVar n)) | dump FD.Aspats = ppHsName as <> char '@' <> ppHsQNameInfix n
ppInfix (HsAsPat _ (HsVar n)) = ppHsQNameInfix n
ppInfix (HsAsPat as (HsCon n)) | dump FD.Aspats = ppHsName as <> char '@' <> ppHsQNameInfix n
ppInfix (HsAsPat _ (HsCon n)) = ppHsQNameInfix n
ppInfix (HsVar n) = ppHsQNameInfix n
ppInfix (HsCon n) = ppHsQNameInfix n
ppInfix n = error $ "illegal infix expression: " ++ show n
ppHsExp (HsNegApp e) = myFsep [char '-', ppHsExp e]
ppHsExp (HsApp a b) = myFsep [ppHsExp a, ppHsExp b]
ppHsExp HsError { hsExpString = msg } = text $ "<error:" ++ msg ++ ">"
-- ppHsExp (HsLambda expList body) = myFsep $
ppHsExp (HsLambda _srcLoc expList body) = myFsep $ -- srcLoc added by Bernie
(((char '\\' ):) . map ppHsPat $ expList)
++ [text "->", ppHsExp body]
-- keywords
ppHsExp (HsLet expList letBody) =
myFsep [text "let" <+> body letIndent (map ppHsDecl expList),
text "in", ppHsExp letBody]
ppHsExp (HsIf cond thenexp elsexp) =
myFsep [text "if", ppHsExp cond,
text "then", ppHsExp thenexp,
text "else", ppHsExp elsexp]
ppHsExp (HsCase cond altList) = myFsep[text "case", ppHsExp cond, text "of"]
$$$ body caseIndent (map ppHsAlt altList)
ppHsExp (HsDo stmtList) = text "do" $$$ body doIndent (map ppHsStmt stmtList)
-- Constructors & Vars
ppHsExp (HsVar name ) = ppHsQNameParen name
ppHsExp (HsCon name) = ppHsQNameParen name
ppHsExp (HsTuple expList) = parenList . map ppHsExp $ expList
ppHsExp (HsUnboxedTuple expList) = parenListzh . map ppHsExp $ expList
ppHsExp (HsParen exp) = parens . ppHsExp $ exp
-- TODO arguments swapped
ppHsExp (HsLeftSection v exp) | (HsVar name) <- dropAs v =
parens (ppHsExp exp <+> ppHsQNameInfix name)
ppHsExp (HsLeftSection v exp) | (HsCon name) <- dropAs v =
parens (ppHsExp exp <+> ppHsQNameInfix name)
--ppHsExp (HsLeftSection _ _) = error "illegal left section"
ppHsExp (HsRightSection exp v) | (HsVar name) <- dropAs v =
parens (ppHsQNameInfix name <+> ppHsExp exp)
ppHsExp (HsRightSection exp v) | (HsCon name) <- dropAs v =
parens (ppHsQNameInfix name <+> ppHsExp exp)
--ppHsExp (HsRightSection _ _) = error "illegal right section"
ppHsExp (HsRecConstr c fieldList) =
ppHsQName c
<> (braceList . map ppHsFieldUpdate $ fieldList)
ppHsExp (HsRecUpdate exp fieldList) =
ppHsExp exp
<> (braceList . map ppHsFieldUpdate $ fieldList)
-- patterns
-- special case that would otherwise be buggy
ppHsExp (HsAsPat _ p) | not (dump FD.Aspats) = ppHsExp p
ppHsExp (HsAsPat name (HsIrrPat (Located _ exp))) =
myFsep[ppHsName name <> char '@', char '~' <> ppHsExp exp]
ppHsExp (HsAsPat name exp) = hcat[ppHsName name,char '@',ppHsExp exp]
ppHsExp (HsWildCard _) = char '_'
ppHsExp (HsIrrPat (Located _ exp)) = char '~' <> ppHsExp exp
ppHsExp (HsBangPat (Located _ exp)) = char '!' <> ppHsExp exp
-- Lists
ppHsExp (HsList list) =
bracketList . punctuate comma . map ppHsExp $ list
ppHsExp (HsEnumFrom exp) =
bracketList [ppHsExp exp,text ".."]
ppHsExp (HsEnumFromTo from to) =
bracketList [ppHsExp from, text "..", ppHsExp to]
ppHsExp (HsEnumFromThen from thenE) =
bracketList [ppHsExp from <> comma, ppHsExp thenE]
ppHsExp (HsEnumFromThenTo from thenE to) =
bracketList [ppHsExp from <> comma, ppHsExp thenE,
text "..", ppHsExp to]
ppHsExp (HsListComp (HsComp _ stmtList exp)) =
bracketList ([ppHsExp exp, char '|']
++ (punctuate comma . map ppHsStmt $ stmtList))
ppHsExp (HsExpTypeSig pos exp ty) =
myFsep[ppHsExp exp, text "::", ppHsQualType ty]
ppHsExp (HsLocatedExp (Located _ x)) = ppHsExp x
ppHsExp (HsBackTick e) = char '`' <> ppHsExp e <> char '`'
ppHsExp HsWords { .. } = char '«' <> hsep (map ppHsExp hsExpExps) <> char '»'
ppHsExp e = text $ show e
------------------------- Patterns -----------------------------
ppHsPat :: HsPat -> Doc
ppHsPat (HsPVar name) = ppHsNameParen name
ppHsPat (HsPLit lit) = ppHsLit lit
ppHsPat (HsPNeg p) = myFsep [char '-', parenPrec p]
ppHsPat (HsPInfixApp a op b) = myFsep[ppHsPat a, ppHsQNameInfix op, ppHsPat b]
ppHsPat (HsPApp n ps) = myFsep (ppHsQName n : map parenPrec ps)
ppHsPat (HsPTuple ps) = parenList . map ppHsPat $ ps
ppHsPat (HsPUnboxedTuple ps) = parenListzh . map ppHsPat $ ps
ppHsPat (HsPList ps) = bracketList . punctuate comma . map ppHsPat $ ps
ppHsPat (HsPParen p) = parens . ppHsPat $ p
ppHsPat (HsPRec c fields)
= ppHsQName c
<> (braceList . map ppHsPatField $ fields)
-- special case that would otherwise be buggy
ppHsPat (HsPAsPat name (HsPIrrPat (Located _ pat))) =
myFsep[ppHsName name <> char '@', char '~' <> parenPrec pat]
ppHsPat (HsPAsPat name pat) = hcat[ppHsName name,char '@',parenPrec pat]
ppHsPat HsPWildCard = char '_'
ppHsPat (HsPIrrPat (Located _ pat)) = char '~' <> parenPrec pat
ppHsPat (HsPBangPat (Located _ pat)) = char '!' <> parenPrec pat
ppHsPat (HsPatExp e) = ppHsExp e
ppHsPat (HsPTypeSig _ p qt) = parens $ ppHsPat p <+> text "::" <+> ppHsQualType qt
ppHsPat (HsPatWords ws) = char '«' <> hsep (map parenPrec ws) <> char '»'
ppHsPat (HsPatBackTick bt) = char '`' <> ppHsPat bt <> char '`'
parenPrec p = if f p then char '‹' <> ppHsPat p <> char '›' else ppHsPat p where
f HsPParen {} = False
f HsPUnboxedTuple {} = False
f HsPList {} = False
f HsPatWords {} = False
f HsPatBackTick {} = False
f HsPWildCard {} = False
f HsPVar {} = False
f HsPLit {} = False
f (HsPApp _ []) = False
f HsPTuple {} = False
f _ = True
ppHsPatField (HsField name pat) = myFsep[ppHsQName name, equals, ppHsPat pat]
------------------------- Case bodies -------------------------
ppHsAlt :: HsAlt -> Doc
ppHsAlt (HsAlt pos exp gAlts decls) =
ppHsPat exp <+> ppGAlts gAlts $$$ ppWhere decls
ppGAlts :: HsRhs -> Doc
ppGAlts (HsUnGuardedRhs exp) = text "->" <+> ppHsExp exp
ppGAlts (HsGuardedRhss altList) = myVcat . map ppGAlt $ altList
ppGAlt c = ppHsGuardedRhs (text "->") c
------------------------- Statements in monads & list comprehensions -----
ppHsStmt :: HsStmt -> Doc
ppHsStmt (HsGenerator _sloc exp from) = -- sloc added by Bernie
ppHsPat exp <+> text "<-" <+> ppHsExp from
ppHsStmt (HsQualifier exp) = ppHsExp exp
ppHsStmt (HsLetStmt declList) = text "let"
$$$ body letIndent (map ppHsDecl declList)
------------------------- Record updates
ppHsFieldUpdate :: HsFieldUpdate -> Doc
ppHsFieldUpdate (HsField name exp) =
myFsep[ppHsQName name,equals,ppHsExp exp]
------------------------- Names -------------------------
ppHsQName :: HsName -> Doc
ppHsQName n = text $ show n
--ppHsQName (UnQual name) = ppHsIdentifier name
--ppHsQName z@(Qual m@(Module mod) name)
-- | otherwise = text mod <> char '.' <> ppHsIdentifier name
ppHsName = ppHsQName
ppHsQNameParen :: HsName -> Doc
ppHsQNameParen name = parensIf (isSymbolName name) (ppHsQName name)
ppHsQNameInfix :: HsName -> Doc
ppHsQNameInfix name
| isSymbolName name = ppHsQName name
| otherwise = char '`' <> ppHsQName name <> char '`'
--ppHsIdentifier :: HsIdentifier -> Doc
--ppHsIdentifier name = text (show name)
ppHsNameParen :: HsName -> Doc
ppHsNameParen name = parensIf (isSymbolName name) (ppHsName name)
ppHsNameInfix :: HsName -> Doc
ppHsNameInfix name
| isSymbolName name = ppHsName name
| otherwise = char '`' <> ppHsName name <> char '`'
isSymbolName :: HsName -> Bool
--isSymbolName (Qual _ (HsSymbol _)) = True
--isSymbolName (UnQual (HsSymbol _)) = True
isSymbolName x | (_,_,c:_) <- nameParts (unRename x), isAlpha c || c `elem` "'_" = False
isSymbolName _ = True
ppHsContext :: HsContext -> Doc
ppHsContext [] = empty
ppHsContext context = parenList (map ppHsAsst context)
-- hacked for multi-parameter type classes
ppHsAsst :: HsAsst -> Doc
--ppHsAsst (a,ts) = myFsep(ppHsQName a : map ppHsTypeArg ts)
ppHsAsst (HsAsst a ts) = myFsep(ppHsQName a : map ppHsName ts)
ppHsAsst (HsAsstEq a b) = ppHsType a <+> char '=' <+> ppHsType b
------------------------- pp utils -------------------------
maybePP :: (a -> Doc) -> Maybe a -> Doc
maybePP pp Nothing = empty
maybePP pp (Just a) = pp a
parenList :: [Doc] -> Doc
parenList = parens . myFsepSimple . punctuate comma
parenListzh :: [Doc] -> Doc
parenListzh = parenszh . myFsepSimple . punctuate comma
braceList :: [Doc] -> Doc
braceList = braces . myFsepSimple . punctuate comma
bracketList :: [Doc] -> Doc
bracketList = brackets . myFsepSimple
-- Monadic PP Combinators -- these examine the env
topLevel :: Doc -> [Doc] -> Doc
topLevel header dl = do
e <- fmap layout getPPEnv
case e of
PPOffsideRule -> header $$ vcat dl
PPSemiColon -> header $$ (braces . vcat . punctuate semi) dl
PPInLine -> header $$ (braces . vcat . punctuate semi) dl
PPNoLayout -> header <+> (braces . hsep . punctuate semi) dl
body :: (PPHsMode -> Int) -> [Doc] -> Doc
body f dl = do
e <- fmap layout getPPEnv
case e of PPOffsideRule -> indent
PPSemiColon -> indentExplicit
_ -> inline
where
inline = braces . hsep . punctuate semi $ dl
indent = do{i <-fmap f getPPEnv;nest i . vcat $ dl}
indentExplicit = do {i <- fmap f getPPEnv;
nest i . braces . vcat . punctuate semi $ dl}
($$$) :: Doc -> Doc -> Doc
a $$$ b = layoutChoice (a $$) (a <+>) b
mySep :: [Doc] -> Doc
mySep = layoutChoice mySep' hsep
where
-- ensure paragraph fills with indentation.
mySep' [x] = x
mySep' (x:xs) = x <+> fsep xs
mySep' [] = error "Internal error: mySep"
myVcat :: [Doc] -> Doc
myVcat = layoutChoice vcat hsep
myFsepSimple :: [Doc] -> Doc
myFsepSimple = layoutChoice fsep hsep
-- same, except that continuation lines are indented,
-- which is necessary to avoid triggering the offside rule.
myFsep :: [Doc] -> Doc
myFsep = layoutChoice fsep' hsep
where fsep' [] = empty
fsep' (d:ds) = do
e <- getPPEnv
let n = onsideIndent e
nest n (fsep (nest (-n) d:ds))
layoutChoice a b dl = do e <- getPPEnv
if layout e == PPOffsideRule ||
layout e == PPSemiColon
then a dl else b dl
instance P.PPrint P.Doc HsDecl where
pprint d = unDocM (ppHsDecl d) defaultMode
instance P.PPrint P.Doc HsExp where
pprint d = unDocM (ppHsExp d) defaultMode
instance P.PPrint P.Doc HsType where
pprint d = unDocM (ppHsType d) defaultMode
instance P.PPrint P.Doc HsQualType where
pprint d = unDocM (ppHsQualType d) defaultMode
instance P.PPrint P.Doc HsTyVarBind where
pprint d = P.text (show $ hsTyVarBindName d)
instance P.PPrint P.Doc HsPat where
pprint d = unDocM (ppHsPat d) defaultMode
| m-alvarez/jhc | src/FrontEnd/HsPretty.hs | mit | 26,684 | 208 | 18 | 5,785 | 9,633 | 4,819 | 4,814 | -1 | -1 |
{-# htermination fmap :: Functor f => (a -> b) -> (f a -> f b) #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_fmap_1.hs | mit | 67 | 0 | 2 | 17 | 3 | 2 | 1 | 1 | 0 |
module Spelling (TrainingDict, nWords, correct) where
import qualified Data.ByteString.Char8 as B
import Data.Char (isAlpha, toLower)
import Data.List (foldl', maximumBy)
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import Data.Function (on)
import Paths_Norvigs_Spelling_Corrector (getDataFileName)
type WordSet = S.Set B.ByteString
type TrainingDict = M.Map B.ByteString Int
alphabet :: String
alphabet = ['a'..'z']
nWords :: IO TrainingDict
nWords = do
ws <- getDataFileName "big.txt" >>= B.readFile
return $ (train . lowerWords) ws
lowerWords :: B.ByteString -> [B.ByteString]
lowerWords = B.words . B.map normalize
where normalize :: Char -> Char
normalize c = if isAlpha c then toLower c else ' '
train :: [B.ByteString] -> TrainingDict
train = foldl' (\acc x -> M.insertWith (+) x 1 acc) M.empty
edits1 :: B.ByteString -> WordSet
edits1 w = S.fromList $ deletes ++ transposes ++ replaces ++ inserts
where splits :: [(B.ByteString, B.ByteString)]
splits = [ B.splitAt n w | n <- [0 .. B.length w - 1] ]
deletes :: [B.ByteString]
deletes = map (\(a, b) -> B.concat[a, B.tail b]) splits
transposes :: [B.ByteString]
        -- swap the first two characters of the suffix
        transposes = [ B.concat [a, B.take 1 (B.drop 1 b), B.take 1 b, B.drop 2 b]
                     | (a, b) <- splits, B.length b > 1 ]
replaces :: [B.ByteString]
replaces = [ B.concat [a, B.singleton c, B.tail b]
| (a, b) <- splits, c <- alphabet]
inserts :: [B.ByteString]
inserts = [ B.concat [a, B.singleton c, b]
| (a,b) <- splits, c <- alphabet]
edits2 :: B.ByteString -> WordSet
edits2 = S.unions . S.toList . S.map edits1 . edits1
knownEdits2 :: B.ByteString -> TrainingDict -> WordSet
knownEdits2 w nwords = edits2 w `S.intersection` M.keysSet nwords
known :: WordSet -> TrainingDict -> WordSet
known inputSet nwords = inputSet `S.intersection` M.keysSet nwords
choices :: B.ByteString -> TrainingDict -> WordSet
choices w ws = foldr orNextIfEmpty (S.singleton w)
[ known (S.singleton w) ws
, known (edits1 w) ws
, knownEdits2 w ws
]
where orNextIfEmpty x y = if S.null x then y else x
chooseBest :: WordSet -> TrainingDict -> B.ByteString
chooseBest ch ws = maximumBy (compare `on` (\w -> M.findWithDefault 0 w ws)) (S.toList ch)
correct :: TrainingDict -> B.ByteString -> B.ByteString
correct ws w = chooseBest (choices w ws) ws
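-- A minimal usage sketch (assuming the caller imports Data.ByteString.Char8
-- qualified as B, and that the package's "big.txt" data file is installed):
--
-- > main :: IO ()
-- > main = do
-- >   dict <- nWords
-- >   B.putStrLn (correct dict (B.pack "speling"))  -- most likely "spelling"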
| MarcoSero/Norvigs-Spelling-Corrector | src/Spelling.hs | mit | 2,555 | 0 | 13 | 685 | 938 | 510 | 428 | 53 | 2 |
module PPL2.Pretty.AProg where
import PPL2.Prelude
import PPL2.VM.Types (AProg, ACode, AInstr)
import PPL2.Pretty.Instr (prettyInstr, fillLeft)
import PPL2.Pretty.MProg (prettyData)
import PPL2.System.Types (MonadCompile)
-- ----------------------------------------
prettyAProg :: (Show v) => (ACode, [v]) -> String
prettyAProg (acode, adata) =
unlines $
[ "code segment"
, "============"
, ""
] ++
prettyACode acode ++
[ ""
, "data segment"
, "============"
, ""
, prettyData adata -- preliminary
, ""
]
prettyACode :: ACode -> [String]
prettyACode is =
map pretty' is
where
pretty' :: AInstr -> String
pretty' =
prettyInstr indent' id prettyJmp' prettyLab'
indent' = (fillLeft 8 "" ++)
prettyJmp' = (:[])
prettyLab' l = l ++ ":"
-- ----------------------------------------
| UweSchmidt/ppl2 | src/PPL2/Pretty/AProg.hs | mit | 849 | 0 | 8 | 181 | 241 | 142 | 99 | 29 | 1 |
module FullSubsetsSpec (spec) where
import Test.Hspec
import DecisionTrees.Utils
import qualified Data.Set as Set
spec :: Spec
spec = describe "DecisionTrees.Utils.fullSubsets" $
it "should return a set of possible set splittings" $ do
example $ fullSubsets (Set.fromList [1..2]) `shouldBe` Set.fromList
[ Set.singleton $ Set.fromList [1..2]
, Set.fromList $ map Set.singleton [1..2]
]
example $ fullSubsets (Set.fromList [1..3]) `shouldBe` Set.fromList
[ Set.singleton $ Set.fromList [1..3]
, Set.fromList [ Set.singleton 1, Set.fromList [2, 3] ]
, Set.fromList [ Set.singleton 2, Set.fromList [1, 3] ]
, Set.fromList [ Set.singleton 3, Set.fromList [2, 1] ]
, Set.fromList $ map Set.singleton [1..3]
]
example $ fullSubsets (Set.fromList [1..4]) `shouldBe` Set.fromList
[ Set.singleton $ Set.fromList [1..4]
, Set.fromList [ Set.singleton 1, Set.fromList [2, 3, 4] ]
, Set.fromList [ Set.singleton 1, Set.singleton 2, Set.fromList [3, 4] ]
, Set.fromList [ Set.singleton 1, Set.singleton 3, Set.fromList [2, 4] ]
, Set.fromList [ Set.singleton 1, Set.singleton 4, Set.fromList [2, 3] ]
, Set.fromList [ Set.singleton 2, Set.fromList [1, 3, 4] ]
, Set.fromList [ Set.singleton 2, Set.singleton 3, Set.fromList [1, 4] ]
, Set.fromList [ Set.singleton 2, Set.singleton 4, Set.fromList [1, 3] ]
, Set.fromList [ Set.singleton 3, Set.fromList [1, 2, 4] ]
, Set.fromList [ Set.singleton 3, Set.singleton 4, Set.fromList [1, 2] ]
, Set.fromList [ Set.singleton 4, Set.fromList [1, 2, 3] ]
, Set.fromList $ map Set.singleton [1..4]
]
| fehu/min-dat--decision-trees | test/FullSubsetsSpec.hs | mit | 1,928 | 0 | 14 | 622 | 723 | 384 | 339 | 29 | 1 |
module Control.Concurrent.Actor.Tests where
import Control.Concurrent.Actor hiding ( receive, spawnReceive )
import Control.Concurrent.Actor.Debug
-- -----------------------------------------------------------------------------
-- * @receive@ is non-busy
testReceive1 :: IO ()
testReceive1 = do
act <- spawnReceive $
\msg -> case msg of
"ok?" -> putStrLn "ok"
_ -> putStrLn "nothing"
act ! "ok?"
act ! "ok?"
act ! "what?"
return ()
-- > testReceive1
-- ThreadId 39: receiving...
-- ok
-- ThreadId 39: receiving...
-- ok
-- ThreadId 39: receiving...
-- nothing
-- ThreadId 39: receiving...
-- Thus, the @receive@ function doesn't perform busy waiting.
-- -----------------------------------------------------------------------------
-- * @tolerant@ handles exceptions
testTolerant1 :: IO ()
testTolerant1 = do
act <- spawnReceive $
\msg -> tolerant $ if msg then putStrLn "ok" else putStrLn $ tail []
act ! False
act ! True
act ! True
return ()
-- > testTolerant1
-- ThreadId 31: receiving...
-- ThreadId 31: receiving...
-- ok
-- ThreadId 31: receiving...
-- ok
-- ThreadId 31: receiving...
| treep/hactors | Control/Concurrent/Actor/Tests.hs | mit | 1,141 | 0 | 14 | 208 | 216 | 119 | 97 | 21 | 2 |
module Main.Command.Help ( command ) where
import System.Environment ( getProgName )
import System.IO
import Text.Printf ( printf )
import Main.Command as Cmd
command :: [Cmd.Command] -> Cmd.Command
command cs = Cmd.Command (thisAction cs) "help" "Show usage for a particular command." (helpLines cs)
-- |The help command
thisAction :: [Cmd.Command] -> [String] -> IO Bool
thisAction cs args = case args of
  []  -> printUsage cs >> return True
  [c] -> helpSpecificCmd cs c
  _   -> do
    pn <- getProgName
    hPutStrLn stderr $ printf "%s: help: multiple arguments passed to help command" pn
    return False
helpLines :: [Cmd.Command] -> [String]
helpLines _ = [
"Provide help on a specific command or list available commands."
]
-- |Given the program name and set of available commands, generate a string
-- giving top-level usage information.
usage :: [Cmd.Command] -> String -> String
usage cs pn = unlines $
printf "Usage: %s %s [<command>]" pn (name $ command cs) : (full $ command cs)
++
[ ""
, "Available commands:"
, Cmd.descTable cs
]
-- |Print a brief top-level usage summary to the console.
printUsage :: [Cmd.Command] -> IO ()
printUsage = hPutUsage stdout
-- |Print a brief top-level usage summary to a filehandle
hPutUsage :: Handle -> [Cmd.Command] -> IO ()
hPutUsage h cs = getProgName >>= (hPutStr h . usage cs)
helpSpecificCmd :: [Cmd.Command] -> String -> IO Bool
helpSpecificCmd cs n = case Cmd.named cs n of
Just c -> mapM_ putStrLn (Cmd.full c) >> return True
Nothing -> do
pn <- getProgName
hPutStrLn stderr $ printf "%s: help: %s: no such command" pn n
return False
| rjw57/cloudsync | cs/Main/Command/Help.hs | mit | 1,763 | 0 | 12 | 443 | 497 | 253 | 244 | 36 | 3 |
module Tokenize
( tokenizeExpr )
where
import Data.Char (isDigit, isSeparator)
type Tokens = [String]
operators :: String
operators = "+-*/()"
pushIntIfNecessary :: (Tokens, String) -> Tokens
pushIntIfNecessary (tokens, "") = tokens
pushIntIfNecessary (tokens, int) = int:tokens
traverseExpr :: (Tokens, String) -> Char -> (Tokens, String)
traverseExpr (tokens, int) c
  | isSeparator c = (pushIntIfNecessary (tokens, int), "") -- whitespace ends any pending number
| isDigit c = (tokens, int ++ [c])
| c `elem` operators = ([c]:(pushIntIfNecessary (tokens, int)), "")
| otherwise = error "Unrecognized character"
parse :: String -> (Tokens, String)
parse expr = foldl traverseExpr ([], "") expr
tokenizeExpr :: String -> Tokens
tokenizeExpr = reverse . pushIntIfNecessary . parse
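-- For example:
--
-- > tokenizeExpr "1 + 2*34"  ==  ["1","+","2","*","34"]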
| DanielBrookRoberge/learning-calculator | haskell/Tokenize.hs | mit | 731 | 0 | 10 | 119 | 281 | 158 | 123 | 19 | 1 |
import qualified SRC.Log as Log
main = undefined
-- logging boilerplate
filename = "Main.hs"
fatal :: Show a => String -> a -> b
fatal msg line = Log.reportFatal filename msg line
fixme, bug, err :: Show a => a -> b
fixme = fatal Log.fixMe
bug = fatal Log.bug
err = fatal Log.err
| Fornost461/drafts-and-stuff | Haskell/samples/Template/SRC/Main.hs | cc0-1.0 | 280 | 6 | 7 | 60 | 129 | 60 | 69 | 9 | 1 |
module Main where
import System.Environment(getArgs)
import Control.Monad
import Data.Char (digitToInt)
type Aromatic = [(Int,Int)]
evalAromatic :: Aromatic -> Int
evalAromatic [] = 0
evalAromatic ((v1,b1):xs) =
let sign = case xs of
((_,b2):_) | b1 < b2 -> -1
_ -> 1
in sign*v1*b1 + evalAromatic xs
parseAromatic :: String -> Aromatic
parseAromatic [] = []
parseAromatic (a:r:xs) =
let v = digitToInt a
b = case r of
'I' -> 1
'V' -> 5
'X' -> 10
'L' -> 50
'C' -> 100
'D' -> 500
'M' -> 1000
in (v,b) : parseAromatic xs
processLine :: String -> String
processLine = show . evalAromatic . parseAromatic
main :: IO ()
main = liftM head getArgs >>= liftM lines . readFile >>= mapM_ (putStrLn . processLine)
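-- For example, processLine "2I3X" == "28": the pair (2,'I') is negated
-- because the base that follows it (X = 10) is larger, giving -2*1 + 3*10.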
| cryptica/CodeEval | Challenges/150_RomanAndArabic/main.hs | gpl-3.0 | 949 | 0 | 15 | 373 | 336 | 177 | 159 | 29 | 7 |
module Portage.EMeta
( EMeta(..)
, findExistingMeta
) where
import Control.Monad
import Data.Char (isSpace)
import qualified Data.List as L
import System.Directory (doesDirectoryExist, getDirectoryContents)
import System.FilePath ((</>))
import Text.Printf
-- Tries to extract the value of a variable in 'var="val"' format.
-- There should be exactly one variable assignment in the ebuild.
-- It's a bit of an artificial limitation, but it's common for 'if / else' blocks.
extract_quoted_string :: FilePath -> String -> String -> Maybe String
extract_quoted_string ebuild_path s_ebuild var_name =
case filter (L.isPrefixOf var_prefix . ltrim) $ lines s_ebuild of
[] -> Nothing
[kw_line] -> up_to_quote $ skip_prefix $ ltrim kw_line
    other -> bail_out $ printf "strange '%s' assignments:\n%s" var_name (unlines other)
where ltrim :: String -> String
ltrim = dropWhile isSpace
var_prefix = var_name ++ "=\""
skip_prefix = drop (length var_prefix)
up_to_quote l = case break (== '"') l of
("", _) -> Nothing -- empty line
(_, "") -> bail_out $ printf "failed to find closing quote for '%s'" l
(val, _) -> Just val
bail_out :: String -> e
bail_out msg = error $ printf "%s:extract_quoted_string %s" ebuild_path msg
-- Tries to extract the value of a variable in '#hackport: var: val' format.
-- There should be exactly one such variable assignment in the ebuild.
extract_hackport_var :: FilePath -> String -> String -> Maybe String
extract_hackport_var ebuild_path s_ebuild var_name =
case filter (L.isPrefixOf var_prefix) $ lines s_ebuild of
[] -> Nothing
[var_line] -> Just $ skip_prefix var_line
    other -> bail_out $ printf "strange '%s' assignments:\n%s" var_name (unlines other)
where var_prefix = "#hackport: " ++ var_name ++ ": "
skip_prefix = drop (length var_prefix)
bail_out :: String -> e
bail_out msg = error $ printf "%s:extract_hackport_var %s" ebuild_path msg
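-- For illustration, on hypothetical ebuild contents:
--
-- > extract_quoted_string "foo.ebuild" "KEYWORDS=\"~amd64 ~x86\"" "KEYWORDS"
-- >   == Just "~amd64 ~x86"
-- > extract_hackport_var "foo.ebuild" "#hackport: flags: +llvm -debug" "flags"
-- >   == Just "+llvm -debug"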
extractKeywords :: FilePath -> String -> Maybe [String]
extractKeywords ebuild_path s_ebuild =
  words `fmap` extract_quoted_string ebuild_path s_ebuild "KEYWORDS"
extractLicense :: FilePath -> String -> Maybe String
extractLicense ebuild_path s_ebuild =
extract_quoted_string ebuild_path s_ebuild "LICENSE"
extractCabalFlags :: FilePath -> String -> Maybe String
extractCabalFlags ebuild_path s_ebuild =
extract_hackport_var ebuild_path s_ebuild "flags"
-- aggregated (best inferred) metadata for a new ebuild of package
data EMeta = EMeta { keywords :: Maybe [String]
, license :: Maybe String
, cabal_flags :: Maybe String
}
findExistingMeta :: FilePath -> IO EMeta
findExistingMeta pkgdir =
do ebuilds <- filter (L.isSuffixOf ".ebuild") `fmap` do b <- doesDirectoryExist pkgdir
if b then getDirectoryContents pkgdir
else return []
-- TODO: version sort
e_metas <- forM ebuilds $ \e ->
do let e_path = pkgdir </> e
e_conts <- readFile e_path
return EMeta { keywords = extractKeywords e e_conts
, license = extractLicense e e_conts
, cabal_flags = extractCabalFlags e e_conts
}
let get_latest candidates = last (Nothing : filter (/= Nothing) candidates)
aggregated_meta = EMeta { keywords = get_latest $ map keywords e_metas
, license = get_latest $ map license e_metas
, cabal_flags = get_latest $ map cabal_flags e_metas
}
return aggregated_meta
| Heather/hackport | Portage/EMeta.hs | gpl-3.0 | 3,998 | 0 | 15 | 1,285 | 877 | 454 | 423 | 63 | 5 |
module Tests.GADTTyped where
import QHaskell.MyPrelude
import QHaskell.Expression.GADTTyped
import QHaskell.Variable.Scoped
import qualified QHaskell.Type.ADT as TA
import qualified QHaskell.Expression.ADTValue as V
import qualified QHaskell.Nat.ADT as NA
import QHaskell.Environment.Scoped
import QHaskell.Conversion
import QHaskell.Expression.Conversions.Evaluation.GADTTyped ()
import QHaskell.Inference
dbl :: Exp (NA.Suc NA.Zro) NA.Zro TA.Typ
dbl = Abs (Prm [TA.Wrd ,TA.Wrd] Zro
[Var Zro , Var Zro])
compose :: TA.Typ -> TA.Typ -> Exp n m TA.Typ
compose ta tb = Abs (Abs (Abs
(App tb (Var (Suc (Suc Zro)))
(App ta (Var (Suc Zro)) (Var Zro)))))
four :: Exp (NA.Suc NA.Zro) NA.Zro TA.Typ
four = App TA.Wrd
(App (TA.Arr TA.Wrd TA.Wrd)
(App (TA.Arr TA.Wrd TA.Wrd)
(compose TA.Wrd TA.Wrd) dbl) dbl) (ConI 1)
test :: Bool
test = (case runNamM (cnv (four , (Ext (V.lft ((+) :: Word32 -> Word32 -> Word32)) Emp , Emp :: Env 'NA.Zro V.Exp))) of
Rgt (V.colft -> Rgt (4 :: Word32)) -> True
_ -> False)
&& (runNamM (typChk four (Ext (TA.Arr TA.Wrd
(TA.Arr TA.Wrd TA.Wrd)) Emp , Emp :: Env 'NA.Zro TA.Typ))
==
Rgt TA.Wrd)
| shayan-najd/QHaskell | Tests/GADTTyped.hs | gpl-3.0 | 1,272 | 0 | 18 | 325 | 549 | 295 | 254 | -1 | -1 |
module DoubleAuction.Init (
initDoubleAuction
) where
import DoubleAuction.Model
import DoubleAuction.Auctioneer
import DoubleAuction.Trader
import FRP.FrABS
import FRP.Yampa
import System.Random
import Control.Monad.Random
initDoubleAuction :: Int -> IO ([DAAgentDef], DAEnvironment)
initDoubleAuction n =
  do
    auctioneerDef <- evalRandIO (createDAAuctioneer auctioneer)
    traders <- evalRandIO $ mapM (createDATrader n) [1..n]
    let envNet = createDeterministicNetwork (Complete n) unitEdgeLabeler
    return (auctioneerDef : traders, envNet)
createDATrader :: Int -> AgentId -> Rand StdGen DAAgentDef
createDATrader n aid =
do
rng <- getSplit
let h = (fromIntegral aid) / (fromIntegral (n + 1))
let s = TraderState {
daTraderOptimism = h,
daTraderLimitAsset = limitPriceAsset h,
daTraderLimitLoan = limitPriceLoan h,
daTraderLimitAssetLoan = (limitPriceAsset h) / (limitPriceLoan h),
daTraderLimitCollateral = (limitPriceAsset h) - (limitPriceLoan h),
daTraderCash = cashEndow,
daTraderAssets = assetEndow,
daTraderLoansTaken = 0.0,
daTraderLoansGiven = 0.0
}
let adef = AgentDef {
adId = aid,
adState = s,
adConversation = Nothing,
adInitMessages = NoEvent,
adBeh = traderAgentBehaviour,
adRng = rng
}
return adef
createDAAuctioneer :: AgentId -> Rand StdGen DAAgentDef
createDAAuctioneer aid =
do
rng <- getSplit
let adef = AgentDef {
adId = aid,
adState = AuctioneerState, -- NOTE: again, the auctioneer does not has any domain-specific state
adConversation = Nothing,
adInitMessages = NoEvent,
adBeh = auctioneerBehaviour,
adRng = rng
}
return adef | thalerjonathan/phd | coding/libraries/chimera/examples/ABS/DoubleAuction/Init.hs | gpl-3.0 | 1,897 | 0 | 14 | 560 | 461 | 251 | 210 | 51 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Application
( getApplicationDev
, appMain
, develMain
, makeFoundation
, makeLogWare
-- * for DevelMain
, getApplicationRepl
, shutdownApp
-- * for GHCI
, handler
, db
) where
import Control.Monad.Logger (liftLoc, runLoggingT)
import Database.Persist.Postgresql (createPostgresqlPool, pgConnStr,
pgPoolSize, runSqlPool)
import Import
import Language.Haskell.TH.Syntax (qLocation)
import Network.Wai (Middleware)
import Network.Wai.Handler.Warp (Settings, defaultSettings,
defaultShouldDisplayException,
runSettings, setHost,
setOnException, setPort, getPort)
import Network.Wai.Middleware.RequestLogger (Destination (Logger),
IPAddrSource (..),
OutputFormat (..), destination,
mkRequestLogger, outputFormat)
import System.Log.FastLogger (defaultBufSize, newStdoutLoggerSet,
toLogStr)
-- Import all relevant handler modules here.
-- Don't forget to add new modules to your cabal file!
import Handler.Common
import Handler.Home
-- This line actually creates our YesodDispatch instance. It is the second half
-- of the call to mkYesodData which occurs in Foundation.hs. Please see the
-- comments there for more details.
mkYesodDispatch "App" resourcesApp
-- | This function allocates resources (such as a database connection pool),
-- performs initialization and returns a foundation datatype value. This is also
-- the place to put your migrate statements to have automatic database
-- migrations handled by Yesod.
makeFoundation :: AppSettings -> IO App
makeFoundation appSettings = do
-- Some basic initializations: HTTP connection manager, logger, and static
-- subsite.
appHttpManager <- newManager
appLogger <- newStdoutLoggerSet defaultBufSize >>= makeYesodLogger
appStatic <-
(if appMutableStatic appSettings then staticDevel else static)
(appStaticDir appSettings)
-- We need a log function to create a connection pool. We need a connection
-- pool to create our foundation. And we need our foundation to get a
-- logging function. To get out of this loop, we initially create a
-- temporary foundation without a real connection pool, get a log function
-- from there, and then create the real foundation.
let mkFoundation appConnPool = App {..}
-- The App {..} syntax is an example of record wild cards. For more
-- information, see:
-- https://ocharles.org.uk/blog/posts/2014-12-04-record-wildcards.html
tempFoundation = mkFoundation $ error "connPool forced in tempFoundation"
logFunc = messageLoggerSource tempFoundation appLogger
-- Create the database connection pool
pool <- flip runLoggingT logFunc $ createPostgresqlPool
(pgConnStr $ appDatabaseConf appSettings)
(pgPoolSize $ appDatabaseConf appSettings)
-- Perform database migration using our application's logging settings.
runLoggingT (runSqlPool (runMigration migrateAll) pool) logFunc
-- Return the foundation
return $ mkFoundation pool
-- | Convert our foundation to a WAI Application by calling @toWaiAppPlain@ and
-- applying some additional middlewares.
makeApplication :: App -> IO Application
makeApplication foundation = do
logWare <- makeLogWare foundation
-- Create the WAI application and apply middlewares
appPlain <- toWaiAppPlain foundation
return $ logWare $ defaultMiddlewaresNoLogging appPlain
makeLogWare :: App -> IO Middleware
makeLogWare foundation =
mkRequestLogger def
{ outputFormat =
if appDetailedRequestLogging $ appSettings foundation
then Detailed True
else Apache
(if appIpFromHeader $ appSettings foundation
then FromFallback
else FromSocket)
, destination = Logger $ loggerSet $ appLogger foundation
}
-- | Warp settings for the given foundation value.
warpSettings :: App -> Settings
warpSettings foundation =
setPort (appPort $ appSettings foundation)
$ setHost (appHost $ appSettings foundation)
$ setOnException (\_req e ->
when (defaultShouldDisplayException e) $ messageLoggerSource
foundation
(appLogger foundation)
$(qLocation >>= liftLoc)
"yesod"
LevelError
(toLogStr $ "Exception from Warp: " ++ show e))
defaultSettings
getApplication' :: IO (Settings, App, Application)
getApplication' = do
settings <- getAppSettings
foundation <- makeFoundation settings
wsettings <- getDevSettings $ warpSettings foundation
app <- makeApplication foundation
pure (wsettings, foundation, app)
-- | For yesod devel, return the Warp settings and WAI Application.
getApplicationDev :: IO (Settings, Application)
getApplicationDev = do
(wsettings, _, app) <- getApplication'
return (wsettings, app)
getAppSettings :: IO AppSettings
getAppSettings = loadYamlSettings [configSettingsYml] [] useEnv
-- | main function for use by yesod devel
develMain :: IO ()
develMain = develMainHelper getApplicationDev
-- | The @main@ function for an executable running this site.
appMain :: IO ()
appMain = do
-- Get the settings from all relevant sources
settings <- loadYamlSettingsArgs
-- fall back to compile-time values, set to [] to require values at runtime
[configSettingsYmlValue]
-- allow environment variables to override
useEnv
-- Generate the foundation from the settings
foundation <- makeFoundation settings
-- Generate a WAI Application from the foundation
app <- makeApplication foundation
-- Run the application with Warp
runSettings (warpSettings foundation) app
--------------------------------------------------------------
-- Functions for DevelMain.hs (a way to run the app from GHCi)
--------------------------------------------------------------
getApplicationRepl :: IO (Int, App, Application)
getApplicationRepl = do
(wsettings, foundation, app) <- getApplication'
return (getPort wsettings, foundation, app)
shutdownApp :: App -> IO ()
shutdownApp _ = return ()
---------------------------------------------
-- Functions for use in development with GHCi
---------------------------------------------
-- | Run a handler
handler :: Handler a -> IO a
handler h = getAppSettings >>= makeFoundation >>= flip unsafeHandler h
-- | Run DB queries
db :: ReaderT SqlBackend (HandlerT App IO) a -> IO a
db = handler . runDB
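-- For example, from a GHCi session (the entity and route names below are
-- hypothetical placeholders for whatever the site actually defines):
--
-- > db $ selectList [] [LimitTo 5] -- run a persistent query
-- > handler $ getFooR someFooId -- run a handler action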
| cblp/tasknight-dashboard | frontend/Application.hs | gpl-3.0 | 6,975 | 0 | 13 | 1,786 | 1,076 | 583 | 493 | -1 | -1 |
{-
This file is part of the Haskell Term Rewriting Library.
The Haskell Term Rewriting Library is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The Haskell Term Rewriting Library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with the Haskell Term Rewriting Library. If not, see <http://www.gnu.org/licenses/>.
-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Termlib.Term.Parser where
import Data.List (isSuffixOf)
import Termlib.FunctionSymbol (Signature, Symbol)
import qualified Termlib.FunctionSymbol as F
import qualified Termlib.Signature as Sig
import Termlib.Variable (Variables, Variable(..))
import qualified Termlib.Variable as V
import Termlib.Problem.ParseErrors(ParseError(..), ParseWarning(..))
import Termlib.Term (Term(..), root, immediateSubterms)
import Termlib.Rule (Rule(..))
import Control.Monad.Error
import Control.Monad.Writer.Lazy
import Text.Parsec hiding (ParseError)
parseFromString :: Signature -> Variables -> TermParser a -> String -> Either ParseError ((a,Signature,Variables),[ParseWarning])
parseFromString sig vars parser input =
case runWriter $ runErrorT $ runParserT term' (sig,vars) input input of
(Left e, _ ) -> Left e
(Right (Left e), _ ) -> Left $ ParsecParseError e
(Right (Right t), warns) -> Right (t, warns)
where term' =
do e <- parser
(fs,vs) <- getState
return (e,fs,vs)
type TermParser a = ParsecT String (Signature,Variables) (ErrorT ParseError (Writer [ParseWarning])) a
rule :: TermParser (Bool,Rule)
rule =
do l <- term
str <- whitespaced (try weak <|> strict)
r <- term
return $ (str,Rule l r)
where strict = string "->" >> return True
weak = string "->=" >> return False
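-- For illustration (given suitable 'sig' and 'vars' environments): feeding
-- "f(x,y) -> g(y)" to the 'rule' parser yields a strict rule (the Bool is
-- True), while "f(x,y) ->= f(y,x)" parses as a weak rule, since "->=" is
-- tried first and the parser falls back to "->".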
-- dpRule :: TermParser (Bool, Rule)
-- dpRule =
-- do (s,rl) <- rule
-- setMarked (root (lhs rl))
-- setCompound (root (rhs rl))
-- mapM_ (setMarked . root) (immediateSubterms (rhs rl))
-- return (s,rl)
-- where modifyAttrib _ (Left _) = return ()
-- modifyAttrib alter (Right f) =
-- do (fs,vs) <- getState
-- putState (Sig.alterAttributes (fmap alter) f fs, vs)
-- setCompound = modifyAttrib (\ attrib -> attrib { F.symIsCompound = True})
-- setMarked = modifyAttrib (\ attrib -> attrib { F.symIsMarked = True})
term :: TermParser Term
term =
do name <- ident
try (parseFun name) <|> parseVar name
where parseFun name = do ts <- parens $ sepBy (whitespaced term) colon
f <- getSym name (length ts)
return $ Fun f ts
parseVar name = Var `liftM` getVar name
getVar :: String -> TermParser Variable
getVar name = do (fs,vs) <- getState
let (v, vs') = Sig.runSignature (V.maybeFresh name) vs
putState (fs,vs')
return v
getSym :: String -> Int -> TermParser Symbol
getSym name ar =
do (fs,vs) <- getState
let (f, fs') = Sig.runSignature (F.maybeFresh attribs) fs
putState (fs',vs)
return f
where attribs | name == "COM" = (F.defaultAttribs name ar) {F.symIsCompound = True}
| "^#" `isSuffixOf` name = (F.defaultAttribs (dropTl 2 name) ar) {F.symIsMarked = True}
| "#" `isSuffixOf` name = (F.defaultAttribs (dropTl 1 name) ar) {F.symIsMarked = True}
| otherwise = F.defaultAttribs name ar
dropTl i s = take (length s - i) s
colon = char ','
ident = many1 $ noneOf " \n\r\t()\",|-= "
whitespace = space <|> newline <|> tab <|> char '\r'
whitespaced p = do many $ whitespace
f <- p
many $ whitespace
return f
parens p = do char '('
e <- p
char ')'
return e | mzini/termlib | Termlib/Term/Parser.hs | gpl-3.0 | 4,292 | 0 | 13 | 1,148 | 1,074 | 566 | 508 | 68 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Mirror.Timeline.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes a timeline item.
--
-- /See:/ <https://developers.google.com/glass Google Mirror API Reference> for @mirror.timeline.delete@.
module Network.Google.Resource.Mirror.Timeline.Delete
(
-- * REST Resource
TimelineDeleteResource
-- * Creating a Request
, timelineDelete
, TimelineDelete
-- * Request Lenses
, tdId
) where
import Network.Google.Mirror.Types
import Network.Google.Prelude
-- | A resource alias for @mirror.timeline.delete@ method which the
-- 'TimelineDelete' request conforms to.
type TimelineDeleteResource =
"mirror" :>
"v1" :>
"timeline" :>
Capture "id" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] ()
-- | Deletes a timeline item.
--
-- /See:/ 'timelineDelete' smart constructor.
newtype TimelineDelete =
TimelineDelete'
{ _tdId :: Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TimelineDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tdId'
timelineDelete
:: Text -- ^ 'tdId'
-> TimelineDelete
timelineDelete pTdId_ = TimelineDelete' {_tdId = pTdId_}
-- | The ID of the timeline item.
tdId :: Lens' TimelineDelete Text
tdId = lens _tdId (\ s a -> s{_tdId = a})
instance GoogleRequest TimelineDelete where
type Rs TimelineDelete = ()
type Scopes TimelineDelete =
'["https://www.googleapis.com/auth/glass.location",
"https://www.googleapis.com/auth/glass.timeline"]
requestClient TimelineDelete'{..}
= go _tdId (Just AltJSON) mirrorService
where go
= buildClient (Proxy :: Proxy TimelineDeleteResource)
mempty
| brendanhay/gogol | gogol-mirror/gen/Network/Google/Resource/Mirror/Timeline/Delete.hs | mpl-2.0 | 2,557 | 0 | 12 | 577 | 306 | 188 | 118 | 46 | 1 |
-- This Source Code Form is subject to the terms of the Mozilla Public
-- License, v. 2.0. If a copy of the MPL was not distributed with this
-- file, You can obtain one at http://mozilla.org/MPL/2.0/.
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Network.Kafka.Protocol.Offset
( OffsetRequest
, OffsetRequestFields
, OffsetRequestPayload
, OffsetRequestPayloadFields
, OffsetResponse
, OffsetResponseFields
, OffsetResponsePayload
, OffsetResponsePayloadFields
, FMaxOffsets
, FOffsets
, FTime
, maxOffsets
, offsets
, time
)
where
import Control.Lens
import Data.Proxy
import Data.Serialize
import Data.Vinyl
import Data.Word
import GHC.Generics
import Network.Kafka.Protocol.Instances ()
import Network.Kafka.Protocol.Primitive
import Network.Kafka.Protocol.Universe
type OffsetRequestFields = '[ FReplicaId, FPayload OffsetRequestPayload ]
type OffsetRequest = Req 2 0 OffsetRequestFields
type FMaxOffsets = '("max_offsets", Word32)
type FTime = '("time" , Word64)
maxOffsets :: Proxy FMaxOffsets
maxOffsets = Proxy
time :: Proxy FTime
time = Proxy
type OffsetRequestPayloadFields = '[ FPartition, FTime, FMaxOffsets ]
type OffsetRequestPayload = FieldRec OffsetRequestPayloadFields
type OffsetResponseFields = '[ FPayload OffsetRequestPayload ]
type OffsetResponse = Resp OffsetResponseFields
type FOffsets = '("offsets", Array Word64)
offsets :: Proxy FOffsets
offsets = Proxy
type OffsetResponsePayloadFields = '[ FPartition, FErrorCode, FOffsets ]
newtype OffsetResponsePayload
= OffsetResponsePayload (FieldRec OffsetResponsePayloadFields)
deriving (Eq, Show, Generic)
instance Serialize OffsetResponsePayload
makeWrapped ''OffsetResponsePayload
| kim/kafka-protocol | src/Network/Kafka/Protocol/Offset.hs | mpl-2.0 | 1,970 | 0 | 7 | 372 | 339 | 207 | 132 | 51 | 1 |
{-# LANGUAGE Rank2Types, TypeOperators, FlexibleContexts, TypeFamilies
, TypeSynonymInstances, FlexibleInstances, UndecidableInstances
, MultiParamTypeClasses
#-}
{-# OPTIONS_GHC -Wall -fno-warn-orphans #-}
----------------------------------------------------------------------
-- |
-- Module : Shady.ParamSurf
-- Copyright : (c) Conal Elliott 2008, 2009
-- License : AGPLv3
--
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Parametric surfaces with automatic normals
----------------------------------------------------------------------
-- This version uses Complex s instead of (s,s). Complex is consistent
-- with Image but inconsistent with 1D and 3D.
module Shady.ParamSurf where
import Control.Applicative
import Control.Arrow ((&&&))
import Data.NumInstances ()
import Data.VectorSpace
import Data.Cross hiding (One,Two,Three)
import Data.Derivative
-- import Data.MemoTrie
import Data.Basis
import Shady.Language.Exp
import Shady.Complex
import Shady.ITransform (ITrans(..))
type HeightField s = Complex s -> s
type Surf s = Complex s -> (s,s,s)
type USurf = forall s. Floating s => Surf s
type Curve2 s = s -> Complex s
type Curve3 s = s -> (s,s,s)
type Warp1 s = s -> s
type Warp2 s = Complex s -> Complex s
type Warp3 s = (s,s,s) -> (s,s,s)
-- | Trig functions scaled so that one full period spans [-1,1]
cosU, sinU :: Floating s => s -> s
cosU = cos . (* pi)
sinU = sin . (* pi)
-- | Turn a height field into a surface
hfSurf :: HeightField s -> Surf s
hfSurf field = \ (u :+ v) -> (u, v, field (u :+ v))
-- | Like 'hfSurf' but for curve construction
fcurve :: Warp1 s -> Curve2 s
fcurve f = \ u -> u :+ f u
-- | Unit circle.
circle :: Floating s => Curve2 s
circle = liftA2 (:+) cosU sinU
-- | Half semi circle, with theta in [-pi/2,pi/2]
semiCircle :: Floating s => Curve2 s
semiCircle = circle . (/ 2)
-- | Torus, given radius of sweep circle and cross section
torus :: (Eq s, Floating s, VectorSpace s) => s -> s -> Surf s
-- torus sr cr = revolve (\ s -> (sr,0) ^+^ cr *^ circle s)
torus sr cr = revolve (const (sr :+ 0) ^+^ const cr *^ circle)
-- Surface of revolution, formed by rotation around Z axis. The curve is
-- parameterized by u, and the rotation by v. In this generalized
-- version, we have not a single curve, but a function from v to curves.
revolveG :: Floating s => (s -> Curve2 s) -> Surf s
revolveG curveF = \ (u :+ v) -> onXY (rotate (-pi*v)) (addY (curveF v) u)
revolve :: Floating s => Curve2 s -> Surf s
revolve curve = revolveG (const curve)
-- A sphere is a revolved semi-circle
sphere1 :: Floating s => Surf s
sphere1 = revolve semiCircle
-- | Profile product.
profile :: Num s => Curve2 s -> Curve2 s -> Surf s
profile curve prof (u :+ v) = (cx*px,cy*px,py)
where
cx :+ cy = curve u
px :+ py = prof v
-- More spheres
sphere2,sphere3 :: Floating s => Surf s
sphere2 = profile circle semiCircle
sphere3 = profile semiCircle circle
-- | Frustum, given base & cap radii and height.
frustum :: (Floating s, VectorSpace s, Scalar s ~ s) => s -> s -> s -> Surf s
frustum baseR topR h = profile circle rad
where
rad t = lerp baseR topR (t + 1/2) :+ h*t
-- | Unit cylinder. Unit height and radii
ucylinder :: (Eq s, Floating s, VectorSpace s) => Surf s
ucylinder = profile circle (const 1)
-- | XY plane as a surface
xyPlane :: Num s => Surf s
xyPlane = hfSurf (const 0)
-- | Given a combining op and two curves, make a surface. A sort of
-- Cartesian product with combination.
cartF :: (a -> b -> c) -> (s -> a) -> (s -> b) -> (Complex s -> c)
cartF op f g = \ (u :+ v) -> f u `op` g v
-- Sweep a basis curve by a sweep curve. Warning: does not reorient the
-- basis curve as cross-section. TODO: Frenet frame.
sweep :: VectorSpace s => Curve3 s -> Curve3 s -> Surf s
sweep = cartF (^+^)
-- | One period, unit height eggcrate
eggcrateH :: Floating s => HeightField s
eggcrateH = cartF (*) cosU sinU
revolveH :: (Eq s, Floating s, InnerSpace s) => Warp1 s -> HeightField s
revolveH = (. magnitude)
rippleH :: (Eq s, Floating s, InnerSpace s) => HeightField s
rippleH = revolveH sinU
-- | Simple ripply pond shape
ripple :: (AdditiveGroup s, Eq s, Floating s) => Surf s
ripple = -- onXY' (2 *^) $
revolve (const (0.5 :+ 0) - fcurve sinU)
-- | Apply a displacement map at a value
displaceV :: (InnerSpace v, s ~ Scalar v, Floating s, HasNormal v) =>
v -> Scalar v -> v
displaceV v s = v ^+^ s *^ normal v
-- | Apply a displacement map to a function (e.g., 'Curve2' or 'Surf') or
-- other container.
displace :: (InnerSpace v, Scalar v ~ s, Floating s, HasNormal v, Applicative f) =>
f v -> f (Scalar v) -> f v
displace = liftA2 displaceV
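-- For instance, a shallow egg-crate bump map over the plane could be
-- sketched as
--
-- > bumpyPlane = xyPlane `displace` fmap (* 0.1) eggcrateH
--
-- (a sketch only: it assumes the chosen scalar type has the InnerSpace and
-- HasNormal instances that 'displaceV' needs; the 0.1 factor merely keeps
-- the bumps shallow).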
---- Misc
-- TODO: Reconcile this version with the one in Image
rotate :: Floating s => s -> Warp2 s
rotate theta = \ (x :+ y) -> (x * c - y * s) :+ (y * c + x * s)
where c = cos theta
s = sin theta
addX, addY, addZ :: Num s => (a -> Complex s) -> (a -> (s,s,s))
addX = fmap (\ (y :+ z) -> (0,y,z))
addY = fmap (\ (x :+ z) -> (x,0,z))
addZ = fmap (\ (x :+ y) -> (x,y,0))
addYZ,addXZ,addXY :: Num s => (a -> s) -> (a -> (s,s,s))
addYZ = fmap (\ x -> (x,0,0))
addXZ = fmap (\ y -> (0,y,0))
addXY = fmap (\ z -> (0,0,z))
onX,onY,onZ :: Warp1 s -> Warp3 s
onX f (x,y,z) = (f x, y, z)
onY f (x,y,z) = (x, f y, z)
onZ f (x,y,z) = (x, y, f z)
onXY,onYZ,onXZ :: Warp2 s -> Warp3 s
onXY f (x,y,z) = (x',y',z ) where x' :+ y' = f (x :+ y)
onXZ f (x,y,z) = (x',y ,z') where x' :+ z' = f (x :+ z)
onYZ f (x,y,z) = (x ,y',z') where y' :+ z' = f (y :+ z)
onX',onY',onZ' :: Warp1 s -> (a -> (s,s,s)) -> (a -> (s,s,s))
onX' = fmap . onX
onY' = fmap . onY
onZ' = fmap . onZ
onXY',onXZ',onYZ' :: Warp2 s -> (a -> (s,s,s)) -> (a -> (s,s,s))
onXY' = fmap . onXY
onXZ' = fmap . onXZ
onYZ' = fmap . onYZ
{--------------------------------------------------------------------
Normals and tessellation
--------------------------------------------------------------------}
-- -- | Derivative tower of point on a surface
-- type SurfPt = Exp R2 :> Exp R3
-- -- | Differentiable surface
-- type SurfD = Surf (Exp R2 :> Exp R)
-- -- | Vertex and normal
-- data VN = VN (Exp R3) (Exp R3)
-- -- No instances for (HasBasis (E V R2),
-- -- HasTrie (Basis (E V R2)),
-- -- HasNormal SurfPt)
-- toVN :: SurfPt -> VN
-- toVN v = VN (powVal v) (powVal (normal v))
-- TODO: move to Exp and remove -fno-warn-orphans
type V2 a = (a,a)
type V3 a = (a,a,a)
type ER = FloatE
type ER2 = V2 ER
type ER3 = V3 ER
instance HasBasis FloatE where
type Basis FloatE = ()
basisValue () = 1
decompose s = [((),s)]
decompose' s = const s
instance HasBasis R2E where
  type Basis R2E = Basis ER2
  basisValue b = vec2 x y where (x,y) = basisValue b
  decompose w = decompose (getX w, getY w)
  decompose' w = (w <.>) . basisValue
-- TODO: are these instances used?
-- TODO: move these two HasBasis orphans elsewhere.
-- instance IsNat n => HasBasis (VecE n R) where
-- type Basis (VecE n R) = n
-- basisValue = ???
-- TODO: fill out this definition. How to enumerate a basis for Vec n R,
-- for arbitrary IsNat n?
type TR = ER :> ER -- tower
type T = ER2 :> ER
-- Standard do-nothing transformation
instance ITrans (Complex T) T where (*:) = const id
-- | Derivative towers of point on a surface
type SurfPt = V3 T
-- type SurfPt = ER2 :> ER3
-- | Differentiable surface
type SurfD = Surf T
-- -- | Vertex and normal
-- data VN = VN ER3 ER3
-- powVal3 :: V3 (a :> b) -> V3 b
-- powVal3 (q,r,s) = (powVal q, powVal r, powVal s)
-- toVN :: SurfPt -> VN
-- toVN v = VN (powVal3 v) (powVal3 (normal v))
-- -- type SurfV = ER2 :~> ER3
-- type SurfVN = ER2 -> VN
-- -- or
-- -- type SurfVN = Exp R2 -> (Exp R3, Exp R3)
-- surfVN :: SurfD -> SurfVN
-- surfVN f p = toVN (f (fstD p, sndD p))
-- | Vertex and normal
type VN = (R3E, R3E)
toVN :: SurfPt -> VN
toVN = p3 &&& (p3 . normal)
where
p3 (q,r,s) = vec3 (powVal q) (powVal r) (powVal s)
-- type SurfV = ER2 :~> ER3
type SurfVN = R2E -> VN
surfVN :: SurfD -> SurfVN
surfVN f p = toVN (f (fstD p' :+ sndD p'))
where
p' = (getX p, getY p)
| conal/shady-graphics | src/Shady/ParamSurf.hs | agpl-3.0 | 8,241 | 0 | 12 | 1,934 | 2,676 | 1,495 | 1,181 | 131 | 1 |
import System.Plugins
import API
src = "../Plugin.hs"
wrap = "../Wrapper.hs"
apipath = "../api"
main = do status <- make src ["-i"++apipath]
case status of
MakeSuccess _ _ -> f
MakeFailure e -> mapM_ putStrLn e
where f = do v <- pdynload "../Plugin.o" ["../api"] [] "API.Interface" "resource"
case v of
LoadSuccess _ a -> let fn = function a in print $ 1 `fn` 2
LoadFailure e -> mapM_ putStrLn e
| Changaco/haskell-plugins | testsuite/pdynload/poly1/prog/Main.hs | lgpl-2.1 | 508 | 4 | 14 | 183 | 179 | 81 | 98 | 13 | 3 |
module BSDF (BSDF (Blinn, Lambertian), at, sample, add, scale, (&*), (&+)) where
import Vectors
import DifferentialGeometry
data BSDF =
Lambertian |
Blinn Float |
Scaled Spectrum BSDF |
Composite [BSDF]
at :: BSDF -> DifferentialGeometry -> Vec3 -> Vec3 -> Spectrum
at bsdf dg i o = locAt bsdf (worldToLoc i) (worldToLoc o)
where
locAt (Lambertian) i o
| cosTheta i > 0 && cosTheta o > 0 = vof (1 / pi)
| otherwise = vof 0
locAt (Blinn exponent) i o
| cosTheta i > 0 && cosTheta o > 0 =
let h = norm (i + o)
cosThetaO = absCosTheta o
cosThetaI = absCosTheta i
cosThetaH = i .* h
oDotH = o .* h
d = (exponent+2) * (absCosTheta h ** exponent) / (2*pi)
g = min 1 $ min (2 * cosThetaH * cosThetaO / oDotH)
(2 * cosThetaH * cosThetaI / oDotH)
f = fresnel cosThetaH
in vof $ d * g * f / (4 * cosThetaI * cosThetaO)
| otherwise = vof 0
locAt (Scaled s bsdf) i o = s * locAt bsdf i o
locAt (Composite bsdfs) i o = sum $ map (\bdsf -> locAt bdsf i o) bsdfs
fresnel ct = 1 -- TODO
nn@(Vec3 nnx nny nnz) = dgNormal dg
sn@(Vec3 snx sny snz) = norm $ dgDPDU dg
tn@(Vec3 tnx tny tnz) = nn `cross` sn
worldToLoc v = Vec3 (v .* sn) (v .* tn) (v .* nn)
locToWorld (Vec3 vx vy vz) = Vec3 (snx * vx + tnx * vy + nnx * vz)
(sny * vx + tny * vy + nny * vz)
(snz * vx + tnz * vy + nnz * vz)
cosTheta (Vec3 _ _ z) = z
absCosTheta (Vec3 _ _ z) = abs z
sample :: BSDF -> Vec3 -> Vec3 -> [Vec3]
sample _ _ _ = []
scale :: Spectrum -> BSDF -> BSDF
scale s' (Scaled s wrapped) = Scaled (s' * s) wrapped
scale s wrap = Scaled s wrap
add :: BSDF -> BSDF -> BSDF
add (Composite xs) (Composite ys) = Composite (xs ++ ys)
add (Composite xs) x = Composite (x:xs)
add x (Composite xs) = Composite (x:xs)
add x y = Composite ([x, y])
(&*) :: Spectrum -> BSDF -> BSDF
(&*) = scale
infixl 7 &*
(&+) :: BSDF -> BSDF -> BSDF
(&+) = add
infixl 6 &+
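-- For example, a plastic-like material could be assembled from these
-- combinators (the weights are arbitrary, and 'vof' is assumed to build a
-- constant spectrum, as it is used above):
--
-- > plastic :: BSDF
-- > plastic = vof 0.8 &* Lambertian &+ vof 0.2 &* Blinn 50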
| pstiasny/mgr | BSDF.hs | lgpl-3.0 | 2,127 | 0 | 17 | 722 | 1,004 | 524 | 480 | 56 | 4 |
module Numerical.SLEPc.TestMain where
import Numerical.SLEPc.Raw.PutGet
import Numerical.SLEPc.Raw.Types
t1' = withMatCreateSeqAIJVarNZPR cw 3 3 [1,1,1] $ \pm -> do
let m = petscMatrixMat pm
matNewNZallocErrorOff m
matSetup m
matSetValuesSafe pm [0,1,2] [0,1,2] (replicate 3 pi) InsertValues
matAssembly m
matViewStdout m
where
cw = commWorld
t1 = withSlepc0 t1'
t2' = withMatCreateSeqAIJConstNZPR cw 3 3 3 $ \pm -> do
let m = petscMatrixMat pm
matNewNZallocErrorOff m
matSetup m
-- matSetValueSafe pm 1 1 pi InsertValues
-- matSetValueArraySafe pm [0,1,2] [0,1,2] (replicate 3 pi) InsertValues
matSetValueArraySafe pm [0,0,0,1,1,1,2,2,2] [0,1,2,0,1,2,0,1,2] (replicate 9 pi) InsertValues
matAssembly m
matViewStdout m
where
cw = commWorld
t2 = withSlepc0 t2'
t3' = withMatCreateSeqAIJConstNZPR cw 3 3 3 $ \pm -> do
let m = petscMatrixMat pm -- last time copying all this stuff, will abstract out
matNewNZallocErrorOff m -- <- need to figure this one out
matSetup m -- <- this should be a given
matSetValueArraySafe pm [0,0,0,1,1,1,2,2,2] [0,1,2,0,1,2,0,1,2] (replicate 9 pi) InsertValues
matAssembly m
withEpsSetupSolve cw m m EpsHep $ \eps ->
epsIsHermitian eps
where
cw = commWorld
t3 = withSlepc0 t3'
| ocramz/slepc-hs | src/Numerical/SLEPc/TestMain.hs | lgpl-3.0 | 1,292 | 0 | 12 | 263 | 467 | 248 | 219 | 31 | 1 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Haddock.Utils
-- Copyright : (c) The University of Glasgow 2001-2002,
-- Simon Marlow 2003-2006,
-- David Waern 2006-2009
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
-----------------------------------------------------------------------------
module Haddock.Utils (
-- * Misc utilities
restrictTo, emptyHsQTvs,
toDescription, toInstalledDescription,
mkEmptySigWcType, addClassContext, lHsQTyVarsToTypes,
-- * Filename utilities
moduleHtmlFile, moduleHtmlFile',
contentsHtmlFile, indexHtmlFile,
frameIndexHtmlFile,
moduleIndexFrameName, mainFrameName, synopsisFrameName,
subIndexHtmlFile,
jsFile, framesFile,
-- * Anchor and URL utilities
moduleNameUrl, moduleNameUrl', moduleUrl,
nameAnchorId,
makeAnchorId,
-- * Miscellaneous utilities
getProgramName, bye, die, dieMsg, noDieMsg, mapSnd, mapMaybeM, escapeStr,
-- * HTML cross reference mapping
html_xrefs_ref, html_xrefs_ref',
-- * Doc markup
markup,
idMarkup,
mkMeta,
-- * List utilities
replace,
spanWith,
-- * MTL stuff
MonadIO(..),
-- * Logging
parseVerbosity,
out,
-- * System tools
getProcessID
) where
import Documentation.Haddock.Doc (emptyMetaDoc)
import Haddock.Types
import Haddock.GhcUtils
import GHC
import Name
import HsTypes (selectorFieldOcc)
import Control.Monad ( liftM )
import Data.Char ( isAlpha, isAlphaNum, isAscii, ord, chr )
import Numeric ( showIntAtBase )
import Data.Map ( Map )
import qualified Data.Map as Map hiding ( Map )
import Data.IORef ( IORef, newIORef, readIORef )
import Data.List ( isSuffixOf )
import Data.Maybe ( mapMaybe )
import System.Environment ( getProgName )
import System.Exit
import System.IO ( hPutStr, stderr )
import System.IO.Unsafe ( unsafePerformIO )
import qualified System.FilePath.Posix as HtmlPath
import Distribution.Verbosity
import Distribution.ReadE
#ifndef mingw32_HOST_OS
import qualified System.Posix.Internals
#endif
import MonadUtils ( MonadIO(..) )
--------------------------------------------------------------------------------
-- * Logging
--------------------------------------------------------------------------------
parseVerbosity :: String -> Either String Verbosity
parseVerbosity = runReadE flagToVerbosity
-- | Print a message to stdout, if it is not too verbose
out :: MonadIO m
=> Verbosity -- ^ program verbosity
-> Verbosity -- ^ message verbosity
-> String -> m ()
out progVerbosity msgVerbosity msg
| msgVerbosity <= progVerbosity = liftIO $ putStrLn msg
| otherwise = return ()
--------------------------------------------------------------------------------
-- * Some Utilities
--------------------------------------------------------------------------------
-- | Extract a module's short description.
toDescription :: Interface -> Maybe (MDoc Name)
toDescription = fmap mkMeta . hmi_description . ifaceInfo
-- | Extract a module's short description.
toInstalledDescription :: InstalledInterface -> Maybe (MDoc Name)
toInstalledDescription = fmap mkMeta . hmi_description . instInfo
mkMeta :: Doc a -> MDoc a
mkMeta x = emptyMetaDoc { _doc = x }
mkEmptySigWcType :: LHsType Name -> LHsSigWcType Name
-- Dubious, because the implicit binders are empty even
-- though the type might have free variables
mkEmptySigWcType ty = mkEmptyImplicitBndrs (mkEmptyWildCardBndrs ty)
addClassContext :: Name -> LHsQTyVars Name -> LSig Name -> LSig Name
-- Add the class context to a class-op signature
addClassContext cls tvs0 (L pos (ClassOpSig _ lname ltype))
= L pos (TypeSig lname (mkEmptySigWcType (go (hsSigType ltype))))
-- The mkEmptySigWcType is suspicious
where
go (L loc (HsForAllTy { hst_bndrs = tvs, hst_body = ty }))
= L loc (HsForAllTy { hst_bndrs = tvs, hst_body = go ty })
go (L loc (HsQualTy { hst_ctxt = ctxt, hst_body = ty }))
= L loc (HsQualTy { hst_ctxt = add_ctxt ctxt, hst_body = ty })
go (L loc ty)
= L loc (HsQualTy { hst_ctxt = add_ctxt (L loc []), hst_body = L loc ty })
extra_pred = nlHsTyConApp cls (lHsQTyVarsToTypes tvs0)
add_ctxt (L loc preds) = L loc (extra_pred : preds)
addClassContext _ _ sig = sig -- E.g. a MinimalSig is fine
lHsQTyVarsToTypes :: LHsQTyVars Name -> [LHsType Name]
lHsQTyVarsToTypes tvs
= [ noLoc (HsTyVar (noLoc (hsLTyVarName tv)))
| tv <- hsQTvExplicit tvs ]
--------------------------------------------------------------------------------
-- * Making abstract declarations
--------------------------------------------------------------------------------
restrictTo :: [Name] -> LHsDecl Name -> LHsDecl Name
restrictTo names (L loc decl) = L loc $ case decl of
TyClD d | isDataDecl d ->
TyClD (d { tcdDataDefn = restrictDataDefn names (tcdDataDefn d) })
TyClD d | isClassDecl d ->
TyClD (d { tcdSigs = restrictDecls names (tcdSigs d),
tcdATs = restrictATs names (tcdATs d) })
_ -> decl
restrictDataDefn :: [Name] -> HsDataDefn Name -> HsDataDefn Name
restrictDataDefn names defn@(HsDataDefn { dd_ND = new_or_data, dd_cons = cons })
| DataType <- new_or_data
= defn { dd_cons = restrictCons names cons }
| otherwise -- Newtype
= case restrictCons names cons of
[] -> defn { dd_ND = DataType, dd_cons = [] }
[con] -> defn { dd_cons = [con] }
_ -> error "Should not happen"
restrictCons :: [Name] -> [LConDecl Name] -> [LConDecl Name]
restrictCons names decls = [ L p d | L p (Just d) <- map (fmap keep) decls ]
where
keep d | any (\n -> n `elem` names) (map unLoc $ getConNames d) =
case getConDetails h98d of
PrefixCon _ -> Just d
RecCon fields
| all field_avail (unL fields) -> Just d
| otherwise -> Just (h98d { con_details = PrefixCon (field_types (map unL (unL fields))) })
-- if we have *all* the field names available, then
-- keep the record declaration. Otherwise degrade to
-- a constructor declaration. This isn't quite right, but
-- it's the best we can do.
InfixCon _ _ -> Just d
where
h98d = h98ConDecl d
h98ConDecl c@ConDeclH98{} = c
h98ConDecl c@ConDeclGADT{} = c'
where
(details,_res_ty,cxt,tvs) = gadtDeclDetails (con_type c)
c' :: ConDecl Name
c' = ConDeclH98
{ con_name = head (con_names c)
, con_qvars = Just $ HsQTvs { hsq_implicit = mempty
, hsq_explicit = tvs }
, con_cxt = Just cxt
, con_details = details
, con_doc = con_doc c
}
field_avail :: LConDeclField Name -> Bool
field_avail (L _ (ConDeclField fs _ _))
= all (\f -> selectorFieldOcc (unLoc f) `elem` names) fs
field_types flds = [ t | ConDeclField _ t _ <- flds ]
keep _ = Nothing
restrictDecls :: [Name] -> [LSig Name] -> [LSig Name]
restrictDecls names = mapMaybe (filterLSigNames (`elem` names))
restrictATs :: [Name] -> [LFamilyDecl Name] -> [LFamilyDecl Name]
restrictATs names ats = [ at | at <- ats , unL (fdLName (unL at)) `elem` names ]
emptyHsQTvs :: LHsQTyVars Name
-- This function is here, rather than in HsTypes, because it is *renamed*, but
-- does not necessarily have all the right kind variables. It is used
-- in Haddock just for printing, so it doesn't matter
emptyHsQTvs = HsQTvs { hsq_implicit = error "haddock:emptyHsQTvs"
, hsq_explicit = [] }
--------------------------------------------------------------------------------
-- * Filename mangling functions stolen from GHC's main/DriverUtil.lhs.
--------------------------------------------------------------------------------
baseName :: ModuleName -> FilePath
baseName = map (\c -> if c == '.' then '-' else c) . moduleNameString
moduleHtmlFile :: Module -> FilePath
moduleHtmlFile mdl =
case Map.lookup mdl html_xrefs of
Nothing -> baseName mdl' ++ ".html"
Just fp0 -> HtmlPath.joinPath [fp0, baseName mdl' ++ ".html"]
where
mdl' = moduleName mdl
moduleHtmlFile' :: ModuleName -> FilePath
moduleHtmlFile' mdl =
case Map.lookup mdl html_xrefs' of
Nothing -> baseName mdl ++ ".html"
Just fp0 -> HtmlPath.joinPath [fp0, baseName mdl ++ ".html"]
contentsHtmlFile, indexHtmlFile :: String
contentsHtmlFile = "index.html"
indexHtmlFile = "doc-index.html"
-- | The name of the module index file to be displayed inside a frame.
-- Modules are displayed in full, but without indentation. Clicking opens in
-- the main window.
frameIndexHtmlFile :: String
frameIndexHtmlFile = "index-frames.html"
moduleIndexFrameName, mainFrameName, synopsisFrameName :: String
moduleIndexFrameName = "modules"
mainFrameName = "main"
synopsisFrameName = "synopsis"
subIndexHtmlFile :: String -> String
subIndexHtmlFile ls = "doc-index-" ++ b ++ ".html"
where b | all isAlpha ls = ls
| otherwise = concatMap (show . ord) ls
-------------------------------------------------------------------------------
-- * Anchor and URL utilities
--
-- NB: Anchor IDs, used as the destination of a link within a document must
-- conform to XML's NAME production. That, taken with XHTML and HTML 4.01's
-- various needs and compatibility constraints, means these IDs have to match:
-- [A-Za-z][A-Za-z0-9:_.-]*
-- Such IDs do not need to be escaped in any way when used as the fragment part
-- of a URL. Indeed, %-escaping them can lead to compatibility issues as it
-- isn't clear if such fragment identifiers should, or should not be unescaped
-- before being matched with IDs in the target document.
-------------------------------------------------------------------------------
moduleUrl :: Module -> String
moduleUrl = moduleHtmlFile
moduleNameUrl :: Module -> OccName -> String
moduleNameUrl mdl n = moduleUrl mdl ++ '#' : nameAnchorId n
moduleNameUrl' :: ModuleName -> OccName -> String
moduleNameUrl' mdl n = moduleHtmlFile' mdl ++ '#' : nameAnchorId n
nameAnchorId :: OccName -> String
nameAnchorId name = makeAnchorId (prefix : ':' : occNameString name)
where prefix | isValOcc name = 'v'
| otherwise = 't'
-- | Takes an arbitrary string and makes it a valid anchor ID. The mapping is
-- identity preserving.
makeAnchorId :: String -> String
makeAnchorId [] = []
makeAnchorId (f:r) = escape isAlpha f ++ concatMap (escape isLegal) r
where
escape p c | p c = [c]
| otherwise = '-' : show (ord c) ++ "-"
isLegal ':' = True
isLegal '_' = True
isLegal '.' = True
isLegal c = isAscii c && isAlphaNum c
-- NB: '-' is legal in IDs, but we use it as the escape char
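-- Illustrative examples, derived from the escaping rules above:
--
-- > makeAnchorId "vfoo" == "vfoo"    -- already a valid ID, kept as is
-- > makeAnchorId "f!"   == "f-33-"   -- '!' (character code 33) is escaped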
-------------------------------------------------------------------------------
-- * Files we need to copy from our $libdir
-------------------------------------------------------------------------------
jsFile, framesFile :: String
jsFile = "haddock-util.js"
framesFile = "frames.html"
-------------------------------------------------------------------------------
-- * Misc.
-------------------------------------------------------------------------------
getProgramName :: IO String
getProgramName = liftM (`withoutSuffix` ".bin") getProgName
where str `withoutSuffix` suff
| suff `isSuffixOf` str = take (length str - length suff) str
| otherwise = str
bye :: String -> IO a
bye s = putStr s >> exitSuccess
dieMsg :: String -> IO ()
dieMsg s = getProgramName >>= \prog -> die (prog ++ ": " ++ s)
noDieMsg :: String -> IO ()
noDieMsg s = getProgramName >>= \prog -> hPutStr stderr (prog ++ ": " ++ s)
mapSnd :: (b -> c) -> [(a,b)] -> [(a,c)]
mapSnd _ [] = []
mapSnd f ((x,y):xs) = (x,f y) : mapSnd f xs
mapMaybeM :: Monad m => (a -> m b) -> Maybe a -> m (Maybe b)
mapMaybeM _ Nothing = return Nothing
mapMaybeM f (Just a) = liftM Just (f a)
escapeStr :: String -> String
escapeStr = escapeURIString isUnreserved
-- The following few functions are copy'n'pasted from the Network.URI module
-- to avoid depending on the network lib, since doing so gives a
-- circular build dependency between haddock and network
-- (at least if you want to build network with haddock docs)
escapeURIChar :: (Char -> Bool) -> Char -> String
escapeURIChar p c
| p c = [c]
| otherwise = '%' : myShowHex (ord c) ""
where
myShowHex :: Int -> ShowS
myShowHex n r = case showIntAtBase 16 toChrHex n r of
[] -> "00"
[a] -> ['0',a]
cs -> cs
toChrHex d
| d < 10 = chr (ord '0' + fromIntegral d)
| otherwise = chr (ord 'A' + fromIntegral (d - 10))
escapeURIString :: (Char -> Bool) -> String -> String
escapeURIString = concatMap . escapeURIChar
isUnreserved :: Char -> Bool
isUnreserved c = isAlphaNumChar c || (c `elem` "-_.~")
isAlphaChar, isDigitChar, isAlphaNumChar :: Char -> Bool
isAlphaChar c = (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z')
isDigitChar c = c >= '0' && c <= '9'
isAlphaNumChar c = isAlphaChar c || isDigitChar c
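-- Putting the helpers above together, 'escapeStr' should behave roughly like
-- this (illustrative examples only):
--
-- > escapeStr "a b+c"       == "a%20b%2Bc"
-- > escapeStr "foo-bar_baz" == "foo-bar_baz"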
-----------------------------------------------------------------------------
-- * HTML cross references
--
-- For each module, we need to know where its HTML documentation lives
-- so that we can point hyperlinks to it. It is extremely
-- inconvenient to plumb this information to all the places that need
-- it (basically every function in HaddockHtml), and furthermore the
-- mapping is constant for any single run of Haddock. So for the time
-- being I'm going to use a write-once global variable.
-----------------------------------------------------------------------------
{-# NOINLINE html_xrefs_ref #-}
html_xrefs_ref :: IORef (Map Module FilePath)
html_xrefs_ref = unsafePerformIO (newIORef (error "module_map"))
{-# NOINLINE html_xrefs_ref' #-}
html_xrefs_ref' :: IORef (Map ModuleName FilePath)
html_xrefs_ref' = unsafePerformIO (newIORef (error "module_map"))
{-# NOINLINE html_xrefs #-}
html_xrefs :: Map Module FilePath
html_xrefs = unsafePerformIO (readIORef html_xrefs_ref)
{-# NOINLINE html_xrefs' #-}
html_xrefs' :: Map ModuleName FilePath
html_xrefs' = unsafePerformIO (readIORef html_xrefs_ref')
-----------------------------------------------------------------------------
-- * List utils
-----------------------------------------------------------------------------
replace :: Eq a => a -> a -> [a] -> [a]
replace a b = map (\x -> if x == a then b else x)
spanWith :: (a -> Maybe b) -> [a] -> ([b],[a])
spanWith _ [] = ([],[])
spanWith p xs@(a:as)
| Just b <- p a = let (bs,cs) = spanWith p as in (b:bs,cs)
| otherwise = ([],xs)
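-- Illustrative uses of the two list helpers above:
--
-- > replace 'a' 'o' "banana" == "bonono"
-- > spanWith (\x -> if even x then Just (x * 2) else Nothing) [2,4,5,6]
-- >   == ([4,8], [5,6])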
-----------------------------------------------------------------------------
-- * Put here temporarily
-----------------------------------------------------------------------------
markup :: DocMarkup id a -> Doc id -> a
markup m DocEmpty = markupEmpty m
markup m (DocAppend d1 d2) = markupAppend m (markup m d1) (markup m d2)
markup m (DocString s) = markupString m s
markup m (DocParagraph d) = markupParagraph m (markup m d)
markup m (DocIdentifier x) = markupIdentifier m x
markup m (DocIdentifierUnchecked x) = markupIdentifierUnchecked m x
markup m (DocModule mod0) = markupModule m mod0
markup m (DocWarning d) = markupWarning m (markup m d)
markup m (DocEmphasis d) = markupEmphasis m (markup m d)
markup m (DocBold d) = markupBold m (markup m d)
markup m (DocMonospaced d) = markupMonospaced m (markup m d)
markup m (DocUnorderedList ds) = markupUnorderedList m (map (markup m) ds)
markup m (DocOrderedList ds) = markupOrderedList m (map (markup m) ds)
markup m (DocDefList ds) = markupDefList m (map (markupPair m) ds)
markup m (DocCodeBlock d) = markupCodeBlock m (markup m d)
markup m (DocHyperlink l) = markupHyperlink m l
markup m (DocAName ref) = markupAName m ref
markup m (DocPic img) = markupPic m img
markup m (DocMathInline mathjax) = markupMathInline m mathjax
markup m (DocMathDisplay mathjax) = markupMathDisplay m mathjax
markup m (DocProperty p) = markupProperty m p
markup m (DocExamples e) = markupExample m e
markup m (DocHeader (Header l t)) = markupHeader m (Header l (markup m t))
markupPair :: DocMarkup id a -> (Doc id, Doc id) -> (a, a)
markupPair m (a,b) = (markup m a, markup m b)
-- | The identity markup
idMarkup :: DocMarkup a (Doc a)
idMarkup = Markup {
markupEmpty = DocEmpty,
markupString = DocString,
markupParagraph = DocParagraph,
markupAppend = DocAppend,
markupIdentifier = DocIdentifier,
markupIdentifierUnchecked = DocIdentifierUnchecked,
markupModule = DocModule,
markupWarning = DocWarning,
markupEmphasis = DocEmphasis,
markupBold = DocBold,
markupMonospaced = DocMonospaced,
markupUnorderedList = DocUnorderedList,
markupOrderedList = DocOrderedList,
markupDefList = DocDefList,
markupCodeBlock = DocCodeBlock,
markupHyperlink = DocHyperlink,
markupAName = DocAName,
markupPic = DocPic,
markupMathInline = DocMathInline,
markupMathDisplay = DocMathDisplay,
markupProperty = DocProperty,
markupExample = DocExamples,
markupHeader = DocHeader
}
-----------------------------------------------------------------------------
-- * System tools
-----------------------------------------------------------------------------
#ifdef mingw32_HOST_OS
foreign import ccall unsafe "_getpid" getProcessID :: IO Int -- relies on Int == Int32 on Windows
#else
getProcessID :: IO Int
getProcessID = fmap fromIntegral System.Posix.Internals.c_getpid
#endif
| randen/haddock | haddock-api/src/Haddock/Utils.hs | bsd-2-clause | 18,236 | 0 | 22 | 4,046 | 4,567 | 2,440 | 2,127 | 290 | 5 |
-- |
-- Module : Text.Megaparsec.Char
-- Copyright : © 2015 Megaparsec contributors
-- © 2007 Paolo Martini
-- © 1999–2001 Daan Leijen
-- License : BSD3
--
-- Maintainer : Mark Karpov <[email protected]>
-- Stability : experimental
-- Portability : portable
--
-- Commonly used character parsers.
module Text.Megaparsec.Char
( -- * Simple parsers
newline
, crlf
, eol
, tab
, space
-- * Categories of characters
, controlChar
, spaceChar
, upperChar
, lowerChar
, letterChar
, alphaNumChar
, printChar
, digitChar
, octDigitChar
, hexDigitChar
, markChar
, numberChar
, punctuationChar
, symbolChar
, separatorChar
, asciiChar
, latin1Char
, charCategory
, categoryName
-- * More general parsers
, char
, char'
, anyChar
, oneOf
, oneOf'
, noneOf
, noneOf'
, satisfy
-- * Sequence of characters
, string
, string' )
where
import Control.Applicative ((<|>))
import Data.Char
import Data.List (nub)
import Data.Maybe (fromJust)
import Text.Megaparsec.Combinator
import Text.Megaparsec.Error (Message (..))
import Text.Megaparsec.Pos
import Text.Megaparsec.Prim
import Text.Megaparsec.ShowToken
-- | Parses a newline character.
newline :: Stream s m Char => ParsecT s u m Char
newline = char '\n' <?> "newline"
-- | Parses a carriage return character followed by a newline
-- character. Returns the sequence of characters parsed.
crlf :: Stream s m Char => ParsecT s u m String
crlf = string "\r\n"
-- | Parses a CRLF (see 'crlf') or LF (see 'newline') end of line.
-- Returns the sequence of characters parsed.
--
-- > eol = (pure <$> newline) <|> crlf
eol :: Stream s m Char => ParsecT s u m String
eol = (pure <$> newline) <|> crlf <?> "end of line"
-- | Parses a tab character.
tab :: Stream s m Char => ParsecT s u m Char
tab = char '\t' <?> "tab"
-- | Skips /zero/ or more white space characters. See also 'skipMany' and
-- 'spaceChar'.
space :: Stream s m Char => ParsecT s u m ()
space = skipMany spaceChar
-- | Parses control characters, which are the non-printing characters of the
-- Latin-1 subset of Unicode.
controlChar :: Stream s m Char => ParsecT s u m Char
controlChar = satisfy isControl <?> "control character"
-- | Parses a Unicode space character, and the control characters: tab,
-- newline, carriage return, form feed, and vertical tab.
spaceChar :: Stream s m Char => ParsecT s u m Char
spaceChar = satisfy isSpace <?> "white space"
-- | Parses an upper-case or title-case alphabetic Unicode character. Title
-- case is used by a small number of letter ligatures like the
-- single-character form of Lj.
upperChar :: Stream s m Char => ParsecT s u m Char
upperChar = satisfy isUpper <?> "uppercase letter"
-- | Parses a lower-case alphabetic Unicode character.
lowerChar :: Stream s m Char => ParsecT s u m Char
lowerChar = satisfy isLower <?> "lowercase letter"
-- | Parses alphabetic Unicode characters: lower-case, upper-case and
-- title-case letters, plus letters of case-less scripts and modifiers
-- letters.
letterChar :: Stream s m Char => ParsecT s u m Char
letterChar = satisfy isLetter <?> "letter"
-- | Parses alphabetic or numeric digit Unicode characters.
--
-- Note that numeric digits outside the ASCII range are parsed by this
-- parser but not by 'digitChar'. Such digits may be part of identifiers but
-- are not used by the printer and reader to represent numbers.
alphaNumChar :: Stream s m Char => ParsecT s u m Char
alphaNumChar = satisfy isAlphaNum <?> "alphanumeric character"
-- | Parses printable Unicode characters: letters, numbers, marks,
-- punctuation, symbols and spaces.
printChar :: Stream s m Char => ParsecT s u m Char
printChar = satisfy isPrint <?> "printable character"
-- | Parses an ASCII digit, i.e. between “0” and “9”.
digitChar :: Stream s m Char => ParsecT s u m Char
digitChar = satisfy isDigit <?> "digit"
-- | Parses an octal digit, i.e. between “0” and “7”.
octDigitChar :: Stream s m Char => ParsecT s u m Char
octDigitChar = satisfy isOctDigit <?> "octal digit"
-- | Parses a hexadecimal digit, i.e. between “0” and “9”, or “a” and “f”,
-- or “A” and “F”.
hexDigitChar :: Stream s m Char => ParsecT s u m Char
hexDigitChar = satisfy isHexDigit <?> "hexadecimal digit"
-- | Parses Unicode mark characters, for example accents and the like, which
-- combine with preceding characters.
markChar :: Stream s m Char => ParsecT s u m Char
markChar = satisfy isMark <?> "mark character"
-- | Parses Unicode numeric characters, including digits from various
-- scripts, Roman numerals, et cetera.
numberChar :: Stream s m Char => ParsecT s u m Char
numberChar = satisfy isNumber <?> "numeric character"
-- | Parses Unicode punctuation characters, including various kinds of
-- connectors, brackets and quotes.
punctuationChar :: Stream s m Char => ParsecT s u m Char
punctuationChar = satisfy isPunctuation <?> "punctuation"
-- | Parses Unicode symbol characters, including mathematical and currency
-- symbols.
symbolChar :: Stream s m Char => ParsecT s u m Char
symbolChar = satisfy isSymbol <?> "symbol"
-- | Parses Unicode space and separator characters.
separatorChar :: Stream s m Char => ParsecT s u m Char
separatorChar = satisfy isSeparator <?> "separator"
-- | Parses a character from the first 128 characters of the Unicode character set,
-- corresponding to the ASCII character set.
asciiChar :: Stream s m Char => ParsecT s u m Char
asciiChar = satisfy isAscii <?> "ASCII character"
-- | Parses a character from the first 256 characters of the Unicode
-- character set, corresponding to the ISO 8859-1 (Latin-1) character set.
latin1Char :: Stream s m Char => ParsecT s u m Char
latin1Char = satisfy isLatin1 <?> "Latin-1 character"
-- | @charCategory cat@ parses a character in Unicode General Category @cat@,
-- see 'Data.Char.GeneralCategory'.
charCategory :: Stream s m Char => GeneralCategory -> ParsecT s u m Char
charCategory cat = satisfy ((== cat) . generalCategory) <?> categoryName cat
-- | Returns human-readable name of Unicode General Category.
categoryName :: GeneralCategory -> String
categoryName cat =
fromJust $ lookup cat
[ (UppercaseLetter , "uppercase letter")
, (LowercaseLetter , "lowercase letter")
, (TitlecaseLetter , "titlecase letter")
, (ModifierLetter , "modifier letter")
, (OtherLetter , "other letter")
, (NonSpacingMark , "non-spacing mark")
, (SpacingCombiningMark, "spacing combining mark")
, (EnclosingMark , "enclosing mark")
, (DecimalNumber , "decimal number character")
, (LetterNumber , "letter number character")
, (OtherNumber , "other number character")
, (ConnectorPunctuation, "connector punctuation")
, (DashPunctuation , "dash punctuation")
, (OpenPunctuation , "open punctuation")
, (ClosePunctuation , "close punctuation")
, (InitialQuote , "initial quote")
, (FinalQuote , "final quote")
, (OtherPunctuation , "other punctuation")
, (MathSymbol , "math symbol")
, (CurrencySymbol , "currency symbol")
, (ModifierSymbol , "modifier symbol")
, (OtherSymbol , "other symbol")
, (Space , "white space")
, (LineSeparator , "line separator")
, (ParagraphSeparator , "paragraph separator")
, (Control , "control character")
, (Format , "format character")
, (Surrogate , "surrogate character")
, (PrivateUse , "private-use Unicode character")
, (NotAssigned , "non-assigned Unicode character") ]
-- | @char c@ parses a single character @c@.
--
-- > semicolon = char ';'
char :: Stream s m Char => Char -> ParsecT s u m Char
char c = satisfy (== c) <?> showToken c
-- | The same as 'char' but case-insensitive. This parser returns the
-- actually parsed character, preserving its case.
--
-- >>> parseTest (char' 'e') "E"
-- 'E'
-- >>> parseTest (char' 'e') "G"
-- parse error at line 1, column 1:
-- unexpected 'G'
-- expecting 'E' or 'e'
char' :: Stream s m Char => Char -> ParsecT s u m Char
char' = choice . fmap char . extendi . pure
-- | Extends the given list of characters, adding the uppercase version of
-- every lowercase character and vice versa. The resulting list is guaranteed
-- to have no duplicates.
extendi :: String -> String
extendi cs = nub (cs >>= f)
where f c | isLower c = [c, toUpper c]
| isUpper c = [c, toLower c]
| otherwise = [c]
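-- For instance (illustrative example):
--
-- > extendi "ab1" == "aAbB1"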
-- | This parser succeeds for any character. Returns the parsed character.
anyChar :: Stream s m Char => ParsecT s u m Char
anyChar = satisfy (const True) <?> "character"
-- | @oneOf cs@ succeeds if the current character is in the supplied
-- list of characters @cs@. Returns the parsed character. Note that this
-- parser doesn't automatically generate the “expected” component of the error
-- message, so usually you should label it manually with 'label' or
-- ('<?>').
--
-- See also 'satisfy'.
--
-- > digit = oneOf ['0'..'9'] <?> "digit"
oneOf :: Stream s m Char => String -> ParsecT s u m Char
oneOf cs = satisfy (`elem` cs)
-- | The same as 'oneOf', but case-insensitive. Returns the parsed character
-- preserving its case.
--
-- > vowel = oneOf' "aeiou" <?> "vowel"
oneOf' :: Stream s m Char => String -> ParsecT s u m Char
oneOf' = oneOf . extendi
-- | As the dual of 'oneOf', @noneOf cs@ succeeds if the current
-- character is /not/ in the supplied list of characters @cs@. Returns the
-- parsed character.
noneOf :: Stream s m Char => String -> ParsecT s u m Char
noneOf cs = satisfy (`notElem` cs)
-- | The same as 'noneOf', but case-insensitive.
--
-- > consonant = noneOf' "aeiou" <?> "consonant"
noneOf' :: Stream s m Char => String -> ParsecT s u m Char
noneOf' = noneOf . extendi
-- | The parser @satisfy f@ succeeds for any character for which the
-- supplied function @f@ returns 'True'. Returns the character that is
-- actually parsed.
--
-- > digitChar = satisfy isDigit <?> "digit"
-- > oneOf cs = satisfy (`elem` cs)
satisfy :: Stream s m Char => (Char -> Bool) -> ParsecT s u m Char
satisfy f = token nextPos testChar
where nextPos pos x _ = updatePosChar pos x
testChar x = if f x
then Right x
else Left . pure . Unexpected . showToken $ x
-- | @string s@ parses a sequence of characters given by @s@. Returns
-- the parsed string (i.e. @s@).
--
-- > divOrMod = string "div" <|> string "mod"
string :: Stream s m Char => String -> ParsecT s u m String
string = tokens updatePosString (==)
-- | The same as 'string', but case-insensitive. On success returns string
-- cased as argument of the function.
--
-- >>> parseTest (string' "foobar") "foObAr"
-- "foobar"
string' :: Stream s m Char => String -> ParsecT s u m String
string' = tokens updatePosString test
where test x y = toLower x == toLower y
| omefire/megaparsec | Text/Megaparsec/Char.hs | bsd-2-clause | 10,984 | 0 | 11 | 2,342 | 2,078 | 1,158 | 920 | -1 | -1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE BangPatterns, CPP, ForeignFunctionInterface,
ScopedTypeVariables #-}
-- |
-- Module : Criterion.Measurement
-- Copyright : (c) 2009-2014 Bryan O'Sullivan
--
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : GHC
--
-- Benchmark measurement code.
module Criterion.Measurement
(
initializeTime
, getTime
, getCPUTime
, getCycles
, getGCStats
, secs
, measure
, runBenchmark
, measured
, applyGCStats
, threshold
) where
import Criterion.Types (Benchmarkable(..), Measured(..))
import Data.Int (Int64)
import Data.List (unfoldr)
import Data.Word (Word64)
import GHC.Stats (GCStats(..))
import System.Mem (performGC)
import Text.Printf (printf)
import qualified Control.Exception as Exc
import qualified Data.Vector as V
import qualified GHC.Stats as Stats
-- | Try to get GC statistics, bearing in mind that the GHC runtime
-- will throw an exception if statistics collection was not enabled
-- using \"@+RTS -T@\".
getGCStats :: IO (Maybe GCStats)
getGCStats =
(Just `fmap` Stats.getGCStats) `Exc.catch` \(_::Exc.SomeException) ->
return Nothing
-- | Measure the execution of a benchmark a given number of times.
measure :: Benchmarkable -- ^ Operation to benchmark.
-> Int64 -- ^ Number of iterations.
-> IO (Measured, Double)
measure (Benchmarkable run) iters = do
startStats <- getGCStats
startTime <- getTime
startCpuTime <- getCPUTime
startCycles <- getCycles
run iters
endTime <- getTime
endCpuTime <- getCPUTime
endCycles <- getCycles
endStats <- getGCStats
let !m = applyGCStats endStats startStats $ measured {
measTime = max 0 (endTime - startTime)
, measCpuTime = max 0 (endCpuTime - startCpuTime)
, measCycles = max 0 (fromIntegral (endCycles - startCycles))
, measIters = iters
}
return (m, endTime)
{-# INLINE measure #-}
-- | The amount of time a benchmark must run for in order for us to
-- have some trust in the raw measurement.
--
-- We set this threshold so that we can generate enough data to later
-- perform meaningful statistical analyses.
--
-- The threshold is 30 milliseconds. One use of 'runBenchmark' must
-- accumulate more than 300 milliseconds of total measurements above
-- this threshold before it will finish.
threshold :: Double
threshold = 0.03
{-# INLINE threshold #-}
-- | Run a single benchmark, and return measurements collected while
-- executing it, along with the amount of time the measurement process
-- took.
runBenchmark :: Benchmarkable
-> Double
-- ^ Lower bound on how long the benchmarking process
-- should take. In practice, this time limit may be
-- exceeded in order to generate enough data to perform
-- meaningful statistical analyses.
-> IO (V.Vector Measured, Double)
runBenchmark bm@(Benchmarkable run) timeLimit = do
run 1
start <- performGC >> getTime
let loop [] !_ !_ _ = error "unpossible!"
loop (iters:niters) prev count acc = do
(m, endTime) <- measure bm iters
let overThresh = max 0 (measTime m - threshold) + prev
-- We try to honour the time limit, but we also have more
-- important constraints:
--
-- We must generate enough data that bootstrapping won't
-- simply crash.
--
-- We need to generate enough measurements that have long
-- spans of execution to outweigh the (rather high) cost of
-- measurement.
if endTime - start >= timeLimit &&
overThresh > threshold * 10 &&
count >= (4 :: Int)
then do
let !v = V.reverse (V.fromList acc)
return (v, endTime - start)
else loop niters overThresh (count+1) (m:acc)
loop (squish (unfoldr series 1)) 0 0 []
-- Our series starts its growth very slowly when we begin at 1, so we
-- eliminate repeated values.
squish :: (Eq a) => [a] -> [a]
squish ys = foldr go [] ys
where go x xs = x : dropWhile (==x) xs
series :: Double -> Maybe (Int64, Double)
series k = Just (truncate l, l)
where l = k * 1.05
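-- A small illustration of the two helpers above: 'squish' only drops
-- *consecutive* duplicates, and 'series' grows the iteration count by 5% per
-- step (examples worked out from the definitions, not benchmark output):
--
-- > squish [1,1,2,2,3,1] == [1,2,3,1]
-- > series 1 == Just (1, 1.05)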
-- | An empty structure.
measured :: Measured
measured = Measured {
measTime = 0
, measCpuTime = 0
, measCycles = 0
, measIters = 0
, measAllocated = minBound
, measNumGcs = minBound
, measBytesCopied = minBound
, measMutatorWallSeconds = bad
, measMutatorCpuSeconds = bad
, measGcWallSeconds = bad
, measGcCpuSeconds = bad
} where bad = -1/0
-- | Apply the difference between two sets of GC statistics to a
-- measurement.
applyGCStats :: Maybe GCStats
-- ^ Statistics gathered at the __end__ of a run.
-> Maybe GCStats
-- ^ Statistics gathered at the __beginning__ of a run.
-> Measured
-- ^ Value to \"modify\".
-> Measured
applyGCStats (Just end) (Just start) m = m {
measAllocated = diff bytesAllocated
, measNumGcs = diff numGcs
, measBytesCopied = diff bytesCopied
, measMutatorWallSeconds = diff mutatorWallSeconds
, measMutatorCpuSeconds = diff mutatorCpuSeconds
, measGcWallSeconds = diff gcWallSeconds
, measGcCpuSeconds = diff gcCpuSeconds
} where diff f = f end - f start
applyGCStats _ _ m = m
-- | Convert a number of seconds to a string. The string will consist
-- of four decimal places, followed by a short description of the time
-- units.
secs :: Double -> String
secs k
| k < 0 = '-' : secs (-k)
| k >= 1 = k `with` "s"
| k >= 1e-3 = (k*1e3) `with` "ms"
#ifdef mingw32_HOST_OS
| k >= 1e-6 = (k*1e6) `with` "us"
#else
| k >= 1e-6 = (k*1e6) `with` "μs"
#endif
| k >= 1e-9 = (k*1e9) `with` "ns"
| k >= 1e-12 = (k*1e12) `with` "ps"
| k >= 1e-15 = (k*1e15) `with` "fs"
| k >= 1e-18 = (k*1e18) `with` "as"
| otherwise = printf "%g s" k
where with (t :: Double) (u :: String)
| t >= 1e9 = printf "%.4g %s" t u
| t >= 1e3 = printf "%.0f %s" t u
| t >= 1e2 = printf "%.1f %s" t u
| t >= 1e1 = printf "%.2f %s" t u
| otherwise = printf "%.3f %s" t u
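-- Illustrative outputs, following the formatting rules above:
--
-- > secs 5      == "5.000 s"
-- > secs 0.0123 == "12.30 ms"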
-- | Set up time measurement.
foreign import ccall unsafe "criterion_inittime" initializeTime :: IO ()
-- | Read the CPU cycle counter.
foreign import ccall unsafe "criterion_rdtsc" getCycles :: IO Word64
-- | Return the current wallclock time, in seconds since some
-- arbitrary time.
--
-- You /must/ call 'initializeTime' once before calling this function!
foreign import ccall unsafe "criterion_gettime" getTime :: IO Double
-- | Return the amount of elapsed CPU time, combining user and kernel
-- (system) time into a single measure.
foreign import ccall unsafe "criterion_getcputime" getCPUTime :: IO Double
| iu-parfunc/criterion | Criterion/Measurement.hs | bsd-2-clause | 7,086 | 0 | 21 | 1,963 | 1,540 | 845 | 695 | 126 | 3 |
-- Vectorise a module's type and class declarations.
--
-- This produces new type constructors and family instances to be included in the module toplevel
-- as well as bindings for worker functions, dfuns, and the like.
module Vectorise.Type.Env (
vectTypeEnv,
) where
#include "HsVersions.h"
import Vectorise.Env
import Vectorise.Vect
import Vectorise.Monad
import Vectorise.Builtins
import Vectorise.Type.TyConDecl
import Vectorise.Type.Classify
import Vectorise.Generic.PADict
import Vectorise.Generic.PAMethods
import Vectorise.Generic.PData
import Vectorise.Generic.Description
import Vectorise.Utils
import CoreSyn
import CoreUtils
import CoreUnfold
import DataCon
import TyCon
import Type
import FamInstEnv
import Id
import MkId
import NameEnv
import NameSet
import OccName
import Util
import Outputable
import FastString
import MonadUtils
import Control.Monad
import Data.Maybe
import Data.List
-- Note [Pragmas to vectorise tycons]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- VECTORISE pragmas for type constructors cover three different flavours of vectorising data type
-- constructors:
--
-- (1) Data type constructor 'T' that may be used in vectorised code, where 'T' represents itself,
-- but the representation of 'T' is opaque in vectorised code.
--
-- An example is the treatment of 'Int'. 'Int's can be used in vectorised code and remain
-- unchanged by vectorisation. However, the representation of 'Int' by the 'I#' data
-- constructor wrapping an 'Int#' is not exposed in vectorised code. Instead, computations
-- involving the representation need to be confined to scalar code.
--
-- 'PData' and 'PRepr' instances need to be explicitly supplied for 'T' (they are not generated
-- by the vectoriser).
--
-- Type constructors declared with {-# VECTORISE SCALAR type T #-} are treated in this manner.
-- (The vectoriser never treats a type constructor automatically in this manner.)
--
-- (2) Data type constructor 'T' that may be used in vectorised code, where 'T' is represented by an
-- explicitly given 'Tv', but the representation of 'T' is opaque in vectorised code.
--
-- An example is the treatment of '[::]'. '[::]'s can be used in vectorised code and is
-- vectorised to 'PArray'. However, the representation of '[::]' is not exposed in vectorised
-- code. Instead, computations involving the representation need to be confined to scalar code.
--
-- 'PData' and 'PRepr' instances need to be explicitly supplied for 'T' (they are not generated
-- by the vectoriser).
--
-- Type constructors declared with {-# VECTORISE SCALAR type T = T' #-} are treated in this
-- manner. (The vectoriser never treats a type constructor automatically in this manner.)
--
-- (3) Data type constructor 'T' that together with its constructors 'Cn' may be used in vectorised
-- code, where 'T' and the 'Cn' are automatically vectorised in the same manner as data types
-- declared in a vectorised module. This includes the case where the vectoriser determines that
-- the original representation of 'T' may be used in vectorised code (as it does not embed any
-- parallel arrays.) This case is for type constructors that are *imported* from a non-
-- vectorised module, but that we want to use with full vectorisation support.
--
-- An example is the treatment of 'Ordering' and '[]'. The former remains unchanged by
-- vectorisation, whereas the latter is fully vectorised.
-- 'PData' and 'PRepr' instances are automatically generated by the vectoriser.
--
-- Type constructors declared with {-# VECTORISE type T #-} are treated in this manner.
--
-- (4) Data type constructor 'T' that together with its constructors 'Cn' may be used in vectorised
-- code, where 'T' is represented by an explicitly given 'Tv' whose constructors 'Cvn' represent
-- the original constructors in vectorised code. As a special case, we can have 'Tv = T'
--
-- An example is the treatment of 'Bool', which is represented by itself in vectorised code
-- (as it cannot embed any parallel arrays). However, we do not want any automatic generation
-- of class and family instances, which is why Case (3) does not apply.
--
-- 'PData' and 'PRepr' instances need to be explicitly supplied for 'T' (they are not generated
-- by the vectoriser).
--
-- Type constructors declared with {-# VECTORISE type T = T' #-} are treated in this manner.
--
-- In addition, we have also got a single pragma form for type classes: {-# VECTORISE class C #-}.
-- It implies that the class type constructor may be used in vectorised code together with its data
-- constructor. We generally produce a vectorised version of the data type and data constructor.
-- We do not generate 'PData' and 'PRepr' instances for class type constructors. This pragma is the
-- default for all type classes declared in this module, but the pragma can also be used explicitly on
-- imported classes.
-- Note [Vectorising classes]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- We vectorise classes essentially by just vectorising their desugared Core representation, but we
-- do generate a 'Class' structure along the way (see 'Vectorise.Type.TyConDecl.vectTyConDecl').
--
-- Here is an example illustrating the mapping — assume
--
-- class Num a where
-- (+) :: a -> a -> a
--
-- It desugars to
--
-- data Num a = D:Num { (+) :: a -> a -> a }
--
-- which we vectorise to
--
-- data V:Num a = D:V:Num { ($v+) :: PArray a :-> PArray a :-> PArray a }
--
-- while adding the following entries to the vectorisation map:
--
-- tycon : Num --> V:Num
-- datacon: D:Num --> D:V:Num
-- var : (+) --> ($v+)
-- |Vectorise type constructor including class type constructors.
--
vectTypeEnv :: [TyCon] -- Type constructors defined in this module
-> [CoreVect] -- All 'VECTORISE [SCALAR] type' declarations in this module
-> [CoreVect] -- All 'VECTORISE class' declarations in this module
-> VM ( [TyCon] -- old TyCons ++ new TyCons
, [FamInst] -- New type family instances.
, [(Var, CoreExpr)]) -- New top level bindings.
vectTypeEnv tycons vectTypeDecls vectClassDecls
= do { traceVt "** vectTypeEnv" $ ppr tycons
; let -- {-# VECTORISE SCALAR type T -#} (imported and local tycons)
localAbstractTyCons = [tycon | VectType True tycon Nothing <- vectTypeDecls]
-- {-# VECTORISE type T -#} (ONLY the imported tycons)
impVectTyCons = ( [tycon | VectType False tycon Nothing <- vectTypeDecls]
++ [tycon | VectClass tycon <- vectClassDecls])
\\ tycons
-- {-# VECTORISE [SCALAR] type T = T' -#} (imported and local tycons)
vectTyConsWithRHS = [ (tycon, rhs, isAbstract)
| VectType isAbstract tycon (Just rhs) <- vectTypeDecls]
-- filter VECTORISE SCALAR tycons and VECTORISE tycons with explicit rhses
vectSpecialTyConNames = mkNameSet . map tyConName $
localAbstractTyCons ++ map fst3 vectTyConsWithRHS
notVectSpecialTyCon tc = not $ (tyConName tc) `elemNameSet` vectSpecialTyConNames
-- Build a map containing all vectorised type constructor. If they are scalar, they are
-- mapped to 'False' (vectorised type constructor == original type constructor).
; allScalarTyConNames <- globalScalarTyCons -- covers both current and imported modules
; vectTyCons <- globalVectTyCons
; let vectTyConBase = mapNameEnv (const True) vectTyCons -- by default fully vectorised
vectTyConFlavour = vectTyConBase
`plusNameEnv`
mkNameEnv [ (tyConName tycon, True)
| (tycon, _, _) <- vectTyConsWithRHS]
`plusNameEnv`
mkNameEnv [ (tcName, False) -- original representation
| tcName <- nameSetToList allScalarTyConNames]
`plusNameEnv`
mkNameEnv [ (tyConName tycon, False) -- original representation
| tycon <- localAbstractTyCons]
-- Split the list of 'TyCons' into the ones (1) that we must vectorise and those (2)
-- that we could, but don't need to vectorise. Type constructors that are not data
-- type constructors or use non-Haskell98 features are being dropped. They may not
-- appear in vectorised code. (We also drop the local type constructors appearing in a
-- VECTORISE SCALAR pragma or a VECTORISE pragma with an explicit right-hand side, as
-- these are being handled separately. NB: Some type constructors may be marked SCALAR
-- /and/ have an explicit right-hand side.)
--
-- Furthermore, 'drop_tcs' are those type constructors that we cannot vectorise.
; let maybeVectoriseTyCons = filter notVectSpecialTyCon tycons ++ impVectTyCons
(conv_tcs, keep_tcs, drop_tcs) = classifyTyCons vectTyConFlavour maybeVectoriseTyCons
; traceVt " VECT SCALAR : " $ ppr localAbstractTyCons
; traceVt " VECT [class] : " $ ppr impVectTyCons
; traceVt " VECT with rhs : " $ ppr (map fst3 vectTyConsWithRHS)
; traceVt " -- after classification (local and VECT [class] tycons) --" empty
; traceVt " reuse : " $ ppr keep_tcs
; traceVt " convert : " $ ppr conv_tcs
-- warn the user about unvectorised type constructors
; let explanation = ptext (sLit "(They use unsupported language extensions") $$
ptext (sLit "or depend on type constructors that are not vectorised)")
drop_tcs_nosyn = filter (not . isSynTyCon) drop_tcs
; unless (null drop_tcs_nosyn) $
emitVt "Warning: cannot vectorise these type constructors:" $
pprQuotedList drop_tcs_nosyn $$ explanation
; mapM_ addGlobalScalarTyCon keep_tcs
; let mapping =
-- Type constructors that we don't need to vectorise, use the same
-- representation in both unvectorised and vectorised code; they are not
-- abstract.
[(tycon, tycon, False) | tycon <- keep_tcs]
-- We do the same for type constructors declared VECTORISE SCALAR /without/
-- an explicit right-hand side, but ignore their representation (data
-- constructors) as they are abstract.
++ [(tycon, tycon, True) | tycon <- localAbstractTyCons]
-- Type constructors declared VECTORISE /with/ an explicit vectorised type,
-- we map from the original to the given type; whether they are abstract depends
-- on whether the vectorisation declaration was SCALAR.
++ vectTyConsWithRHS
; syn_tcs <- catMaybes <$> mapM defTyConDataCons mapping
-- Vectorise all the data type declarations that we can and must vectorise (enter the
-- type and data constructors into the vectorisation map on-the-fly.)
; new_tcs <- vectTyConDecls conv_tcs
; let dumpTc tc vTc = traceVt "---" (ppr tc <+> text "::" <+> ppr (dataConSig tc) $$
ppr vTc <+> text "::" <+> ppr (dataConSig vTc))
dataConSig tc | Just dc <- tyConSingleDataCon_maybe tc = dataConRepType dc
| otherwise = panic "dataConSig"
; zipWithM_ dumpTc (filter isClassTyCon conv_tcs) (filter isClassTyCon new_tcs)
-- We don't need new representation types for dictionary constructors. The constructors
-- are always fully applied, and we don't need to lift them to arrays as a dictionary
-- of a particular type always has the same value.
; let orig_tcs = filter (not . isClassTyCon) $ keep_tcs ++ conv_tcs
vect_tcs = filter (not . isClassTyCon) $ keep_tcs ++ new_tcs
-- Build 'PRepr' and 'PData' instance type constructors and family instances for all
-- type constructors with vectorised representations.
; reprs <- mapM tyConRepr vect_tcs
; repr_tcs <- zipWith3M buildPReprTyCon orig_tcs vect_tcs reprs
; pdata_tcs <- zipWith3M buildPDataTyCon orig_tcs vect_tcs reprs
; pdatas_tcs <- zipWith3M buildPDatasTyCon orig_tcs vect_tcs reprs
; let inst_tcs = repr_tcs ++ pdata_tcs ++ pdatas_tcs
fam_insts = map mkLocalFamInst inst_tcs
; updGEnv $ extendFamEnv fam_insts
-- Generate workers for the vectorised data constructors, dfuns for the 'PA' instances of
-- the vectorised type constructors, and associate the type constructors with their dfuns
-- in the global environment. We get back the dfun bindings (which we will subsequently
       -- inject into the module's toplevel).
; (_, binds) <- fixV $ \ ~(dfuns, _) ->
do { defTyConPAs (zipLazy vect_tcs dfuns)
-- Query the 'PData' instance type constructors for type constructors that have a
-- VECTORISE pragma with an explicit right-hand side (this is Item (4) of
-- "Note [Pragmas to vectorise tycons]" above).
; let (withRHS_non_abstract, vwithRHS_non_abstract)
= unzip [(tycon, vtycon) | (tycon, vtycon, False) <- vectTyConsWithRHS]
; pdata_withRHS_tcs <- mapM pdataReprTyConExact withRHS_non_abstract
-- Build workers for all vectorised data constructors (except abstract ones)
; sequence_ $
zipWith3 vectDataConWorkers (orig_tcs ++ withRHS_non_abstract)
(vect_tcs ++ vwithRHS_non_abstract)
(pdata_tcs ++ pdata_withRHS_tcs)
-- Build a 'PA' dictionary for all type constructors (except abstract ones & those
-- defined with an explicit right-hand side where the dictionary is user-supplied)
; dfuns <- sequence $
zipWith4 buildTyConPADict
vect_tcs
repr_tcs
pdata_tcs
pdatas_tcs
; binds <- takeHoisted
; return (dfuns, binds)
}
-- Return the vectorised variants of type constructors as well as the generated instance
-- type constructors, family instances, and dfun bindings.
; return (new_tcs ++ inst_tcs ++ syn_tcs, fam_insts, binds)
}
where
fst3 (a, _, _) = a
-- Add a mapping from the original to vectorised type constructor to the vectorisation map.
    -- Unless the type constructor is abstract, also mappings from the original's data constructors
-- to the vectorised type's data constructors.
--
-- We have three cases: (1) original and vectorised type constructor are the same, (2) the
-- name of the vectorised type constructor is canonical (as prescribed by 'mkVectTyConOcc'), or
-- (3) the name is not canonical. In the third case, we additionally introduce a type synonym
-- with the canonical name that is set equal to the non-canonical name (so that we find the
-- right type constructor when reading vectorisation information from interface files).
--
defTyConDataCons (origTyCon, vectTyCon, isAbstract)
= do { canonName <- mkLocalisedName mkVectTyConOcc origName
; if origName == vectName -- Case (1)
|| vectName == canonName -- Case (2)
then do
{ defTyCon origTyCon vectTyCon -- T --> vT
; defDataCons -- Ci --> vCi
; return Nothing
}
else do -- Case (3)
{ let synTyCon = mkSyn canonName (mkTyConTy vectTyCon) -- type S = vT
; defTyCon origTyCon synTyCon -- T --> S
; defDataCons -- Ci --> vCi
; return $ Just synTyCon
}
}
where
origName = tyConName origTyCon
vectName = tyConName vectTyCon
mkSyn canonName ty = mkSynTyCon canonName (typeKind ty) [] (SynonymTyCon ty) NoParentTyCon
defDataCons
| isAbstract = return ()
| otherwise
= do { MASSERT(length (tyConDataCons origTyCon) == length (tyConDataCons vectTyCon))
; zipWithM_ defDataCon (tyConDataCons origTyCon) (tyConDataCons vectTyCon)
}
-- Helpers --------------------------------------------------------------------
buildTyConPADict :: TyCon -> TyCon -> TyCon -> TyCon -> VM Var
buildTyConPADict vect_tc prepr_tc pdata_tc pdatas_tc
= tyConRepr vect_tc >>= buildPADict vect_tc prepr_tc pdata_tc pdatas_tc
-- Produce a custom-made worker for the data constructors of a vectorised data type. This includes
-- all data constructors that may be used in vectorised code — i.e., all data constructors of data
-- types other than scalar ones. Also adds a mapping from the original to vectorised worker into
-- the vectorisation map.
--
-- FIXME: It's not nice that we need to create a special worker after the data constructors have
-- already been constructed. Also, I don't think the worker is properly added to the data
-- constructor. Seems messy.
vectDataConWorkers :: TyCon -> TyCon -> TyCon -> VM ()
vectDataConWorkers orig_tc vect_tc arr_tc
= do { traceVt "Building vectorised worker for datatype" (ppr orig_tc)
; bs <- sequence
. zipWith3 def_worker (tyConDataCons orig_tc) rep_tys
$ zipWith4 mk_data_con (tyConDataCons vect_tc)
rep_tys
(inits rep_tys)
(tail $ tails rep_tys)
; mapM_ (uncurry hoistBinding) bs
}
where
tyvars = tyConTyVars vect_tc
var_tys = mkTyVarTys tyvars
ty_args = map Type var_tys
res_ty = mkTyConApp vect_tc var_tys
cons = tyConDataCons vect_tc
arity = length cons
[arr_dc] = tyConDataCons arr_tc
rep_tys = map dataConRepArgTys $ tyConDataCons vect_tc
mk_data_con con tys pre post
= liftM2 (,) (vect_data_con con)
(lift_data_con tys pre post (mkDataConTag con))
sel_replicate len tag
| arity > 1 = do
rep <- builtin (selReplicate arity)
return [rep `mkApps` [len, tag]]
| otherwise = return []
vect_data_con con = return $ mkConApp con ty_args
lift_data_con tys pre_tys post_tys tag
= do
len <- builtin liftingContext
args <- mapM (newLocalVar (fsLit "xs"))
=<< mapM mkPDataType tys
sel <- sel_replicate (Var len) tag
pre <- mapM emptyPD (concat pre_tys)
post <- mapM emptyPD (concat post_tys)
return . mkLams (len : args)
. wrapFamInstBody arr_tc var_tys
. mkConApp arr_dc
$ ty_args ++ sel ++ pre ++ map Var args ++ post
def_worker data_con arg_tys mk_body
= do
arity <- polyArity tyvars
body <- closedV
. inBind orig_worker
. polyAbstract tyvars $ \args ->
liftM (mkLams (tyvars ++ args) . vectorised)
$ buildClosures tyvars [] [] arg_tys res_ty mk_body
raw_worker <- mkVectId orig_worker (exprType body)
let vect_worker = raw_worker `setIdUnfolding`
mkInlineUnfolding (Just arity) body
defGlobalVar orig_worker vect_worker
return (vect_worker, body)
where
orig_worker = dataConWorkId data_con
| ilyasergey/GHC-XAppFix | compiler/vectorise/Vectorise/Type/Env.hs | bsd-3-clause | 20,722 | 0 | 19 | 6,473 | 2,507 | 1,352 | 1,155 | 194 | 2 |
-- these bindings were derived from the `lzma-enumerator` package
#include <stdio.h>
#include <string.h>
#include <bindings.dsl.h>
#include <lzma.h>
module LibLzma where
#strict_import
-- lzma_ret
#integral_t lzma_ret
#num LZMA_OK
#num LZMA_STREAM_END
#num LZMA_NO_CHECK
#num LZMA_UNSUPPORTED_CHECK
#num LZMA_GET_CHECK
#num LZMA_MEM_ERROR
#num LZMA_MEMLIMIT_ERROR
#num LZMA_FORMAT_ERROR
#num LZMA_OPTIONS_ERROR
#num LZMA_DATA_ERROR
#num LZMA_BUF_ERROR
#num LZMA_PROG_ERROR
-- lzma_action
#integral_t lzma_action
#num LZMA_RUN
#num LZMA_SYNC_FLUSH
#num LZMA_FULL_FLUSH
#num LZMA_FINISH
-- lzma_flags
#num LZMA_TELL_NO_CHECK
#num LZMA_TELL_UNSUPPORTED_CHECK
#num LZMA_TELL_ANY_CHECK
#num LZMA_CONCATENATED
-- lzma_check
#integral_t lzma_check
#num LZMA_CHECK_NONE
#num LZMA_CHECK_CRC32
#num LZMA_CHECK_CRC64
#num LZMA_CHECK_SHA256
#num LZMA_PRESET_DEFAULT
#num LZMA_PRESET_LEVEL_MASK
#num LZMA_PRESET_EXTREME
-- lzma_stream
#starttype lzma_stream
#field next_in , Ptr CUChar
#field avail_in , CSize
#field total_in , CULong
#field next_out , Ptr CUChar
#field avail_out , CSize
#field total_out , CULong
#stoptype
-- figure out what to put here?
-- #cinline LZMA_STREAM_INIT , IO <lzma_stream>
-- base.h
#ccall lzma_code , Ptr <lzma_stream> -> <lzma_action> -> IO <lzma_ret>
#ccall lzma_end , Ptr <lzma_stream> -> IO ()
#ccall lzma_memusage , Ptr <lzma_stream> -> IO CULong
#ccall lzma_memlimit_get , Ptr <lzma_stream> -> IO CULong
#ccall lzma_memlimit_set , Ptr <lzma_stream> -> CULong -> IO <lzma_ret>
-- container.h
#ccall lzma_easy_encoder_memusage , CInt -> IO CULong
#ccall lzma_easy_decoder_memusage , CInt -> IO CULong
#ccall lzma_easy_encoder , Ptr <lzma_stream> -> CInt -> <lzma_check> -> IO <lzma_ret>
#ccall lzma_auto_decoder , Ptr <lzma_stream> -> CULong -> CUInt -> IO <lzma_ret>
pokeNextIn :: Ptr C'lzma_stream -> Ptr a -> IO ()
pokeNextIn = #poke lzma_stream, next_in
pokeAvailIn :: Ptr C'lzma_stream -> CSize -> IO ()
pokeAvailIn = #poke lzma_stream, avail_in
pokeNextOut :: Ptr C'lzma_stream -> Ptr a -> IO ()
pokeNextOut = #poke lzma_stream, next_out
pokeAvailOut :: Ptr C'lzma_stream -> CSize -> IO ()
pokeAvailOut = #poke lzma_stream, avail_out
peekNextIn :: Ptr C'lzma_stream -> IO (Ptr a)
peekNextIn = #peek lzma_stream, next_in
peekAvailIn :: Ptr C'lzma_stream -> IO CSize
peekAvailIn = #peek lzma_stream, avail_in
peekNextOut :: Ptr C'lzma_stream -> IO (Ptr a)
peekNextOut = #peek lzma_stream, next_out
peekAvailOut :: Ptr C'lzma_stream -> IO CSize
peekAvailOut = #peek lzma_stream, avail_out
| hvr/lzma-streams | src/foo.hs | bsd-3-clause | 2,542 | 11 | 10 | 353 | 336 | 188 | 148 | -1 | -1 |
{-# LANGUAGE CPP #-}
-- Module: Blaze.Text.Double
-- Copyright: (c) 2011 MailRank, Inc.
-- License: BSD3
-- Maintainer: Bryan O'Sullivan <[email protected]>
-- Stability: experimental
-- Portability: portable
--
-- Efficiently serialize a Double as a lazy 'L.ByteString'.
module Blaze.Text.Double
(
float
, double
) where
#ifdef NATIVE
import Blaze.Text.Double.Native
#else
import Blaze.ByteString.Builder (Builder, fromByteString)
import Data.Double.Conversion.ByteString (toShortest)
float :: Float -> Builder
float = double . realToFrac
double :: Double -> Builder
double f = fromByteString (toShortest f)
#endif
| bos/blaze-textual | Blaze/Text/Double.hs | bsd-3-clause | 654 | 0 | 4 | 116 | 34 | 27 | 7 | 11 | 1 |
module Level
( fillVoid
, loadLevel
) where
import Data.Array.IArray
import Types
import Utils
chrToBlock :: Char -> Block
chrToBlock 'X' = Wall
chrToBlock _ = Empty
loadLevel :: [String] -> [(Char, Coords -> Actor ())] -> (Level, [Actor ()])
loadLevel input actorMap =
let level = fillVoid $ parseLevel chrToBlock input
chrLevel = parseLevel id input
actors = concat [map f $ findBlocks c chrLevel | (c, f) <- actorMap]
in (level, actors)
parseLevel :: (a -> b) -> [[a]] -> Array Coords b
parseLevel fn input =
let h = toInteger $ length input
w = toInteger . length . head $ input in
listArray ((0, 0), (h - 1, w - 1)) $ map fn (concat input)
-- return a list of all coordinates for a given block type
findBlocks :: Eq a => a -> Array Coords a -> [Coords]
findBlocks x = map fst . filter ((== x) . snd) . assocs
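-- A small illustration of the two helpers above (rows index first, columns
-- second); example values only:
--
-- > parseLevel id ["ab", "cd"] ! (1, 0) == 'c'
-- > findBlocks 'X' (parseLevel id ["X.", ".X"]) == [(0,0), (1,1)]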
{- Find all squares that are surrounded by Void or Wall, and convert them to Voids.
- This works by making copies of the map that are offset by +/-1 in all directions,
- and then merging them together by appending duplicate values in a list, and then
- checking the resulting list. Quite horrible,
- -}
fillVoid :: Level -> Level
fillVoid level =
let newBlock blocks = if all (`elem` [Void, Wall]) blocks then Void else head blocks
in
amap newBlock $ makeAdjArr level
offsetArr :: (Num a, Ix a) => Array (a, a) e -> (a, a) -> Array (a, a) e
offsetArr arr offset =
let (b1, b2) = bounds arr in
array (b1 |+| offset, b2 |+| offset) [(i |+| offset, arr ! i) | i <- range (b1, b2)]
growArr :: (Num a, Ix a) => Array (a, a) [e] -> Array (a, a) [e]
growArr arr =
let (b1, b2) = bounds arr
(b1', b2') = (b1 |-| (1, 1), b2 |+| (1, 1))
in
-- make a new map filled with empty lists that is one block bigger in all dimensions
-- and then fill it with the original array
array (b1', b2') [(i, []) | i <- range (b1', b2')] //
[(i, arr ! i) | i <- range (b1, b2)]
shrinkArr :: (Num a, Ix a) => Array (a, a) [e] -> Array (a, a) [e]
shrinkArr arr =
let (b1, b2) = bounds arr
bounds' = (b1 |+| (1, 1), b2 |-| (1, 1))
in
array bounds' [(i, arr ! i) | i <- range bounds']
makeAdjArr :: LevelArray Block -> LevelArray [Block]
makeAdjArr level =
let listArr = amap (:[]) level
        offsets = [(y, x) | x <- [-1, 0, 1], y <- [-1, 0, 1], x /= 0 || y /= 0]
offsetLevels = map (offsetArr listArr) offsets
foldStep :: LevelArray [Block] -> LevelArray [Block] -> LevelArray [Block]
foldStep arr seed = accum (++) seed (assocs arr)
in
shrinkArr $ foldr foldStep (growArr listArr) offsetLevels
| bjornars/HaskellGame | src/Level.hs | bsd-3-clause | 2,677 | 0 | 13 | 698 | 1,112 | 599 | 513 | 49 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Y2016.Day22 (answer1, answer2) where
import Control.Monad
import Control.Monad.Loops as Loops
import GHC.Word
import Data.Functor
import Data.Array as Arr
import Data.Void
import qualified Data.Set as Set
import qualified Data.Map.Strict as Map
import Text.Megaparsec
import Text.Megaparsec.Char
import Text.Megaparsec.Char.Lexer
import qualified Data.Text as Tx
import qualified Data.Text.IO as Tx.IO
type Parser = Parsec Void Tx.Text
type Coord = (Word16, Word16)
data Node = Node
{ nSize :: Word16
, nUsed :: Word16
}
deriving (Show)
type Grid = Array Coord Node
answer1, answer2 :: IO Int
answer1 = do
grid <- parseData
let ns = Arr.assocs grid
let viablePairs = do
(x, nx) <- ns
(y, ny) <- ns
guard $ x /= y
guard $ nUsed nx /= 0
guard $ (nSize ny - nUsed ny) >= nUsed nx
pure x
pure $ length viablePairs
answer2 = error "wip"
parseData :: IO (Array Coord Node)
parseData = do
raw <- Tx.IO.readFile "./data/2016/day22.txt"
case parse dataParser "day22" raw of
Left err -> print err *> error "failed"
Right x -> pure x
dataParser :: Parser (Array Coord Node)
dataParser = do
void $ takeLine *> char '\n'
void $ takeLine *> char '\n'
lines <- parseLine `Loops.untilM` isEOF
let m = maximum $ fmap fst lines
pure $ array ((0,0), m) lines
parseLine :: Parser (Coord, Node)
parseLine = do
string "/dev/grid/node-x"
x <- decimal
string "-y"
y <- decimal
void takeSpaces
size <- decimal
char 'T'
takeSpaces
used <- decimal
takeLine
char '\n'
let node = Node size used
pure ((x, y), node)
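-- The parser above accepts lines shaped like the following (hypothetical
-- sample values; the trailing newline is required):
--
-- > /dev/grid/node-x0-y0     92T   68T    24T   73%
--
-- and for such a line would yield ((0, 0), Node 92 68).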
takeLine :: Parser Tx.Text
takeLine = takeWhileP Nothing (/= '\n')
takeSpaces :: Parser Tx.Text
takeSpaces = takeWhileP Nothing (== ' ')
isEOF :: Parser Bool
isEOF = try (eof $> True) <|> pure False
aStar :: Coord -> Coord -> Array Coord Node -> Maybe [Coord]
aStar start goal grid = aStar' start goal grid Set.empty [(start, 0)] initScores
where
initScores
= listArray (Arr.bounds grid) (repeat (Nothing, maxBound, maxBound))
// [(start, (Nothing, 0, manhattan start goal))]
aStar'
:: Coord
-- ^ start
-> Coord
-- ^ goal
-> Array Coord Node
-- ^ original grid
-> Set.Set Coord
-- ^ closedSet (visited nodes)
-> [(Coord, Word16)]
-- ^ openSet (nodes to visit next, head is the most likely candidate)
-> Array Coord (Maybe Coord, Word16, Word16)
-- ^ array of (parentNode, startCost, totalCost)
-- parentNode is used to reconstruct the path
-- startCost: cost from start to this node
-- totalCost: total cost from start to goal going through that node
-> Maybe [Coord]
-- ^ path from start to goal
aStar' _ _ _ _ [] _ = Nothing
aStar' current goal grid closedSet (x:xs) scores =
    error "wip"
manhattan :: Coord -> Coord -> Word16
manhattan (x1, y1) (x2, y2) =
let x = if x2 > x1 then x2 - x1 else x1 - x2
y = if y2 > y1 then y2 - y1 else y1 - y2
in x + y
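-- For example (illustrative): manhattan (0, 0) (2, 3) == 5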
comeFrom (x, _, _) = x
fromStart (_, x, _) = x
totalScore (_, _, x) = x
insertOrd a [] = [a]
insertOrd a@(_, n) l@(x@(_, m):xs) =
if n <= m
then a : l
else x : insertOrd a xs
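-- Illustrative behaviour: the new element is placed in front of the first
-- entry whose weight is at least its own, e.g.
--
-- > insertOrd ('c', 2) [('a', 1), ('b', 3)] == [('a', 1), ('c', 2), ('b', 3)]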
| geekingfrog/advent-of-code | src/Y2016/Day22.hs | bsd-3-clause | 3,331 | 0 | 16 | 905 | 1,177 | 626 | 551 | 98 | 3 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
-- | Construct a @Plan@ for how to build
module Stack.Build.ConstructPlan
( constructPlan
) where
import Control.Arrow ((&&&), second)
import Control.Exception.Lifted
import Control.Monad
import Control.Monad.Catch (MonadCatch)
import Control.Monad.IO.Class
import Control.Monad.Logger (MonadLogger, logWarn)
import Control.Monad.RWS.Strict
import Control.Monad.Trans.Resource
import Data.Either
import Data.Function
import Data.List
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import qualified Data.Map.Strict as Map
import Data.Maybe
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
import Distribution.Package (Dependency (..))
import Distribution.Version (anyVersion)
import Network.HTTP.Client.Conduit (HasHttpManager)
import Prelude hiding (pi, writeFile)
import Stack.Build.Cache
import Stack.Build.Haddock
import Stack.Build.Installed
import Stack.Build.Source
import Stack.Types.Build
import Stack.BuildPlan
import Stack.Package
import Stack.PackageDump
import Stack.Types
data PackageInfo
= PIOnlyInstalled InstallLocation Installed
| PIOnlySource PackageSource
| PIBoth PackageSource Installed
combineSourceInstalled :: PackageSource
-> (InstallLocation, Installed)
-> PackageInfo
combineSourceInstalled ps (location, installed) =
assert (piiVersion ps == installedVersion installed) $
assert (piiLocation ps == location) $
case location of
-- Always trust something in the snapshot
Snap -> PIOnlyInstalled location installed
Local -> PIBoth ps installed
type CombinedMap = Map PackageName PackageInfo
combineMap :: SourceMap -> InstalledMap -> CombinedMap
combineMap = Map.mergeWithKey
(\_ s i -> Just $ combineSourceInstalled s i)
(fmap PIOnlySource)
(fmap (uncurry PIOnlyInstalled))
data AddDepRes
= ADRToInstall Task
| ADRFound InstallLocation Installed
deriving Show
data W = W
{ wFinals :: !(Map PackageName (Either ConstructPlanException Task))
, wInstall :: !(Map Text InstallLocation)
-- ^ executable to be installed, and location where the binary is placed
, wDirty :: !(Map PackageName Text)
-- ^ why a local package is considered dirty
, wDeps :: !(Set PackageName)
-- ^ Packages which count as dependencies
, wWarnings :: !([Text] -> [Text])
-- ^ Warnings
}
instance Monoid W where
mempty = W mempty mempty mempty mempty mempty
mappend (W a b c d e) (W w x y z z') = W (mappend a w) (mappend b x) (mappend c y) (mappend d z) (mappend e z')
type M = RWST
Ctx
W
(Map PackageName (Either ConstructPlanException AddDepRes))
IO
data Ctx = Ctx
{ mbp :: !MiniBuildPlan
, baseConfigOpts :: !BaseConfigOpts
, loadPackage :: !(PackageName -> Version -> Map FlagName Bool -> IO Package)
, combinedMap :: !CombinedMap
, toolToPackages :: !(Dependency -> Map PackageName VersionRange)
, ctxEnvConfig :: !EnvConfig
, callStack :: ![PackageName]
, extraToBuild :: !(Set PackageName)
, ctxVersions :: !(Map PackageName (Set Version))
, wanted :: !(Set PackageName)
, localNames :: !(Set PackageName)
}
instance HasStackRoot Ctx
instance HasPlatform Ctx
instance HasGHCVariant Ctx
instance HasConfig Ctx
instance HasBuildConfig Ctx where
getBuildConfig = getBuildConfig . getEnvConfig
instance HasEnvConfig Ctx where
getEnvConfig = ctxEnvConfig
constructPlan :: forall env m.
(MonadCatch m, MonadReader env m, HasEnvConfig env, MonadIO m, MonadLogger m, MonadBaseControl IO m, HasHttpManager env)
=> MiniBuildPlan
-> BaseConfigOpts
-> [LocalPackage]
-> Set PackageName -- ^ additional packages that must be built
-> [DumpPackage () ()] -- ^ locally registered
-> (PackageName -> Version -> Map FlagName Bool -> IO Package) -- ^ load upstream package
-> SourceMap
-> InstalledMap
-> m Plan
constructPlan mbp0 baseConfigOpts0 locals extraToBuild0 localDumpPkgs loadPackage0 sourceMap installedMap = do
let locallyRegistered = Map.fromList $ map (dpGhcPkgId &&& dpPackageIdent) localDumpPkgs
bconfig <- asks getBuildConfig
let versions =
Map.fromListWith Set.union $
map (second Set.singleton . toTuple) $
Map.keys (bcPackageCaches bconfig)
econfig <- asks getEnvConfig
let onWanted = void . addDep False . packageName . lpPackage
let inner = do
mapM_ onWanted $ filter lpWanted locals
mapM_ (addDep False) $ Set.toList extraToBuild0
((), m, W efinals installExes dirtyReason deps warnings) <-
liftIO $ runRWST inner (ctx econfig versions) M.empty
mapM_ $logWarn (warnings [])
let toEither (_, Left e) = Left e
toEither (k, Right v) = Right (k, v)
(errlibs, adrs) = partitionEithers $ map toEither $ M.toList m
(errfinals, finals) = partitionEithers $ map toEither $ M.toList efinals
errs = errlibs ++ errfinals
if null errs
then do
let toTask (_, ADRFound _ _) = Nothing
toTask (name, ADRToInstall task) = Just (name, task)
tasks = M.fromList $ mapMaybe toTask adrs
takeSubset =
case boptsBuildSubset $ bcoBuildOpts baseConfigOpts0 of
BSAll -> id
BSOnlySnapshot -> stripLocals
BSOnlyDependencies -> stripNonDeps deps
return $ takeSubset Plan
{ planTasks = tasks
, planFinals = M.fromList finals
, planUnregisterLocal = mkUnregisterLocal tasks dirtyReason locallyRegistered sourceMap
, planInstallExes =
if boptsInstallExes $ bcoBuildOpts baseConfigOpts0
then installExes
else Map.empty
}
else throwM $ ConstructPlanExceptions errs (bcStackYaml $ getBuildConfig econfig)
where
ctx econfig versions = Ctx
{ mbp = mbp0
, baseConfigOpts = baseConfigOpts0
, loadPackage = loadPackage0
, combinedMap = combineMap sourceMap installedMap
, toolToPackages = \ (Dependency name _) ->
maybe Map.empty (Map.fromSet (const anyVersion)) $
Map.lookup (T.pack . packageNameString . fromCabalPackageName $ name) toolMap
, ctxEnvConfig = econfig
, callStack = []
, extraToBuild = extraToBuild0
, ctxVersions = versions
, wanted = wantedLocalPackages locals
, localNames = Set.fromList $ map (packageName . lpPackage) locals
}
-- TODO Currently, this will only consider and install tools from the
-- snapshot. It will not automatically install build tools from extra-deps
-- or local packages.
toolMap = getToolMap mbp0
-- | Determine which packages to unregister based on the given tasks and
-- already registered local packages
mkUnregisterLocal :: Map PackageName Task
-> Map PackageName Text
-> Map GhcPkgId PackageIdentifier
-> SourceMap
-> Map GhcPkgId (PackageIdentifier, Maybe Text)
mkUnregisterLocal tasks dirtyReason locallyRegistered sourceMap =
Map.unions $ map toUnregisterMap $ Map.toList locallyRegistered
where
toUnregisterMap (gid, ident) =
case M.lookup name tasks of
Nothing ->
case M.lookup name sourceMap of
Just (PSUpstream _ Snap _) -> Map.singleton gid
( ident
, Just "Switching to snapshot installed package"
)
_ -> Map.empty
Just _ -> Map.singleton gid
( ident
, Map.lookup name dirtyReason
)
where
name = packageIdentifierName ident
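-- | Add a "final" task for a local package's tests and benchmarks, recording
-- either the constructed 'Task' or the failure in 'wFinals'.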
addFinal :: LocalPackage -> Package -> Bool -> M ()
addFinal lp package isAllInOne = do
depsRes <- addPackageDeps False package
res <- case depsRes of
Left e -> return $ Left e
Right (missing, present, _minLoc) -> do
ctx <- ask
return $ Right Task
{ taskProvides = PackageIdentifier
(packageName package)
(packageVersion package)
, taskConfigOpts = TaskConfigOpts missing $ \missing' ->
let allDeps = Map.union present missing'
in configureOpts
(getEnvConfig ctx)
(baseConfigOpts ctx)
allDeps
True -- wanted
True -- local
Local
package
, taskPresent = present
, taskType = TTLocal lp
, taskAllInOne = isAllInOne
}
tell mempty { wFinals = Map.singleton (packageName package) res }
addDep :: Bool -- ^ is this being used by a dependency?
-> PackageName
-> M (Either ConstructPlanException AddDepRes)
addDep treatAsDep' name = do
ctx <- ask
let treatAsDep = treatAsDep' || name `Set.notMember` wanted ctx
when treatAsDep $ markAsDep name
m <- get
case Map.lookup name m of
Just res -> return res
Nothing -> do
res <- if name `elem` callStack ctx
then return $ Left $ DependencyCycleDetected $ name : callStack ctx
else local (\ctx' -> ctx' { callStack = name : callStack ctx' }) $
case Map.lookup name $ combinedMap ctx of
-- TODO look up in the package index and see if there's a
-- recommendation available
Nothing -> return $ Left $ UnknownPackage name
Just (PIOnlyInstalled loc installed) -> do
-- slightly hacky, no flags since they likely won't affect executable names
tellExecutablesUpstream name (installedVersion installed) loc Map.empty
return $ Right $ ADRFound loc installed
Just (PIOnlySource ps) -> do
tellExecutables name ps
installPackage treatAsDep name ps Nothing
Just (PIBoth ps installed) -> do
tellExecutables name ps
installPackage treatAsDep name ps (Just installed)
modify $ Map.insert name res
return res
tellExecutables :: PackageName -> PackageSource -> M ()
tellExecutables _ (PSLocal lp)
| lpWanted lp = tellExecutablesPackage Local $ lpPackage lp
| otherwise = return ()
tellExecutables name (PSUpstream version loc flags) =
tellExecutablesUpstream name version loc flags
tellExecutablesUpstream :: PackageName -> Version -> InstallLocation -> Map FlagName Bool -> M ()
tellExecutablesUpstream name version loc flags = do
ctx <- ask
when (name `Set.member` extraToBuild ctx) $ do
p <- liftIO $ loadPackage ctx name version flags
tellExecutablesPackage loc p
tellExecutablesPackage :: InstallLocation -> Package -> M ()
tellExecutablesPackage loc p = do
cm <- asks combinedMap
-- Determine which components are enabled so we know which ones to copy
let myComps =
case Map.lookup (packageName p) cm of
Nothing -> assert False Set.empty
Just (PIOnlyInstalled _ _) -> Set.empty
Just (PIOnlySource ps) -> goSource ps
Just (PIBoth ps _) -> goSource ps
goSource (PSLocal lp)
| lpWanted lp = exeComponents (lpComponents lp)
| otherwise = Set.empty
goSource (PSUpstream{}) = Set.empty
tell mempty { wInstall = Map.fromList $ map (, loc) $ Set.toList $ filterComps myComps $ packageExes p }
where
filterComps myComps x
| Set.null myComps = x
| otherwise = Set.intersection x myComps
installPackage :: Bool -- ^ is this being used by a dependency?
-> PackageName
-> PackageSource
-> Maybe Installed
-> M (Either ConstructPlanException AddDepRes)
installPackage treatAsDep name ps minstalled = do
ctx <- ask
case ps of
PSUpstream version _ flags -> do
package <- liftIO $ loadPackage ctx name version flags
resolveDepsAndInstall False treatAsDep ps package minstalled
PSLocal lp ->
case lpTestBench lp of
Nothing -> resolveDepsAndInstall False treatAsDep ps (lpPackage lp) minstalled
Just tb -> do
-- Attempt to find a plan which performs an all-in-one
-- build. Ignore the writer action + reset the state if
-- it fails.
s <- get
res <- pass $ do
res <- addPackageDeps treatAsDep tb
let writerFunc w = case res of
Left _ -> mempty
_ -> w
return (res, writerFunc)
case res of
Right deps -> do
adr <- installPackageGivenDeps True ps tb minstalled deps
-- FIXME: this redundantly adds the deps (but
-- they'll all just get looked up in the map)
addFinal lp tb True
return $ Right adr
Left _ -> do
-- Reset the state to how it was before
-- attempting to find an all-in-one build
-- plan.
put s
-- Otherwise, fall back on building the
-- tests / benchmarks in a separate step.
res' <- resolveDepsAndInstall False treatAsDep ps (lpPackage lp) minstalled
when (isRight res') $ do
-- Insert it into the map so that it's
-- available for addFinal.
modify $ Map.insert name res'
addFinal lp tb False
return res'
resolveDepsAndInstall :: Bool
-> Bool
-> PackageSource
-> Package
-> Maybe Installed
-> M (Either ConstructPlanException AddDepRes)
resolveDepsAndInstall isAllInOne treatAsDep ps package minstalled = do
res <- addPackageDeps treatAsDep package
case res of
Left err -> return $ Left err
Right deps -> liftM Right $ installPackageGivenDeps isAllInOne ps package minstalled deps
installPackageGivenDeps :: Bool
-> PackageSource
-> Package
-> Maybe Installed
-> ( Set PackageIdentifier
, Map PackageIdentifier GhcPkgId
, InstallLocation )
-> M AddDepRes
installPackageGivenDeps isAllInOne ps package minstalled (missing, present, minLoc) = do
let name = packageName package
ctx <- ask
mRightVersionInstalled <- case (minstalled, Set.null missing) of
(Just installed, True) -> do
shouldInstall <- checkDirtiness ps installed package present (wanted ctx)
return $ if shouldInstall then Nothing else Just installed
(Just _, False) -> do
let t = T.intercalate ", " $ map (T.pack . packageNameString . packageIdentifierName) (Set.toList missing)
tell mempty { wDirty = Map.singleton name $ "missing dependencies: " <> addEllipsis t }
return Nothing
(Nothing, _) -> return Nothing
return $ case mRightVersionInstalled of
Just installed -> ADRFound (piiLocation ps) installed
Nothing -> ADRToInstall Task
{ taskProvides = PackageIdentifier
(packageName package)
(packageVersion package)
, taskConfigOpts = TaskConfigOpts missing $ \missing' ->
let allDeps = Map.union present missing'
destLoc = piiLocation ps <> minLoc
in configureOpts
(getEnvConfig ctx)
(baseConfigOpts ctx)
allDeps
(psWanted ps)
(psLocal ps)
-- An assertion to check for a recurrence of
-- https://github.com/commercialhaskell/stack/issues/345
(assert (destLoc == piiLocation ps) destLoc)
package
, taskPresent = present
, taskType =
case ps of
PSLocal lp -> TTLocal lp
PSUpstream _ loc _ -> TTUpstream package $ loc <> minLoc
, taskAllInOne = isAllInOne
}
addEllipsis :: Text -> Text
addEllipsis t
| T.length t < 100 = t
| otherwise = T.take 97 t <> "..."
addPackageDeps :: Bool -- ^ is this being used by a dependency?
-> Package -> M (Either ConstructPlanException (Set PackageIdentifier, Map PackageIdentifier GhcPkgId, InstallLocation))
addPackageDeps treatAsDep package = do
ctx <- ask
deps' <- packageDepsWithTools package
deps <- forM (Map.toList deps') $ \(depname, range) -> do
eres <- addDep treatAsDep depname
let mlatestApplicable =
(latestApplicableVersion range <=< Map.lookup depname) (ctxVersions ctx)
case eres of
Left e ->
let bd =
case e of
UnknownPackage name -> assert (name == depname) NotInBuildPlan
_ -> Couldn'tResolveItsDependencies
in return $ Left (depname, (range, mlatestApplicable, bd))
Right adr -> do
inRange <- if adrVersion adr `withinRange` range
then return True
else do
let warn reason =
tell mempty { wWarnings = (msg:) }
where
msg = T.concat
[ "WARNING: Ignoring out of range dependency"
, reason
, ": "
, T.pack $ packageIdentifierString $ PackageIdentifier depname (adrVersion adr)
, ". "
, T.pack $ packageNameString $ packageName package
, " requires: "
, versionRangeText range
]
allowNewer <- asks $ configAllowNewer . getConfig
if allowNewer
then do
warn " (allow-newer enabled)"
return True
else do
x <- inSnapshot (packageName package) (packageVersion package)
y <- inSnapshot depname (adrVersion adr)
if x && y
then do
warn " (trusting snapshot over Hackage revisions)"
return True
else return False
if inRange
then case adr of
ADRToInstall task -> return $ Right
(Set.singleton $ taskProvides task, Map.empty, taskLocation task)
ADRFound loc (Executable _) -> return $ Right
(Set.empty, Map.empty, loc)
ADRFound loc (Library ident gid) -> return $ Right
(Set.empty, Map.singleton ident gid, loc)
else return $ Left (depname, (range, mlatestApplicable, DependencyMismatch $ adrVersion adr))
case partitionEithers deps of
([], pairs) -> return $ Right $ mconcat pairs
(errs, _) -> return $ Left $ DependencyPlanFailures
package
(Map.fromList errs)
where
adrVersion (ADRToInstall task) = packageIdentifierVersion $ taskProvides task
adrVersion (ADRFound _ installed) = installedVersion installed
checkDirtiness :: PackageSource
-> Installed
-> Package
-> Map PackageIdentifier GhcPkgId
-> Set PackageName
-> M Bool
checkDirtiness ps installed package present wanted = do
ctx <- ask
moldOpts <- tryGetFlagCache installed
let configOpts = configureOpts
(getEnvConfig ctx)
(baseConfigOpts ctx)
present
(psWanted ps)
(psLocal ps)
(piiLocation ps) -- should be Local always
package
buildOpts = bcoBuildOpts (baseConfigOpts ctx)
wantConfigCache = ConfigCache
{ configCacheOpts = configOpts
, configCacheDeps = Set.fromList $ Map.elems present
, configCacheComponents =
case ps of
PSLocal lp -> Set.map renderComponent $ lpComponents lp
PSUpstream{} -> Set.empty
, configCacheHaddock =
shouldHaddockPackage buildOpts wanted (packageName package) ||
            -- Disabling haddocks when the old config had haddocks doesn't make the package dirty.
maybe False configCacheHaddock moldOpts
}
let mreason =
case moldOpts of
Nothing -> Just "old configure information not found"
Just oldOpts
| Just reason <- describeConfigDiff config oldOpts wantConfigCache -> Just reason
| True <- psForceDirty ps -> Just "--force-dirty specified"
| Just files <- psDirty ps -> Just $ "local file changes: " <>
addEllipsis (T.pack $ unwords $ Set.toList files)
| otherwise -> Nothing
config = getConfig ctx
case mreason of
Nothing -> return False
Just reason -> do
tell mempty { wDirty = Map.singleton (packageName package) reason }
return True
describeConfigDiff :: Config -> ConfigCache -> ConfigCache -> Maybe Text
describeConfigDiff config old new
| not (configCacheDeps new `Set.isSubsetOf` configCacheDeps old) = Just "dependencies changed"
| not $ Set.null newComponents =
Just $ "components added: " `T.append` T.intercalate ", "
(map (decodeUtf8With lenientDecode) (Set.toList newComponents))
| not (configCacheHaddock old) && configCacheHaddock new = Just "rebuilding with haddocks"
| oldOpts /= newOpts = Just $ T.pack $ concat
[ "flags changed from "
, show oldOpts
, " to "
, show newOpts
]
| otherwise = Nothing
where
-- options set by stack
isStackOpt t = any (`T.isPrefixOf` t)
[ "--dependency="
, "--constraint="
, "--package-db="
, "--libdir="
, "--bindir="
, "--datadir="
, "--libexecdir="
, "--sysconfdir"
, "--docdir="
, "--htmldir="
, "--haddockdir="
, "--enable-tests"
, "--enable-benchmarks"
] || elem t
[ "--user"
]
stripGhcOptions =
go
where
go [] = []
go ("--ghc-option":x:xs) = go' x xs
go ("--ghc-options":x:xs) = go' x xs
go ((T.stripPrefix "--ghc-option=" -> Just x):xs) = go' x xs
go ((T.stripPrefix "--ghc-options=" -> Just x):xs) = go' x xs
go (x:xs) = x : go xs
go' x xs = checkKeepers x $ go xs
checkKeepers x xs =
case filter isKeeper $ T.words x of
[] -> xs
keepers -> "--ghc-options" : T.unwords keepers : xs
-- GHC options which affect build results and therefore should always
-- force a rebuild
--
-- For the most part, we only care about options generated by Stack
-- itself
isKeeper = (== "-fhpc") -- more to be added later
userOpts = filter (not . isStackOpt)
. (if configRebuildGhcOptions config
then id
else stripGhcOptions)
. map T.pack
. (\(ConfigureOpts x y) -> x ++ y)
. configCacheOpts
(oldOpts, newOpts) = removeMatching (userOpts old) (userOpts new)
removeMatching (x:xs) (y:ys)
| x == y = removeMatching xs ys
removeMatching xs ys = (xs, ys)
newComponents = configCacheComponents new `Set.difference` configCacheComponents old
psForceDirty :: PackageSource -> Bool
psForceDirty (PSLocal lp) = lpForceDirty lp
psForceDirty (PSUpstream {}) = False
psDirty :: PackageSource -> Maybe (Set FilePath)
psDirty (PSLocal lp) = lpDirtyFiles lp
psDirty (PSUpstream {}) = Nothing -- files never change in an upstream package
psWanted :: PackageSource -> Bool
psWanted (PSLocal lp) = lpWanted lp
psWanted (PSUpstream {}) = False
psLocal :: PackageSource -> Bool
psLocal (PSLocal _) = True
psLocal (PSUpstream {}) = False
-- | Get all of the dependencies for a given package, including guessed build
-- tool dependencies.
packageDepsWithTools :: Package -> M (Map PackageName VersionRange)
packageDepsWithTools p = do
ctx <- ask
return $ Map.unionsWith intersectVersionRanges
$ packageDeps p
: map (toolToPackages ctx) (packageTools p)
-- | Strip out anything from the @Plan@ intended for the local database
stripLocals :: Plan -> Plan
stripLocals plan = plan
{ planTasks = Map.filter checkTask $ planTasks plan
, planFinals = Map.empty
, planUnregisterLocal = Map.empty
, planInstallExes = Map.filter (/= Local) $ planInstallExes plan
}
where
checkTask task =
case taskType task of
TTLocal _ -> False
TTUpstream _ Local -> False
TTUpstream _ Snap -> True
stripNonDeps :: Set PackageName -> Plan -> Plan
stripNonDeps deps plan = plan
{ planTasks = Map.filter checkTask $ planTasks plan
, planFinals = Map.empty
, planInstallExes = Map.empty -- TODO maybe don't disable this?
}
where
checkTask task = packageIdentifierName (taskProvides task) `Set.member` deps
markAsDep :: PackageName -> M ()
markAsDep name = tell mempty { wDeps = Set.singleton name }
-- | Is the given package/version combo defined in the snapshot?
inSnapshot :: PackageName -> Version -> M Bool
inSnapshot name version = do
p <- asks mbp
ls <- asks localNames
return $ fromMaybe False $ do
guard $ not $ name `Set.member` ls
mpi <- Map.lookup name (mbpPackages p)
return $ mpiVersion mpi == version
| luigy/stack | src/Stack/Build/ConstructPlan.hs | bsd-3-clause | 28,264 | 0 | 31 | 10,413 | 6,739 | 3,384 | 3,355 | 593 | 11 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
module Acc.Lift where
import Data.Array.Accelerate
lift2A
:: (Arrays a, Arrays b)
=> (Acc a, Acc b)
-> Acc (a,b)
lift2A = lift
lift3A
:: (Arrays a, Arrays b, Arrays c)
=> (Acc a, Acc b, Acc c)
-> Acc (a,b,c)
lift3A = lift
lift2E
:: (Elt a, Elt b)
=> (Exp a, Exp b)
-> Exp (a,b)
lift2E = lift
lift3E
:: (Elt a, Elt b, Elt c)
=> (Exp a, Exp b, Exp c)
-> Exp (a,b,c)
lift3E = lift
lift4E
:: (Elt a, Elt b, Elt c, Elt d)
=> (Exp a, Exp b, Exp c, Exp d)
-> Exp (a,b,c,d)
lift4E = lift
lift6E
:: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f)
=> (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f)
-> Exp (a,b,c,d,e,f)
lift6E = lift
unlift2A
:: (Arrays a, Arrays b)
=> Acc (a,b)
-> (Acc a, Acc b)
unlift2A = unlift
unlift3A
:: (Arrays a, Arrays b, Arrays c)
=> Acc (a,b,c)
-> (Acc a, Acc b, Acc c)
unlift3A = unlift
unlift4A
:: (Arrays a, Arrays b, Arrays c, Arrays d)
=> Acc (a,b,c,d)
-> (Acc a, Acc b, Acc c, Acc d)
unlift4A = unlift
unlift5A
:: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e)
=> Acc (a,b,c,d,e)
-> (Acc a, Acc b, Acc c, Acc d, Acc e)
unlift5A = unlift
unlift6A
:: (Arrays a, Arrays b, Arrays c, Arrays d, Arrays e, Arrays f)
=> Acc (a,b,c,d,e,f)
-> (Acc a, Acc b, Acc c, Acc d, Acc e, Acc f)
unlift6A = unlift
unlift2E
:: (Elt a, Elt b)
=> Exp (a,b)
-> (Exp a, Exp b)
unlift2E = unlift
unlift3E
:: (Elt a, Elt b, Elt c)
=> Exp (a,b,c)
-> (Exp a, Exp b, Exp c)
unlift3E = unlift
unlift4E
:: (Elt a, Elt b, Elt c, Elt d)
=> Exp (a,b,c,d)
-> (Exp a, Exp b, Exp c, Exp d)
unlift4E = unlift
unlift5E
:: (Elt a, Elt b, Elt c, Elt d, Elt e)
=> Exp (a,b,c,d,e)
-> (Exp a, Exp b, Exp c, Exp d, Exp e)
unlift5E = unlift
unlift6E
:: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f)
=> Exp (a,b,c,d,e,f)
-> (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f)
unlift6E = unlift
unlift7E
:: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f, Elt g)
=> Exp (a,b,c,d,e,f,g)
-> (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f, Exp g)
unlift7E = unlift
unlift8E
:: (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f, Elt g, Elt h)
=> Exp (a,b,c,d,e,f,g,h)
-> (Exp a, Exp b, Exp c, Exp d, Exp e, Exp f, Exp g, Exp h)
unlift8E = unlift
index3
:: (Elt i, Slice (Z :. i), Slice (Z :. i :. i))
=> Exp i
-> Exp i
-> Exp i
-> Exp (Z :. i :. i :. i)
index3 i j k = lift (Z :. i :. j :. k)
unindex3
:: forall i. (Elt i, Slice (Z :. i), Slice (Z :. i :. i))
=> Exp (Z :. i :. i :. i)
-> Exp (i, i, i)
unindex3 ix =
let
Z :. i :. j :. k = unlift ix :: Z :. Exp i :. Exp i :. Exp i
in lift (i, j, k)
index4
:: (Elt i, Slice (Z :. i), Slice (Z :. i :. i), Slice (Z :. i :. i :. i))
=> Exp i
-> Exp i
-> Exp i
-> Exp i
-> Exp (Z :. i :. i :. i :. i)
index4 i j k w = lift (Z :. i :. j :. k :. w)
unindex4
:: forall i.
(Elt i, Slice (Z :. i), Slice (Z :. i :. i), Slice (Z :. i :. i :. i))
=> Exp (Z :. i :. i :. i :. i)
-> Exp (i, i, i, i)
unindex4 ix =
let
Z :. i :. j :. k :. w = unlift ix :: Z :. Exp i :. Exp i :. Exp i :. Exp i
in lift (i, j, k, w)
| cpdurham/accelerate-camera-sandbox | src/Acc/Lift.hs | bsd-3-clause | 3,250 | 0 | 15 | 982 | 2,114 | 1,122 | 992 | 127 | 1 |
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -Wall #-}
-- Metric structure
module Tower.Metric (
-- * Metric
BoundedField(..)
, infinity
, neginfinity
, Metric(..)
, Normed(..)
, Signed(..)
, Epsilon(..)
, (≈)
, QuotientField(..)
) where
import qualified Protolude as P
import Protolude (Double, Float, Int, Integer, ($), (<$>), Foldable(..), foldr, Bool(..), Ord(..), Eq(..), any)
import Data.Functor.Rep
import Tower.Ring
import Tower.Field
import Tower.Additive
import Tower.Exponential
import Tower.Multiplicative
class (Field a) => BoundedField a where
maxBound :: a
maxBound = one/zero
minBound :: a
minBound = negate (one/zero)
nan :: a
nan = zero/zero
isNaN :: a -> Bool
infinity :: BoundedField a => a
infinity = maxBound
neginfinity :: BoundedField a => a
neginfinity = minBound
instance BoundedField Float where isNaN = P.isNaN
instance BoundedField Double where isNaN = P.isNaN
instance (Foldable r, Representable r, BoundedField a) =>
BoundedField (r a) where
isNaN a = any isNaN a
class ( AdditiveUnital a
, AdditiveGroup a
, Multiplicative a
) => Signed a where
sign :: a -> a
abs :: a -> a
instance Signed Double where
sign a = if a >= zero then one else negate one
abs = P.abs
instance Signed Float where
sign a = if a >= zero then one else negate one
abs = P.abs
instance Signed Int where
sign a = if a >= zero then one else negate one
abs = P.abs
instance Signed Integer where
sign a = if a >= zero then one else negate one
abs = P.abs
instance (Representable r, Signed a) => Signed (r a) where
sign = fmapRep sign
abs = fmapRep abs
-- | Normed
class Normed a b where
size :: a -> b
instance Normed Double Double where size = P.abs
instance Normed Float Float where size = P.abs
instance Normed Int Int where size = P.abs
instance Normed Integer Integer where size = P.abs
instance (Foldable r, Representable r, ExpField a, ExpRing a) =>
Normed (r a) a where
size r = sqrt $ foldr (+) zero $ (**(one+one)) <$> r
-- | Epsilon
class (AdditiveGroup a) => Epsilon a where
nearZero :: a -> Bool
aboutEqual :: a -> a -> Bool
infixl 4 ≈
(≈) :: (Epsilon a) => a -> a -> Bool
(≈) = aboutEqual
instance Epsilon Double where
nearZero a = abs a <= (1e-12 :: Double)
aboutEqual a b = nearZero $ a - b
instance Epsilon Float where
nearZero a = abs a <= (1e-6 :: Float)
aboutEqual a b = nearZero $ a - b
instance Epsilon Int where
nearZero a = a == zero
aboutEqual a b = nearZero $ a - b
instance Epsilon Integer where
nearZero a = a == zero
aboutEqual a b = nearZero $ a - b
instance (Foldable r, Representable r, Epsilon a) => Epsilon (r a) where
nearZero a = any nearZero $ toList a
aboutEqual a b = any P.identity $ liftR2 aboutEqual a b
-- | Metric
class Metric a b where
distance :: a -> a -> b
instance Metric Double Double where distance a b = abs (a - b)
instance Metric Float Float where distance a b = abs (a - b)
instance Metric Int Int where distance a b = abs (a - b)
instance Metric Integer Integer where distance a b = abs (a - b)
instance (P.Foldable r, Representable r, ExpField a) => Metric (r a) a where
distance a b = size (a - b)
class (Ring a) => QuotientField a where
round :: a -> Integer
ceiling :: a -> Integer
floor :: a -> Integer
(^^) :: a -> Integer -> a
instance QuotientField Float where
round = P.round
ceiling = P.ceiling
floor = P.floor
(^^) = (P.^^)
instance QuotientField Double where
round = P.round
ceiling = P.ceiling
floor = P.floor
(^^) = (P.^^)
| tonyday567/tower | src/Tower/Metric.hs | bsd-3-clause | 3,780 | 0 | 10 | 923 | 1,438 | 786 | 652 | -1 | -1 |
module AERN2.Linear.Matrix.Type where
import qualified Prelude as P
import MixedTypesNumPrelude
import qualified Numeric.CollectErrors as CN
import AERN2.Linear.Vector.Type (Vector, (!))
import qualified AERN2.Linear.Vector.Type as V
import qualified Data.List as List
-- import Data.Maybe
-- import Debug.Trace
import AERN2.MP.Ball
-- import AERN2.MP.Float
-- import AERN2.MP.Dyadic
data (Matrix a) =
Matrix
{
width :: Integer,
entries :: Vector a
} deriving (Show)
height :: Matrix a -> Integer
height (Matrix w e) =
(V.length e) `P.div` w
get :: Matrix a -> Integer -> Integer -> a
get m i j =
entries m ! (i * (width m) + j)
identity :: (HasIntegers a) => Integer -> Integer -> Matrix a
identity m n =
diag m n (convertExactly 1)
diag :: (HasIntegers a) => Integer -> Integer -> a -> Matrix a
diag m n x =
create m n (\i j -> if i == j then x else (convertExactly 0))
rows :: Matrix a -> [Vector a]
rows m@(Matrix w e) =
[V.slice (i*w) w e| i <- [0 .. height m - 1]]
columns :: Matrix a -> Vector (Vector a)
columns m =
V.map (\j -> V.map (\i -> get m i j) $ V.enumFromTo 0 (height m - 1)) $ V.enumFromTo 0 (width m - 1)
create :: Integer -> Integer -> (Integer -> Integer -> a) -> Matrix a
create m n f =
Matrix n $ V.map (\x -> f (i x) (j x)) $ V.enumFromTo 0 (m*n - 1)
where
j x = x `mod` n
i x = (x - j x) `P.div` n
imap :: (Integer -> Integer -> a -> a) -> Matrix a -> Matrix a
imap f (Matrix w ents) =
Matrix w (V.imap g ents)
where
j x = x `mod` w
i x = (x - j x) `P.div` w
g k x = f (i k) (j k) x
instance CanIntersectAsymmetric (Matrix (CN MPBall)) (Matrix (CN MPBall)) where
type IntersectionType (Matrix (CN MPBall)) (Matrix (CN MPBall)) = Matrix (CN MPBall)
intersect (Matrix w0 v0) (Matrix _w1 v1) =
Matrix w0 $ V.zipWith intersect v0 v1
inftyNorm :: (CanAddSameType a, CanSubSameType a, CanAbsSameType a, HasIntegers a, CanMinMaxSameType a) => Matrix a -> a
inftyNorm (m :: Matrix a) =
-- TODO: could be optimised.
List.foldl' max (convertExactly 0 :: a)
[
V.foldl' (+) (convertExactly 0 :: a) $ V.map abs r
|
r <- rows m
]
instance Functor Matrix where
fmap h m =
Matrix (width m) (V.map h (entries m))
instance
(CanAddSameType a, CanMulSameType a, HasIntegers a) =>
CanMulAsymmetric (Matrix a) (Matrix a)
where
type MulType (Matrix a) (Matrix a) = Matrix a
mul m0 m1 =
create (height m0) (width m1) (aux 0 (convertExactly 0))
where
aux k sm i j =
if k == width m0 then
sm
else
aux (k + 1) (sm + (get m0 i k) * (get m1 k j)) i j
instance
(CanAddSameType a) =>
CanAddAsymmetric (Matrix a) (Matrix a)
where
type AddType (Matrix a) (Matrix a) = Matrix a
add (Matrix w e) (Matrix _ e') =
Matrix w (e + e')
instance
(CanSubSameType a) =>
CanSub (Matrix a) (Matrix a)
where
type SubType (Matrix a) (Matrix a) = Matrix a
sub (Matrix w e) (Matrix _ e') =
Matrix w (e - e')
instance
(CanAddSameType a, CanMulSameType a, HasIntegers a) =>
CanMulAsymmetric (Matrix a) (Vector a)
where
type MulType (Matrix a) (Vector a) = Vector a
mul m@(Matrix _w _e) v =
V.fromList [r * v| r <- rows m]
instance
(HasAccuracy a, HasPrecision a) => HasAccuracy (Matrix a)
where
getAccuracy m =
V.foldl' max NoInformation $ V.map getAccuracy (entries m)
instance
(HasPrecision a) => HasPrecision (Matrix a)
where
getPrecision m =
V.foldl' max (prec 2) $ V.map getPrecision (entries m)
instance
(CN.CanTestErrorsPresent a) => CN.CanTestErrorsPresent (Matrix a)
where
hasError m = V.foldl' (||) False $ V.map (CN.hasError) (entries m)
| michalkonecny/aern2 | aern2-mfun/src/AERN2/Linear/Matrix/Type.hs | bsd-3-clause | 3,871 | 0 | 14 | 1,093 | 1,762 | 912 | 850 | -1 | -1 |
-- |
-- Module : Git.FastExport.Filter
-- Maintainer : [email protected]
--
-- A collection of filters for git
module Git.FastExport.Filter where
import Git.FastExport.Types
import qualified Data.ByteString as B
import Data.List
import qualified Data.Trie as T
splitBranches :: [(Path, Branch)] -> CmdFilter
splitBranches paths (GCommit commit@Commit{commitChanges = c}) =
flip concatMap paths $
\(p,b) -> let newChanges =
flip concatMap c $
\chg -> if p `B.isPrefixOf` chgPath chg then
[chg{chgPath = B.drop (B.length p) (chgPath chg)}]
else []
in if null newChanges then [] else [GCommit commit{commitHeader=(commitHeader commit){chBranch = b}, commitChanges = newChanges}]
splitBranches _ GReset{} = []
splitBranches _ c@GProgress{} = [c]
dropPaths :: [Path] -> CmdFilter
dropPaths paths (GCommit commit@Commit{commitChanges = c}) =
let filtered = filter (\p -> not . any (`B.isPrefixOf` chgPath p) $ paths) c
in if null filtered then [] else [GCommit commit{commitChanges = filtered}]
dropPaths _ c@GProgress{} = [c]
dropPaths _ _ = []
| lumimies/git-fastexport-filter | src/Git/FastExport/Filter.hs | bsd-3-clause | 1,080 | 37 | 11 | 191 | 397 | 234 | 163 | 22 | 3 |
{-|
This module provides a Template Haskell function for automatically generating
reified typeclass dictionaries for use with "Control.Monad.TestFixture".
These generated dictionaries can be used with functions like
'Control.Monad.TestFixture.unTestFixture' and
'Control.Monad.TestFixture.logTestFixture' to quickly implement monadic
typeclasses in a way that can be used to “stub out” functionality in unit
tests.
The 'mkFixture' function is a Template Haskell code generation tool, which
generates three things:
1. A record type that represents a reified typeclass dictionary (or set of
typeclass dictionaries). The record contains fields that correspond to
the methods of the provided typeclasses, with ordinary method names
prefixed with a @_@ character and infix method names prefixed with a @~@
character.
2. A 'Default' instance for the generated record type, which automatically
fills all fields with stub implementations that will throw using
'unimplemented'.
3. Typeclass implementations for all of the provided typeclasses using
'TestFixture' and the generated record type that defer to the
implementations provided through the reified dictionary.
In practice, this is used to generate “fixture” types that are used within
tests. For example, consider some typeclasses that encode side-effectful
monadic operations:
> class Monad m => DB m where
> fetchRecord :: DBRecord a => Id a -> m (Either DBError a)
> insertRecord :: DBRecord a => a -> m (Either DBError (Id a))
>
> class Monad m => HTTP m where
> sendRequest :: HTTPRequest -> m (Either HTTPError HTTPResponse)
The typeclasses may have relatively straightforward instances for 'IO'.
However, one of their main values is that alternative instances may be
provided in unit tests, which is what 'TestFixture' provides. Therefore,
one might use 'mkFixture' to create some utilities for stubbing these
typeclasses out:
> mkFixture "Fixture" [ts| DB, HTTP |]
This generates code much like the following:
> data Fixture m =
> { _fetchRecord :: DBRecord a => Id a -> m (Either DBError a)
> , _insertRecord :: DBRecord a => a -> m (Either DBError (Id a))
> , _sendRequest :: HTTPRequest -> m (Either HTTPError HTTPResponse)
> }
>
> instance Default (Fixture m) where
> def = Fixture
> { _fetchRecord = unimplemented "_fetchRecord"
> , _insertRecord = unimplemented "_insertRecord"
> , _sendRequest = unimplemented "_sendRequest"
> }
>
> type FixturePure = Fixture (TestFixture Fixture () ())
> type FixtureLog log = Fixture (TestFixture Fixture log ())
> type FixtureState state = Fixture (TestFixture Fixture () state)
> type FixtureLogState log state = Fixture (TestFixture Fixture log state)
>
> type FixturePureT m = Fixture (TestFixtureT Fixture () () m)
> type FixtureLogT log m = Fixture (TestFixtureT Fixture log () m)
> type FixtureStateT state m = Fixture (TestFixtureT Fixture () state m)
> type FixtureLogStateT log state m = Fixture (TestFixtureT Fixture log state m)
>
> instance Monad m => DB (TestFixtureT Fixture w s m) where
> fetchRecord r = do
> fn <- asks _fetchRecord
> fn r
> insertRecord r = do
> fn <- asks _insertRecord
> fn r
>
> instance Monad m => HTTP (TestFixtureT Fixture w s m) where
> sendRequest r = do
> fn <- asks _sendRequest
> fn r
This type can then be used in tandem with "Control.Monad.TestFixture" to
create stubbed typeclass instances and run computations using them.
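For illustration only, a test might then override a single field of the
generated record and run an action against it. In the sketch below,
@fakeResponse@ and @someRequest@ are hypothetical values; only 'def',
'Control.Monad.TestFixture.unTestFixture', and the generated @_sendRequest@
field come from the machinery described above:
> let fixture = def { _sendRequest = \_ -> return (Right fakeResponse) }
> in unTestFixture (sendRequest someRequest) fixture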
-}
module Control.Monad.TestFixture.TH
( mkFixture
, def
, ts
) where
import Control.Monad.TestFixture.TH.Internal (mkFixture)
import Control.Monad.TestFixture.TH.Internal.TypesQuasi (ts)
import Data.Default.Class (def)
| cjdev/test-fixture | src/Control/Monad/TestFixture/TH.hs | bsd-3-clause | 3,878 | 0 | 5 | 838 | 58 | 40 | 18 | 7 | 0 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE LambdaCase #-}
module Duckling.Quantity.PT.Rules
( rules ) where
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Text as Text
import Prelude
import Data.String
import Data.Text (Text)
import Duckling.Dimensions.Types
import Duckling.Numeral.Types (NumeralData (..))
import qualified Duckling.Numeral.Types as TNumeral
import Duckling.Numeral.Helpers
import Duckling.Quantity.Helpers
import qualified Duckling.Quantity.Types as TQuantity
import Duckling.Regex.Types
import Duckling.Types
quantities :: [(Text, String, TQuantity.Unit)]
quantities =
[ ("<quantity> copos", "(copos?)", TQuantity.Cup)
, ("<quantity> gramas", "((((mili)|(quilo))?(grama)s?)|(quilos?)|((m|k)?g))", TQuantity.Gram)
, ("<quantity> libras", "((lb|libra)s?)", TQuantity.Pound)
]
opsMap :: HashMap Text (Double -> Double)
opsMap = HashMap.fromList
[ ( "miligrama" , (/ 1000))
, ( "miligramas" , (/ 1000))
, ( "mg" , (/ 1000))
, ( "mgs" , (/ 1000))
, ( "quilograma" , (* 1000))
, ( "quilogramas", (* 1000))
, ( "quilo" , (* 1000))
, ( "quilos" , (* 1000))
, ( "kg" , (* 1000))
, ( "kgs" , (* 1000))
]
ruleNumeralQuantities :: [Rule]
ruleNumeralQuantities = map go quantities
where
go :: (Text, String, TQuantity.Unit) -> Rule
go (name, regexPattern, u) = Rule
{ name = name
, pattern = [Predicate isPositive, regex regexPattern]
, prod = \case
(Token Numeral nd:
Token RegexMatch (GroupMatch (match:_)):
_) -> do
let value = getValue opsMap match $ TNumeral.value nd
Just $ Token Quantity $ quantity u value
_ -> Nothing
}
ruleQuantityOfProduct :: Rule
ruleQuantityOfProduct = Rule
{ name = "<quantity> of product"
, pattern =
[ dimension Quantity
, regex "de (\\w+)"
]
, prod = \case
(Token Quantity qd:Token RegexMatch (GroupMatch (product:_)):_) ->
Just $ Token Quantity $ withProduct (Text.toLower product) qd
_ -> Nothing
}
rules :: [Rule]
rules =
[ ruleQuantityOfProduct ]
++ ruleNumeralQuantities
| facebookincubator/duckling | Duckling/Quantity/PT/Rules.hs | bsd-3-clause | 2,436 | 0 | 19 | 541 | 665 | 403 | 262 | 63 | 2 |
{-# LANGUAGE CPP #-}
module Gidl.Backend.Haskell.Interface where
import Data.Monoid
import Data.List (intercalate, nub)
import Data.Char (toUpper)
import Gidl.Types hiding (typeName)
import Gidl.Interface
import Gidl.Schema
import Gidl.Backend.Haskell.Types
import Ivory.Artifact
#if MIN_VERSION_mainland_pretty(0,6,0)
import Text.PrettyPrint.Mainland.Class
#endif
import Text.PrettyPrint.Mainland
interfaceModule :: Bool -> [String] -> Interface -> Artifact
interfaceModule useAeson modulepath i =
artifactPath (intercalate "/" modulepath) $
artifactText ((ifModuleName i) ++ ".hs") $
prettyLazyText 1000 $
stack $
[ text "{-# LANGUAGE DeriveDataTypeable #-}"
, text "{-# LANGUAGE DeriveGeneric #-}"
, text "{-# OPTIONS_GHC -fno-warn-unused-imports #-}"
, empty
, text "module"
<+> im (ifModuleName i)
<+> text "where"
, empty
, stack $ typeimports ++ extraimports
, empty
, schemaDoc useAeson (ifModuleName i) (producerSchema i)
, empty
, schemaDoc useAeson (ifModuleName i) (consumerSchema i)
, empty
]
where
im mname = mconcat $ punctuate dot
$ map text (modulepath ++ [mname])
tm mname = mconcat $ punctuate dot
$ map text (typepath modulepath ++ ["Types", mname])
where typepath = reverse . drop 1 . reverse
typeimports = map (\a -> importDecl tm a </> qualifiedImportDecl tm a)
$ nub
$ map importType
$ (++ [sequence_num_t])
$ interfaceTypes i
extraimports = [ text "import Data.Serialize"
, text "import Data.Typeable"
, text "import Data.Data"
, text "import GHC.Generics (Generic)"
, text "import qualified Test.QuickCheck as Q"
] ++
[ text "import Data.Aeson (ToJSON,FromJSON)" | useAeson ]
schemaDoc :: Bool -> String -> Schema -> Doc
schemaDoc _ interfaceName (Schema schemaName []) =
text "-- Cannot define" <+> text schemaName <+> text "schema for"
<+> text interfaceName <+> text "interface: schema is empty"
schemaDoc useAeson interfaceName s@(Schema schemaName schema) = stack $
[ text "-- Define" <+> text schemaName <+> text "schema for"
<+> text interfaceName <+> text "interface"
, text "data" <+> text typeName
, indent 2 $ encloseStack equals deriv (text "|")
[ text (constructorName n) <+> text (typeHaskellType t)
| (_, (Message n t)) <- schema
]
, empty
, text ("put" ++ typeName) <+> colon <> colon <+> text "Putter" <+> text typeName
, stack
[ text ("put" ++ typeName)
<+> parens (text (constructorName n) <+> text "m")
<+> equals
<+> primTypePutter (sizedPrim Bits32) <+> ppr h <+> text ">>"
<+> text "put" <+> text "m"
| (h, Message n _) <- schema ]
, empty
, text ("get" ++ typeName) <+> colon <> colon <+> text "Get" <+> text typeName
, text ("get" ++ typeName) <+> equals <+> text "do"
, indent 2 $ stack
[ text "a <-" <+> primTypeGetter (sizedPrim Bits32)
, text "case a of"
, indent 2 $ stack $
[ ppr h <+> text "-> do" </> (indent 2 (stack
[ text "m <- get"
, text "return" <+> parens (text (constructorName n) <+> text "m")
]))
| (h,Message n _) <- schema
] ++
[ text "_ -> fail"
<+> dquotes (text "encountered unknown tag in get" <> text typeName)
]
]
, empty
, serializeInstance typeName
, empty
, text ("arbitrary" ++ typeName) <+> colon <> colon <+> text "Q.Gen" <+> text typeName
, text ("arbitrary" ++ typeName) <+> equals
, indent 2 $ text "Q.oneof" <+> encloseStack lbracket rbracket comma
[ text "do" </> (indent 4 (stack
[ text "a <- Q.arbitrary"
, text "return" <+> parens (text (constructorName n) <+> text "a")
]))
| (_, Message n _) <- schema
]
, empty
, arbitraryInstance typeName
, empty
] ++
[ toJSONInstance typeName | useAeson ] ++
[ fromJSONInstance typeName | useAeson ] ++
[ seqnumGetter typeName s ]
where
constructorName n = userTypeModuleName n ++ schemaName
deriv = text "deriving (Eq, Show, Data, Typeable, Generic)"
typeName = interfaceName ++ schemaName
ifModuleName :: Interface -> String
ifModuleName (Interface iname _ _) = aux iname
where
aux :: String -> String
aux = first_cap . u_to_camel
first_cap (s:ss) = (toUpper s) : ss
first_cap [] = []
u_to_camel ('_':'i':[]) = []
u_to_camel ('_':[]) = []
u_to_camel ('_':a:as) = (toUpper a) : u_to_camel as
u_to_camel (a:as) = a : u_to_camel as
u_to_camel [] = []
seqnumGetter :: String -> Schema -> Doc
seqnumGetter _ (Schema _ []) = empty
seqnumGetter typeName (Schema schemaName ms) = stack
[ text "seqNumGetter" <> text typeName
<+> colon <> colon <+> text typeName
<+> text "->" <+> text "SequenceNum"
, stack [ text "seqNumGetter" <> text typeName
<+> parens (text (constructorName mname) <+> text "_a")
<+> equals <+> aux mtype
| (_,Message mname mtype) <- ms
]
]
where
constructorName n = userTypeModuleName n ++ schemaName
aux mtype
| isSeqNum mtype = text "_a"
| isSeqNumbered mtype = text (userTypeModuleName (structTypeName mtype))
<> dot <> text "seqnum" <+> text "_a"
| otherwise = text "error \"impossible: should not be asking for"
<+> text "sequence number of non-attribute\""
isSeqNum a = a == sequence_num_t
-- XXX the following is ugly and i know it:
isSeqNumbered (StructType _ [("seqnum",_),("val",_)]) = True
isSeqNumbered _ = False
structTypeName (StructType a _) = a
structTypeName _ = error "impossible"
| GaloisInc/gidl | src/Gidl/Backend/Haskell/Interface.hs | bsd-3-clause | 5,975 | 0 | 28 | 1,745 | 1,880 | 943 | 937 | 135 | 6 |
-- | Extra functions for optparse-applicative.
module Options.Applicative.Builder.Extra
(boolFlags
,boolFlagsNoDefault
,maybeBoolFlags
,enableDisableFlags
,enableDisableFlagsNoDefault
,extraHelpOption
,execExtraHelp
,textOption
,textArgument)
where
import Control.Monad (when)
import Options.Applicative
import Options.Applicative.Types (readerAsk)
import System.Environment (withArgs)
import System.FilePath (takeBaseName)
import Data.Text (Text)
import qualified Data.Text as T
-- | Enable/disable flags for a @Bool@.
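--
-- As a rough illustration (the flag name and help suffix here are made up),
-- something like
--
-- > boolFlags True "colors" "using colors in output" idm
--
-- is intended to accept both @--colors@ and @--no-colors@, defaulting to 'True'.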
boolFlags :: Bool -> String -> String -> Mod FlagFields Bool -> Parser Bool
boolFlags defaultValue = enableDisableFlags defaultValue True False
-- | Enable/disable flags for a @Bool@, without a default case (to allow chaining @<|>@s).
boolFlagsNoDefault :: Maybe Bool -> String -> String -> Mod FlagFields Bool -> Parser Bool
boolFlagsNoDefault = enableDisableFlagsNoDefault True False
-- | Enable/disable flags for a @(Maybe Bool)@.
maybeBoolFlags :: String -> String -> Mod FlagFields (Maybe Bool) -> Parser (Maybe Bool)
maybeBoolFlags = enableDisableFlags Nothing (Just True) (Just False)
-- | Enable/disable flags for any type.
enableDisableFlags :: (Eq a) => a -> a -> a -> String -> String -> Mod FlagFields a -> Parser a
enableDisableFlags defaultValue enabledValue disabledValue name helpSuffix mods =
enableDisableFlagsNoDefault enabledValue disabledValue (Just defaultValue) name helpSuffix mods <|>
pure defaultValue
-- | Enable/disable flags for any type, without a default (to allow chaining @<|>@s)
enableDisableFlagsNoDefault :: (Eq a) => a -> a -> Maybe a -> String -> String -> Mod FlagFields a -> Parser a
enableDisableFlagsNoDefault enabledValue disabledValue maybeHideValue name helpSuffix mods =
last <$> some (enableDisableFlagsNoDefault' enabledValue disabledValue maybeHideValue name helpSuffix mods)
enableDisableFlagsNoDefault' :: (Eq a) => a -> a -> Maybe a -> String -> String -> Mod FlagFields a -> Parser a
enableDisableFlagsNoDefault' enabledValue disabledValue maybeHideValue name helpSuffix mods =
let hideEnabled = Just enabledValue == maybeHideValue
hideDisabled = Just disabledValue == maybeHideValue
in flag'
enabledValue
((if hideEnabled
then hidden <> internal
else idm) <>
long name <>
help
(concat $ concat
[ ["Enable ", helpSuffix]
, [" (--no-" ++ name ++ " to disable)" | hideDisabled]]) <>
mods) <|>
flag'
enabledValue
(hidden <> internal <> long ("enable-" ++ name) <> mods) <|>
flag'
disabledValue
((if hideDisabled
then hidden <> internal
else idm) <>
long ("no-" ++ name) <>
help
(concat $ concat
[ ["Disable ", helpSuffix]
, [" (--" ++ name ++ " to enable)" | hideEnabled]]) <>
mods) <|>
flag'
disabledValue
(hidden <> internal <> long ("disable-" ++ name) <> mods)
-- | Show an extra help option (e.g. @--docker-help@ shows help for all @--docker*@ args).
-- To actually have that help appear, use 'execExtraHelp' before executing the main parser.
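--
-- A rough sketch of the intended pairing, assuming the main parser already
-- includes the corresponding 'extraHelpOption' (@dockerParser@,
-- @mainParserInfo@ and @runWithOpts@ are hypothetical; 'getArgs' comes from
-- "System.Environment"):
--
-- > main = do
-- >   args <- getArgs
-- >   execExtraHelp args "docker-help" dockerParser "Docker options"
-- >   execParser mainParserInfo >>= runWithOpts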
extraHelpOption :: Bool -> String -> String -> String -> Parser (a -> a)
extraHelpOption hide progName fakeName helpName =
infoOption (optDesc' ++ ".") (long helpName <> hidden <> internal) <*>
infoOption (optDesc' ++ ".") (long fakeName <>
help optDesc' <>
(if hide then hidden <> internal else idm))
where optDesc' = concat ["Run '", takeBaseName progName, " --", helpName, "' for details"]
-- | Display extra help if the extra help option is passed in the arguments.
-- Since optparse-applicative doesn't allow an arbitrary IO action for an 'abortOption', this
-- was the best way I found that doesn't require manually formatting the help.
execExtraHelp :: [String] -> String -> Parser a -> String -> IO ()
execExtraHelp args helpOpt parser pd =
when (args == ["--" ++ helpOpt]) $
withArgs ["--help"] $ do
_ <- execParser (info (hiddenHelper <*>
((,) <$>
parser <*>
some (strArgument (metavar "OTHER ARGUMENTS"))))
(fullDesc <> progDesc pd))
return ()
where hiddenHelper = abortOption ShowHelpText (long "help" <> hidden <> internal)
textOption :: Mod OptionFields Text -> Parser Text
textOption = option (T.pack <$> readerAsk)
textArgument :: Mod ArgumentFields Text -> Parser Text
textArgument = argument (T.pack <$> readerAsk)
| mathhun/stack | src/Options/Applicative/Builder/Extra.hs | bsd-3-clause | 4,788 | 0 | 21 | 1,243 | 1,161 | 604 | 557 | 84 | 3 |
{-# LANGUAGE PatternGuards #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Idris.Elab.Value(elabVal, elabValBind, elabDocTerms,
elabExec, elabREPL) where
import Idris.AbsSyntax
import Idris.ASTUtils
import Idris.DSL
import Idris.Error
import Idris.Delaborate
import Idris.Imports
import Idris.Coverage
import Idris.DataOpts
import Idris.Providers
import Idris.Primitives
import Idris.Inliner
import Idris.PartialEval
import Idris.DeepSeq
import Idris.Output (iputStrLn, pshow, iWarn, sendHighlighting)
import IRTS.Lang
import Idris.Elab.Utils
import Idris.Elab.Term
import Idris.Core.TT
import Idris.Core.Elaborate hiding (Tactic(..))
import Idris.Core.Evaluate hiding (Unchecked)
import Idris.Core.Execute
import Idris.Core.Typecheck
import Idris.Core.CaseTree
import Idris.Docstrings
import Prelude hiding (id, (.))
import Control.Category
import Control.Applicative hiding (Const)
import Control.DeepSeq
import Control.Monad
import Control.Monad.State.Strict as State
import Data.List
import Data.Maybe
import qualified Data.Traversable as Traversable
import Debug.Trace
import qualified Data.Map as Map
import qualified Data.Set as S
import qualified Data.Text as T
import Data.Char(isLetter, toLower)
import Data.List.Split (splitOn)
import Util.Pretty(pretty, text)
-- | Elaborate a value, returning any new bindings created (this will only
-- happen if elaborating as a pattern clause)
elabValBind :: ElabInfo -> ElabMode -> Bool -> PTerm -> Idris (Term, Type, [(Name, Type)])
elabValBind info aspat norm tm_in
= do ctxt <- getContext
i <- getIState
let tm = addImpl [] i tm_in
logLvl 10 (showTmImpls tm)
-- try:
-- * ordinary elaboration
-- * elaboration as a Type
-- * elaboration as a function a -> b
(ElabResult tm' defer is ctxt' newDecls highlights, _) <-
tclift (elaborate ctxt (idris_datatypes i) (sMN 0 "val") infP initEState
(build i info aspat [Reflection] (sMN 0 "val") (infTerm tm)))
-- Extend the context with new definitions created
setContext ctxt'
processTacticDecls info newDecls
sendHighlighting highlights
let vtm = orderPats (getInferTerm tm')
def' <- checkDef (fileFC "(input)") iderr defer
let def'' = map (\(n, (i, top, t)) -> (n, (i, top, t, True))) def'
addDeferred def''
mapM_ (elabCaseBlock info []) is
logLvl 3 ("Value: " ++ show vtm)
(vtm_in, vty) <- recheckC (fileFC "(input)") id [] vtm
let vtm = if norm then normalise (tt_ctxt i) [] vtm_in
else vtm_in
let bargs = getPBtys vtm
return (vtm, vty, bargs)
elabVal :: ElabInfo -> ElabMode -> PTerm -> Idris (Term, Type)
elabVal info aspat tm_in
= do (tm, ty, _) <- elabValBind info aspat False tm_in
return (tm, ty)
elabDocTerms :: ElabInfo -> Docstring (Either Err PTerm) -> Idris (Docstring DocTerm)
elabDocTerms info str = do typechecked <- Traversable.mapM decorate str
return $ checkDocstring mkDocTerm typechecked
where decorate (Left err) = return (Left err)
decorate (Right pt) = fmap (fmap fst) (tryElabVal info ERHS pt)
tryElabVal :: ElabInfo -> ElabMode -> PTerm -> Idris (Either Err (Term, Type))
tryElabVal info aspat tm_in
= idrisCatch (fmap Right $ elabVal info aspat tm_in)
(return . Left)
mkDocTerm :: String -> [String] -> String -> Either Err Term -> DocTerm
mkDocTerm lang attrs src (Left err)
| map toLower lang == "idris" = Failing err
| otherwise = Unchecked
mkDocTerm lang attrs src (Right tm)
| map toLower lang == "idris" = if "example" `elem` map (map toLower) attrs
then Example tm
else Checked tm
| otherwise = Unchecked
-- | Try running the term directly (as IO ()), then printing it as an Integer
-- (as a default numeric type), then printing it as any Showable thing
elabExec :: FC -> PTerm -> PTerm
elabExec fc tm = runtm (PAlternative FirstSuccess
[printtm (PApp fc (PRef fc (sUN "the"))
[pexp (PConstant NoFC (AType (ATInt ITBig))), pexp tm]),
tm,
printtm tm
])
where
runtm t = PApp fc (PRef fc (sUN "run__IO")) [pexp t]
printtm t = PApp fc (PRef fc (sUN "printLn"))
[pimp (sUN "ffi") (PRef fc (sUN "FFI_C")) False, pexp t]
elabREPL :: ElabInfo -> ElabMode -> PTerm -> Idris (Term, Type)
elabREPL info aspat tm
= idrisCatch (elabVal info aspat tm) catchAmbig
where
catchAmbig (CantResolveAlts _)
= elabVal info aspat (PDisamb [[txt "List"]] tm)
catchAmbig e = ierror e
| bkoropoff/Idris-dev | src/Idris/Elab/Value.hs | bsd-3-clause | 4,932 | 0 | 19 | 1,354 | 1,505 | 797 | 708 | 104 | 4 |
module Problem29 where
import Data.List
main :: IO ()
main = print . length . nub $ [ a ^ b | a <- [2 .. 100], b <- [2 .. 100] ]
| adityagupta1089/Project-Euler-Haskell | src/problems/Problem29.hs | bsd-3-clause | 131 | 0 | 9 | 35 | 71 | 40 | 31 | 4 | 1 |
------------------------------------------------------------------------------
-- |
-- Module : Data.TokyoDystopia
-- Copyright : 8c6794b6 <[email protected]>
-- License : BSD3
-- Maintainer : 8c6794b6
-- Stability : experimental
-- Portability : non-portable
--
-- Haskell binding for tokyodystopia full text search.
-- For more information about tokyo dystopia, visit:
--
-- * <http://fallabs.com/tokyodystopia/>
--
-- /Examples/:
--
-- Doing put and get of ByteString value with IDB:
--
-- > import qualified Data.ByteString.Char8 as C8
-- > import qualified Database.TokyoDystopia as TD
-- >
-- > main :: IO ()
-- > main = do
-- > foo <- TD.runTDM $ do
-- > db <- TD.new :: TD.TDM TD.IDB
-- > TD.open db "casket" [TD.OCREAT, TD.OWRITER]
-- > TD.put db 1 (C8.pack "foo")
-- > result <- TD.get db 1
-- > TD.close db
-- > return result
-- > print foo
--
-- Searching IDB database:
--
-- > import Control.Monad (zipWithM_)
-- > import qualified Data.ByteString.Char8 as C8
-- > import qualified Database.TokyoDystopia as TD
-- >
-- > main :: IO ()
-- > main = do
-- > vals <- C8.lines `fmap` C8.readFile "/etc/group"
-- > keys <- TD.runTDM $ do
-- > db <- TD.new :: TD.TDM TD.IDB
-- > TD.open db "casket" [TD.OCREAT, TD.OWRITER]
-- > zipWithM_ (TD.put db) [1..] vals
-- > result <- TD.search db "root" [TD.GMSUBSTR]
-- > TD.close db
-- > return result
-- > print keys
--
module Database.TokyoDystopia
( IDB.IDB
, QDB.QDB
, JDB.JDB
, WDB.WDB
, module Database.TokyoDystopia.Class
, module Database.TokyoDystopia.Types
, module Database.TokyoDystopia.Utils
) where
import Database.TokyoDystopia.Class
import Database.TokyoDystopia.Types
import Database.TokyoDystopia.Utils
import qualified Database.TokyoDystopia.QDB as QDB
import qualified Database.TokyoDystopia.JDB as JDB
import qualified Database.TokyoDystopia.IDB as IDB
import qualified Database.TokyoDystopia.WDB as WDB
| 8c6794b6/tokyodystopia-haskell | Database/TokyoDystopia.hs | bsd-3-clause | 1,982 | 0 | 5 | 388 | 153 | 122 | 31 | 15 | 0 |
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
module Main where
import Control.THEff
import Control.THEff.Writer
import Control.Monad(forM_)
import Data.Monoid
type IntAccum = Sum Int
mkEff "StrWriter" ''Writer ''String ''NoEff
mkEff "IntWriter" ''Writer ''IntAccum ''StrWriter
main:: IO ()
main = putStrLn $ uncurry (flip (++)) $ runStrWriter $ do
tell "Result"
(r, Sum v) <- runIntWriter $ do
tell "="
forM_ [1::Int .. 10]
(tell . Sum)
return (pi :: Float)
return $ show $ r * fromIntegral v | KolodeznyDiver/THEff | samples/SampleWriter.hs | bsd-3-clause | 708 | 0 | 14 | 207 | 203 | 105 | 98 | 21 | 1 |